hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7ce74a57958dcd3189bb60c32bb883591752b0e8 | 108 | py | Python | test.py | filipdbrsk/ks_mole | fceefe94c05e552f2728c0a319c4270267e80e3c | [
"Apache-2.0"
] | null | null | null | test.py | filipdbrsk/ks_mole | fceefe94c05e552f2728c0a319c4270267e80e3c | [
"Apache-2.0"
] | null | null | null | test.py | filipdbrsk/ks_mole | fceefe94c05e552f2728c0a319c4270267e80e3c | [
"Apache-2.0"
] | null | null | null | import json
fname="lemma.json"
f=open(fname, "r")
for line in f:
j=json.loads(line)
print j["Arizona"]
| 10.8 | 19 | 0.666667 | 20 | 108 | 3.6 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.157407 | 108 | 9 | 20 | 12 | 0.791209 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.166667 | null | null | 0.166667 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
7ceb13de947c7f1b34841fb4e888c8635cc18e0b | 71 | py | Python | src/rewrapped/__init__.py | hansi-b/reWrapped | eb3d29ee668acdc8894288d4d4b3c9046a7c5c72 | [
"MIT"
] | 1 | 2018-03-08T20:58:12.000Z | 2018-03-08T20:58:12.000Z | src/rewrapped/__init__.py | hansi-b/reWrapped | eb3d29ee668acdc8894288d4d4b3c9046a7c5c72 | [
"MIT"
] | null | null | null | src/rewrapped/__init__.py | hansi-b/reWrapped | eb3d29ee668acdc8894288d4d4b3c9046a7c5c72 | [
"MIT"
] | null | null | null | from rewrapped.patterns import ReWrap
__all__ = ['matched', "ReWrap"]
| 17.75 | 37 | 0.746479 | 8 | 71 | 6.125 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126761 | 71 | 3 | 38 | 23.666667 | 0.790323 | 0 | 0 | 0 | 0 | 0 | 0.183099 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
6b0a5ac630fe9b960741a3094b9c371954eee33a | 65 | py | Python | airrun/cli/info.py | Hanlen520/airtest_run | 75111163443fd7ab3dfaf20f1bdc90102abba29a | [
"Apache-2.0"
] | 1 | 2020-04-15T10:49:09.000Z | 2020-04-15T10:49:09.000Z | airrun/cli/info.py | Hanlen520/airtest_run | 75111163443fd7ab3dfaf20f1bdc90102abba29a | [
"Apache-2.0"
] | null | null | null | airrun/cli/info.py | Hanlen520/airtest_run | 75111163443fd7ab3dfaf20f1bdc90102abba29a | [
"Apache-2.0"
] | 1 | 2020-07-08T04:23:40.000Z | 2020-07-08T04:23:40.000Z |
infos = """
AirRun Project For running airtest project easy!
""" | 16.25 | 48 | 0.707692 | 8 | 65 | 5.75 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.169231 | 65 | 4 | 49 | 16.25 | 0.851852 | 0 | 0 | 0 | 0 | 0 | 0.769231 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6b138815d9b5672082cb8f214e43e041f07d02fa | 14,273 | py | Python | opencga-client/src/main/python/pyopencga/rest_clients/individual_client.py | fabbondanza/opencga | a00ff9b26b94585212c09cce4a6d23d161b0851e | [
"Apache-2.0"
] | null | null | null | opencga-client/src/main/python/pyopencga/rest_clients/individual_client.py | fabbondanza/opencga | a00ff9b26b94585212c09cce4a6d23d161b0851e | [
"Apache-2.0"
] | null | null | null | opencga-client/src/main/python/pyopencga/rest_clients/individual_client.py | fabbondanza/opencga | a00ff9b26b94585212c09cce4a6d23d161b0851e | [
"Apache-2.0"
] | null | null | null | """
WARNING: AUTOGENERATED CODE
This code was generated by a tool.
Autogenerated on: 2020-07-05 08:52:22
Manual changes to this file may cause unexpected behavior in your application.
Manual changes to this file will be overwritten if the code is regenerated.
"""
from pyopencga.rest_clients._parent_rest_clients import _ParentRestClient
class Individual(_ParentRestClient):
"""
This class contains methods for the 'Individuals' webservices
Client version: 2.0.0
PATH: /{apiVersion}/individuals
"""
def __init__(self, configuration, token=None, login_handler=None, *args, **kwargs):
super(Individual, self).__init__(configuration, token, login_handler, *args, **kwargs)
def update_acl(self, members, action, data=None, **options):
"""
Update the set of permissions granted for the member.
PATH: /{apiVersion}/individuals/acl/{members}/update
:param dict data: JSON containing the parameters to update the
permissions. If propagate flag is set to true, it will propagate
the permissions defined to the samples that are associated to the
matching individuals. (REQUIRED)
:param str action: Action to be performed [ADD, SET, REMOVE or RESET].
(REQUIRED)
:param str members: Comma separated list of user or group ids.
(REQUIRED)
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param bool propagate: Propagate individual permissions to related
samples.
"""
options['action'] = action
return self._post(category='individuals', resource='update', subcategory='acl', second_query_id=members, data=data, **options)
def aggregation_stats(self, **options):
"""
Fetch catalog individual stats.
PATH: /{apiVersion}/individuals/aggregationStats
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param bool has_father: Has father.
:param bool has_mother: Has mother.
:param str sex: Sex.
:param str karyotypic_sex: Karyotypic sex.
:param str ethnicity: Ethnicity.
:param str population: Population.
:param str creation_year: Creation year.
:param str creation_month: Creation month (JANUARY, FEBRUARY...).
:param str creation_day: Creation day.
:param str creation_day_of_week: Creation day of week (MONDAY,
TUESDAY...).
:param str status: Status.
:param str life_status: Life status.
:param str phenotypes: Phenotypes.
:param str num_samples: Number of samples.
:param bool parental_consanguinity: Parental consanguinity.
:param str release: Release.
:param str version: Version.
:param str annotation: Annotation filters. Example:
age>30;gender=FEMALE. For more information, please visit
http://docs.opencb.org/display/opencga/AnnotationSets+1.4.0.
:param bool default: Calculate default stats.
:param str field: List of fields separated by semicolons, e.g.:
studies;type. For nested fields use >>, e.g.:
studies>>biotype;type;numSamples[0..10]:1.
"""
return self._get(category='individuals', resource='aggregation_stats', **options)
def load_annotation_sets(self, variable_set_id, path, data=None, **options):
"""
Load annotation sets from a TSV file.
PATH: /{apiVersion}/individuals/annotationSets/load
:param str path: Path where the TSV file is located in OpenCGA or
where it should be located. (REQUIRED)
:param str variable_set_id: Variable set ID or name. (REQUIRED)
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param bool parents: Flag indicating whether to create parent
directories if they don't exist (only when TSV file was not
previously associated).
:param str annotation_set_id: Annotation set id. If not provided,
variableSetId will be used.
:param dict data: JSON containing the 'content' of the TSV file if
this has not yet been registered into OpenCGA.
"""
options['variable_set_id'] = variable_set_id
options['path'] = path
return self._post(category='individuals', resource='load', subcategory='annotationSets', data=data, **options)
def create(self, data=None, **options):
"""
Create individual.
PATH: /{apiVersion}/individuals/create
:param dict data: JSON containing individual information. (REQUIRED)
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param str samples: Comma separated list of sample ids to be
associated to the created individual.
"""
return self._post(category='individuals', resource='create', data=data, **options)
def search(self, **options):
"""
Search for individuals.
PATH: /{apiVersion}/individuals/search
:param str include: Fields included in the response, whole JSON path
must be provided.
:param str exclude: Fields excluded in the response, whole JSON path
must be provided.
:param int limit: Number of results to be returned.
:param int skip: Number of results to skip.
:param bool count: Get the total number of results matching the query.
Deactivated by default.
:param bool flatten_annotations: Flatten the annotations?.
:param str study: Study [[user@]project:]study where study and project
can be either the id or alias.
:param str name: name.
:param str father: father.
:param str mother: mother.
:param str samples: Comma separated list sample IDs or UUIDs up to a
maximum of 100.
:param str sex: sex.
:param str ethnicity: ethnicity.
:param str disorders: Comma separated list of disorder ids or names.
:param str population.name: Population name.
:param str population.subpopulation: Subpopulation name.
:param str population.description: Population description.
:param str phenotypes: Comma separated list of phenotype ids or names.
:param str karyotypic_sex: Karyotypic sex.
:param str life_status: Life status.
:param str affectation_status: Affectation status.
:param bool deleted: Boolean to retrieve deleted individuals.
:param str creation_date: Creation date. Format: yyyyMMddHHmmss.
Examples: >2018, 2017-2018, <201805.
:param str modification_date: Modification date. Format:
yyyyMMddHHmmss. Examples: >2018, 2017-2018, <201805.
:param str annotationset_name: DEPRECATED: Use annotation queryParam
this way: annotationSet[=|==|!|!=]{annotationSetName}.
:param str variable_set: DEPRECATED: Use annotation queryParam this
way: variableSet[=|==|!|!=]{variableSetId}.
:param str annotation: Annotation filters. Example:
age>30;gender=FEMALE. For more information, please visit
http://docs.opencb.org/display/opencga/AnnotationSets+1.4.0.
:param str acl: Filter entries for which a user has the provided
permissions. Format: acl={user}:{permissions}. Example:
acl=john:WRITE,WRITE_ANNOTATIONS will return all entries for which
user john has both WRITE and WRITE_ANNOTATIONS permissions. Only
study owners or administrators can query by this field. .
:param str release: Release value (Current release from the moment the
individuals were first created).
:param int snapshot: Snapshot value (Latest version of individuals in
the specified release).
"""
return self._get(category='individuals', resource='search', **options)
def acl(self, individuals, **options):
"""
Return the acl of the individual. If member is provided, it will only
return the acl for the member.
PATH: /{apiVersion}/individuals/{individuals}/acl
:param str individuals: Comma separated list of individual names or
IDs up to a maximum of 100. (REQUIRED)
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param str member: User or group id.
:param bool silent: Boolean to retrieve all possible entries that are
queried for, false to raise an exception whenever one of the
entries looked for cannot be shown for whichever reason.
"""
return self._get(category='individuals', resource='acl', query_id=individuals, **options)
def delete(self, individuals, **options):
"""
Delete existing individuals.
PATH: /{apiVersion}/individuals/{individuals}/delete
:param bool force: Force the deletion of individuals that already
belong to families.
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param str individuals: Comma separated list of individual ids.
"""
return self._delete(category='individuals', resource='delete', query_id=individuals, **options)
def info(self, individuals, **options):
"""
Get individual information.
PATH: /{apiVersion}/individuals/{individuals}/info
:param str individuals: Comma separated list of individual names or
IDs up to a maximum of 100. (REQUIRED)
:param str include: Fields included in the response, whole JSON path
must be provided.
:param str exclude: Fields excluded in the response, whole JSON path
must be provided.
:param bool flatten_annotations: Flatten the annotations?.
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param int version: Individual version.
:param bool deleted: Boolean to retrieve deleted individuals.
"""
return self._get(category='individuals', resource='info', query_id=individuals, **options)
def update(self, individuals, data=None, **options):
"""
Update some individual attributes.
PATH: /{apiVersion}/individuals/{individuals}/update
:param str individuals: Comma separated list of individual ids.
(REQUIRED)
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param str samples_action: Action to be performed if the array of
samples is being updated. Allowed values: ['ADD', 'SET', 'REMOVE']
:param str annotation_sets_action: Action to be performed if the array
of annotationSets is being updated. Allowed values: ['ADD', 'SET',
'REMOVE']
:param bool inc_version: Create a new version of individual.
:param bool update_sample_version: Update all the sample references
from the individual to point to their latest versions.
:param dict data: body.
"""
return self._post(category='individuals', resource='update', query_id=individuals, data=data, **options)
def update_annotations(self, individual, annotation_set, data=None, **options):
"""
Update annotations from an annotationSet.
PATH: /{apiVersion}/individuals/{individual}/annotationSets/{annotationSet}/annotations/update
:param str individual: Individual ID or UUID. (REQUIRED)
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param str annotation_set: AnnotationSet ID to be updated.
:param str action: Action to be performed: ADD to add new annotations;
REPLACE to replace the value of an already existing annotation; SET
to set the new list of annotations removing any possible old
annotations; REMOVE to remove some annotations; RESET to set some
annotations to the default value configured in the corresponding
variables of the VariableSet if any. Allowed values: ['ADD', 'SET',
'REMOVE', 'RESET', 'REPLACE']
:param bool inc_version: Create a new version of individual.
:param bool update_sample_version: Update all the sample references
from the individual to point to their latest versions.
:param dict data: Json containing the map of annotations when the
action is ADD, SET or REPLACE, a json with only the key 'remove'
containing the comma separated variables to be removed as a value
when the action is REMOVE or a json with only the key 'reset'
containing the comma separated variables that will be set to the
default value when the action is RESET.
"""
return self._post(category='individuals', resource='annotations/update', query_id=individual, subcategory='annotationSets', second_query_id=annotation_set, data=data, **options)
def relatives(self, individual, **options):
"""
Get individual relatives.
PATH: /{apiVersion}/individuals/{individual}/relatives
:param str individual: Individual ID or UUID. (REQUIRED)
:param str include: Fields included in the response, whole JSON path
must be provided.
:param str exclude: Fields excluded in the response, whole JSON path
must be provided.
:param bool flatten_annotations: Flatten the annotations?.
:param str study: Study [[user@]project:]study where study and project
can be either the ID or UUID.
:param int degree: Pedigree degree.
"""
return self._get(category='individuals', resource='relatives', query_id=individual, **options)
| 48.219595 | 185 | 0.659147 | 1,738 | 14,273 | 5.36191 | 0.189873 | 0.060951 | 0.032192 | 0.021247 | 0.460994 | 0.426441 | 0.347355 | 0.330508 | 0.295955 | 0.269128 | 0 | 0.007221 | 0.262594 | 14,273 | 295 | 186 | 48.383051 | 0.878195 | 0.713935 | 0 | 0 | 1 | 0 | 0.111489 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.413793 | false | 0 | 0.034483 | 0 | 0.862069 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
6b1724dfb70b33baf276b43342fa35ef862257e3 | 36,273 | py | Python | pysnmp_mibs/DNS-SERVER-MIB.py | jackjack821/pysnmp-mibs | 9835ea0bb2420715caf4ee9aaa07d59bb263acd6 | [
"BSD-2-Clause"
] | 6 | 2017-04-21T13:48:08.000Z | 2022-01-06T19:42:52.000Z | pysnmp_mibs/DNS-SERVER-MIB.py | jackjack821/pysnmp-mibs | 9835ea0bb2420715caf4ee9aaa07d59bb263acd6 | [
"BSD-2-Clause"
] | 1 | 2020-05-05T16:42:25.000Z | 2020-05-05T16:42:25.000Z | pysnmp_mibs/DNS-SERVER-MIB.py | jackjack821/pysnmp-mibs | 9835ea0bb2420715caf4ee9aaa07d59bb263acd6 | [
"BSD-2-Clause"
] | 6 | 2020-02-08T20:28:49.000Z | 2021-09-14T13:36:46.000Z | #
# PySNMP MIB module DNS-SERVER-MIB (http://pysnmp.sf.net)
# ASN.1 source http://mibs.snmplabs.com:80/asn1/DNS-SERVER-MIB
# Produced by pysmi-0.0.7 at Sun Feb 14 00:08:40 2016
# On host bldfarm platform Linux version 4.1.13-100.fc21.x86_64 by user goose
# Using Python version 3.5.0 (default, Jan 5 2016, 17:11:52)
#
( Integer, OctetString, ObjectIdentifier, ) = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
( ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection")
( NotificationGroup, ObjectGroup, ModuleCompliance, ) = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
( IpAddress, iso, Counter32, ObjectIdentity, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, NotificationType, Unsigned32, mib_2, Integer32, Gauge32, ModuleIdentity, TimeTicks, MibIdentifier, ) = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "iso", "Counter32", "ObjectIdentity", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "NotificationType", "Unsigned32", "mib-2", "Integer32", "Gauge32", "ModuleIdentity", "TimeTicks", "MibIdentifier")
( TruthValue, TextualConvention, RowStatus, DisplayString, ) = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TextualConvention", "RowStatus", "DisplayString")
dns = ObjectIdentity((1, 3, 6, 1, 2, 1, 32))
if mibBuilder.loadTexts: dns.setDescription('The OID assigned to DNS MIB work by the IANA.')
dnsServMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 32, 1))
if mibBuilder.loadTexts: dnsServMIB.setLastUpdated('9401282251Z')
if mibBuilder.loadTexts: dnsServMIB.setOrganization('IETF DNS Working Group')
if mibBuilder.loadTexts: dnsServMIB.setContactInfo(' Rob Austein\n Postal: Epilogue Technology Corporation\n 268 Main Street, Suite 283\n North Reading, MA 10864\n US\n Tel: +1 617 245 0804\n Fax: +1 617 245 8122\n E-Mail: sra@epilogue.com\n\n Jon Saperia\n Postal: Digital Equipment Corporation\n 110 Spit Brook Road\n ZKO1-3/H18\n Nashua, NH 03062-2698\n US\n Tel: +1 603 881 0480\n Fax: +1 603 881 0120\n Email: saperia@zko.dec.com')
if mibBuilder.loadTexts: dnsServMIB.setDescription('The MIB module for entities implementing the server side\n of the Domain Name System (DNS) protocol.')
dnsServMIBObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 32, 1, 1))
dnsServConfig = MibIdentifier((1, 3, 6, 1, 2, 1, 32, 1, 1, 1))
dnsServCounter = MibIdentifier((1, 3, 6, 1, 2, 1, 32, 1, 1, 2))
dnsServOptCounter = MibIdentifier((1, 3, 6, 1, 2, 1, 32, 1, 1, 3))
dnsServZone = MibIdentifier((1, 3, 6, 1, 2, 1, 32, 1, 1, 4))
class DnsName(OctetString, TextualConvention):
subtypeSpec = OctetString.subtypeSpec+ValueSizeConstraint(0,255)
class DnsNameAsIndex(DnsName, TextualConvention):
pass
class DnsClass(Integer32, TextualConvention):
displayHint = '2d'
subtypeSpec = Integer32.subtypeSpec+ValueRangeConstraint(0,65535)
class DnsType(Integer32, TextualConvention):
displayHint = '2d'
subtypeSpec = Integer32.subtypeSpec+ValueRangeConstraint(0,65535)
class DnsQClass(Integer32, TextualConvention):
displayHint = '2d'
subtypeSpec = Integer32.subtypeSpec+ValueRangeConstraint(0,65535)
class DnsQType(Integer32, TextualConvention):
displayHint = '2d'
subtypeSpec = Integer32.subtypeSpec+ValueRangeConstraint(0,65535)
class DnsTime(Gauge32, TextualConvention):
displayHint = '4d'
class DnsOpCode(Integer32, TextualConvention):
subtypeSpec = Integer32.subtypeSpec+ValueRangeConstraint(0,15)
class DnsRespCode(Integer32, TextualConvention):
    """Textual convention: a DNS response code (RCODE) value (0..15)."""
    subtypeSpec = Integer32.subtypeSpec+ValueRangeConstraint(0,15)
# ---------------------------------------------------------------------------
# dnsServConfig subtree (…32.1.1.1): server identification and control.
# Each MIB object's management-level semantics are in its setDescription text.
# ---------------------------------------------------------------------------
dnsServConfigImplementIdent = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServConfigImplementIdent.setDescription("The implementation identification string for the DNS\n server software in use on the system, for example;\n `FNS-2.1'")
# read-write enumerated scalar: recursion policy of the server.
dnsServConfigRecurs = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3,))).clone(namedValues=NamedValues(("available", 1), ("restricted", 2), ("unavailable", 3),))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsServConfigRecurs.setDescription('This represents the recursion services offered by this\n name server. The values that can be read or written\n are:\n\n available(1) - performs recursion on requests from\n clients.\n\n restricted(2) - recursion is performed on requests only\n from certain clients, for example; clients on an access\n control list.\n\n unavailable(3) - recursion is not available.')
dnsServConfigUpTime = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 1, 3), DnsTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServConfigUpTime.setDescription('If the server has a persistent state (e.g., a process),\n this value will be the time elapsed since it started.\n For software without persistant state, this value will\n be zero.')
dnsServConfigResetTime = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 1, 4), DnsTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServConfigResetTime.setDescription("If the server has a persistent state (e.g., a process)\n and supports a `reset' operation (e.g., can be told to\n re-read configuration files), this value will be the\n time elapsed since the last time the name server was\n `reset.' For software that does not have persistence or\n does not support a `reset' operation, this value will be\n zero.")
# read-write status/action scalar: writing reset(2) reinitializes the server.
dnsServConfigReset = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4,))).clone(namedValues=NamedValues(("other", 1), ("reset", 2), ("initializing", 3), ("running", 4),))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsServConfigReset.setDescription('Status/action object to reinitialize any persistant name\n server state. When set to reset(2), any persistant\n name server state (such as a process) is reinitialized as\n if the name server had just been started. This value\n will never be returned by a read operation. When read,\n one of the following values will be returned:\n other(1) - server in some unknown state;\n initializing(3) - server (re)initializing;\n running(4) - server currently running.')
# ---------------------------------------------------------------------------
# dnsServCounter subtree (…32.1.1.2): mandatory, read-only Counter32 scalars
# tallying queries/responses by outcome. (Sub-identifier .1 is not defined
# here; scalars start at .2.)
# ---------------------------------------------------------------------------
dnsServCounterAuthAns = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterAuthAns.setDescription('Number of queries which were authoritatively answered.')
dnsServCounterAuthNoNames = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterAuthNoNames.setDescription("Number of queries for which `authoritative no such name'\n responses were made.")
dnsServCounterAuthNoDataResps = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterAuthNoDataResps.setDescription("Number of queries for which `authoritative no such data'\n (empty answer) responses were made.")
dnsServCounterNonAuthDatas = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterNonAuthDatas.setDescription('Number of queries which were non-authoritatively\n answered (cached data).')
dnsServCounterNonAuthNoDatas = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterNonAuthNoDatas.setDescription('Number of queries which were non-authoritatively\n answered with no data (empty answer).')
dnsServCounterReferrals = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterReferrals.setDescription('Number of requests that were referred to other servers.')
dnsServCounterErrors = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterErrors.setDescription('Number of requests the server has processed that were\n answered with errors (RCODE values other than 0 and 3).')
dnsServCounterRelNames = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterRelNames.setDescription('Number of requests received by the server for names that\n are only 1 label long (text form - no internal dots).')
dnsServCounterReqRefusals = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterReqRefusals.setDescription('Number of DNS requests refused by the server.')
dnsServCounterReqUnparses = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterReqUnparses.setDescription('Number of requests received which were unparseable.')
dnsServCounterOtherErrors = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterOtherErrors.setDescription('Number of requests which were aborted for other (local)\n server errors.')
# ---------------------------------------------------------------------------
# dnsServCounterTable (…32.1.1.2.13): request/response counts broken down by
# OPCODE, query class, query type, and transport. The four index columns are
# not-accessible (no setMaxAccess); only Requests/Responses are readable.
# ---------------------------------------------------------------------------
dnsServCounterTable = MibTable((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 13), )
if mibBuilder.loadTexts: dnsServCounterTable.setDescription('Counter information broken down by DNS class and type.')
dnsServCounterEntry = MibTableRow((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 13, 1), ).setIndexNames((0, "DNS-SERVER-MIB", "dnsServCounterOpCode"), (0, "DNS-SERVER-MIB", "dnsServCounterQClass"), (0, "DNS-SERVER-MIB", "dnsServCounterQType"), (0, "DNS-SERVER-MIB", "dnsServCounterTransport"))
if mibBuilder.loadTexts: dnsServCounterEntry.setDescription("This table contains count information for each DNS class\n and type value known to the server. The index allows\n management software to to create indices to the table to\n get the specific information desired, e.g., number of\n queries over UDP for records with type value `A' which\n came to this server. In order to prevent an\n uncontrolled expansion of rows in the table; if\n dnsServCounterRequests is 0 and dnsServCounterResponses\n is 0, then the row does not exist and `no such' is\n returned when the agent is queried for such instances.")
dnsServCounterOpCode = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 13, 1, 1), DnsOpCode())
if mibBuilder.loadTexts: dnsServCounterOpCode.setDescription('The DNS OPCODE being counted in this row of the table.')
dnsServCounterQClass = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 13, 1, 2), DnsClass())
if mibBuilder.loadTexts: dnsServCounterQClass.setDescription('The class of record being counted in this row of the\n table.')
dnsServCounterQType = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 13, 1, 3), DnsType())
if mibBuilder.loadTexts: dnsServCounterQType.setDescription('The type of record which is being counted in this row in\n the table.')
dnsServCounterTransport = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 13, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3,))).clone(namedValues=NamedValues(("udp", 1), ("tcp", 2), ("other", 3),)))
if mibBuilder.loadTexts: dnsServCounterTransport.setDescription('A value of udp(1) indicates that the queries reported on\n this row were sent using UDP.\n\n A value of tcp(2) indicates that the queries reported on\n this row were sent using TCP.\n\n A value of other(3) indicates that the queries reported\n on this row were sent using a transport that was neither\n TCP nor UDP.')
dnsServCounterRequests = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 13, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterRequests.setDescription('Number of requests (queries) that have been recorded in\n this row of the table.')
dnsServCounterResponses = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 2, 13, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServCounterResponses.setDescription('Number of responses made by the server since\n initialization for the kind of query identified on this\n row of the table.')
# ---------------------------------------------------------------------------
# dnsServOptCounter subtree (…32.1.1.3): optional read-only counters.
# .1-.11 count requests from a resolver on the SAME HOST ("Self");
# .12-.22 count requests from locally-defined "friends".
# ---------------------------------------------------------------------------
dnsServOptCounterSelfAuthAns = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfAuthAns.setDescription('Number of requests the server has processed which\n originated from a resolver on the same host for which\n there has been an authoritative answer.')
dnsServOptCounterSelfAuthNoNames = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfAuthNoNames.setDescription('Number of requests the server has processed which\n originated from a resolver on the same host for which\n there has been an authoritative no such name answer\n given.')
dnsServOptCounterSelfAuthNoDataResps = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfAuthNoDataResps.setDescription('Number of requests the server has processed which\n originated from a resolver on the same host for which\n there has been an authoritative no such data answer\n (empty answer) made.')
dnsServOptCounterSelfNonAuthDatas = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfNonAuthDatas.setDescription('Number of requests the server has processed which\n originated from a resolver on the same host for which a\n non-authoritative answer (cached data) was made.')
dnsServOptCounterSelfNonAuthNoDatas = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfNonAuthNoDatas.setDescription("Number of requests the server has processed which\n originated from a resolver on the same host for which a\n `non-authoritative, no such data' response was made\n (empty answer).")
dnsServOptCounterSelfReferrals = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfReferrals.setDescription('Number of queries the server has processed which\n originated from a resolver on the same host and were\n referred to other servers.')
dnsServOptCounterSelfErrors = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfErrors.setDescription('Number of requests the server has processed which\n originated from a resolver on the same host which have\n been answered with errors (RCODEs other than 0 and 3).')
dnsServOptCounterSelfRelNames = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfRelNames.setDescription('Number of requests received for names that are only 1\n label long (text form - no internal dots) the server has\n processed which originated from a resolver on the same\n host.')
dnsServOptCounterSelfReqRefusals = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfReqRefusals.setDescription('Number of DNS requests refused by the server which\n originated from a resolver on the same host.')
dnsServOptCounterSelfReqUnparses = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfReqUnparses.setDescription('Number of requests received which were unparseable and\n which originated from a resolver on the same host.')
dnsServOptCounterSelfOtherErrors = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterSelfOtherErrors.setDescription('Number of requests which were aborted for other (local)\n server errors and which originated on the same host.')
# --- "Friends" counters: same breakdown, for a locally defined peer set. ---
dnsServOptCounterFriendsAuthAns = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsAuthAns.setDescription('Number of queries originating from friends which were\n authoritatively answered. The definition of friends is\n a locally defined matter.')
dnsServOptCounterFriendsAuthNoNames = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsAuthNoNames.setDescription("Number of queries originating from friends, for which\n authoritative `no such name' responses were made. The\n definition of friends is a locally defined matter.")
dnsServOptCounterFriendsAuthNoDataResps = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsAuthNoDataResps.setDescription('Number of queries originating from friends for which\n authoritative no such data (empty answer) responses were\n made. The definition of friends is a locally defined\n matter.')
dnsServOptCounterFriendsNonAuthDatas = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsNonAuthDatas.setDescription('Number of queries originating from friends which were\n non-authoritatively answered (cached data). The\n definition of friends is a locally defined matter.')
dnsServOptCounterFriendsNonAuthNoDatas = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsNonAuthNoDatas.setDescription('Number of queries originating from friends which were\n non-authoritatively answered with no such data (empty\n answer).')
dnsServOptCounterFriendsReferrals = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsReferrals.setDescription('Number of requests which originated from friends that\n were referred to other servers. The definition of\n friends is a locally defined matter.')
dnsServOptCounterFriendsErrors = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsErrors.setDescription('Number of requests the server has processed which\n originated from friends and were answered with errors\n (RCODE values other than 0 and 3). The definition of\n friends is a locally defined matter.')
dnsServOptCounterFriendsRelNames = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsRelNames.setDescription('Number of requests received for names from friends that\n are only 1 label long (text form - no internal dots) the\n server has processed.')
dnsServOptCounterFriendsReqRefusals = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsReqRefusals.setDescription("Number of DNS requests refused by the server which were\n received from `friends'.")
dnsServOptCounterFriendsReqUnparses = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsReqUnparses.setDescription("Number of requests received which were unparseable and\n which originated from `friends'.")
dnsServOptCounterFriendsOtherErrors = MibScalar((1, 3, 6, 1, 2, 1, 32, 1, 1, 3, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServOptCounterFriendsOtherErrors.setDescription("Number of requests which were aborted for other (local)\n server errors and which originated from `friends'.")
# ---------------------------------------------------------------------------
# dnsServZoneTable (…32.1.1.4.1): one row per zone served, indexed by
# (dnsServZoneName, dnsServZoneClass). Rows may be created via SNMP
# (dnsServZoneStatus is a read-create RowStatus column) or by the server.
# ---------------------------------------------------------------------------
dnsServZoneTable = MibTable((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1), )
if mibBuilder.loadTexts: dnsServZoneTable.setDescription("Table of zones for which this name server provides\n information. Each of the zones may be loaded from stable\n storage via an implementation-specific mechanism or may\n be obtained from another name server via a zone transfer.\n\n If name server doesn't load any zones, this table is\n empty.")
dnsServZoneEntry = MibTableRow((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1), ).setIndexNames((0, "DNS-SERVER-MIB", "dnsServZoneName"), (0, "DNS-SERVER-MIB", "dnsServZoneClass"))
if mibBuilder.loadTexts: dnsServZoneEntry.setDescription('An entry in the name server zone table. New rows may be\n added either via SNMP or by the name server itself.')
dnsServZoneName = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1, 1), DnsNameAsIndex())
if mibBuilder.loadTexts: dnsServZoneName.setDescription("DNS name of the zone described by this row of the table.\n This is the owner name of the SOA RR that defines the\n top of the zone. This is name is in uppercase:\n characters 'a' through 'z' are mapped to 'A' through 'Z'\n in order to make the lexical ordering useful.")
dnsServZoneClass = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1, 2), DnsClass())
if mibBuilder.loadTexts: dnsServZoneClass.setDescription('DNS class of the RRs in this zone.')
dnsServZoneLastReloadSuccess = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1, 3), DnsTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServZoneLastReloadSuccess.setDescription('Elapsed time in seconds since last successful reload of\n this zone.')
dnsServZoneLastReloadAttempt = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1, 4), DnsTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServZoneLastReloadAttempt.setDescription('Elapsed time in seconds since last attempted reload of\n this zone.')
dnsServZoneLastSourceAttempt = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1, 5), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServZoneLastSourceAttempt.setDescription('IP address of host from which most recent zone transfer\n of this zone was attempted. This value should match the\n value of dnsServZoneSourceSuccess if the attempt was\n succcessful. If zone transfer has not been attempted\n within the memory of this name server, this value should\n be 0.0.0.0.')
dnsServZoneStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: dnsServZoneStatus.setDescription('The status of the information represented in this row of\n the table.')
dnsServZoneSerial = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServZoneSerial.setDescription('Zone serial number (from the SOA RR) of the zone\n represented by this row of the table. If the zone has\n not been successfully loaded within the memory of this\n name server, the value of this variable is zero.')
dnsServZoneCurrent = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1, 8), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServZoneCurrent.setDescription("Whether the server's copy of the zone represented by\n this row of the table is currently valid. If the zone\n has never been successfully loaded or has expired since\n it was last succesfully loaded, this variable will have\n the value false(2), otherwise this variable will have\n the value true(1).")
dnsServZoneLastSourceSuccess = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 1, 1, 9), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dnsServZoneLastSourceSuccess.setDescription('IP address of host which was the source of the most\n recent successful zone transfer for this zone. If\n unknown (e.g., zone has never been successfully\n transfered) or irrelevant (e.g., zone was loaded from\n stable storage), this value should be 0.0.0.0.')
# ---------------------------------------------------------------------------
# dnsServZoneSrcTable (…32.1.1.4.2): candidate zone-transfer sources per
# (zone name, class, source IP address); rows managed via the read-create
# RowStatus column dnsServZoneSrcStatus.
# ---------------------------------------------------------------------------
dnsServZoneSrcTable = MibTable((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 2), )
if mibBuilder.loadTexts: dnsServZoneSrcTable.setDescription('This table is a list of IP addresses from which the\n server will attempt to load zone information using DNS\n zone transfer operations. A reload may occur due to SNMP\n operations that create a row in dnsServZoneTable or a\n SET to object dnsServZoneReload. This table is only\n used when the zone is loaded via zone transfer.')
dnsServZoneSrcEntry = MibTableRow((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 2, 1), ).setIndexNames((0, "DNS-SERVER-MIB", "dnsServZoneSrcName"), (0, "DNS-SERVER-MIB", "dnsServZoneSrcClass"), (0, "DNS-SERVER-MIB", "dnsServZoneSrcAddr"))
if mibBuilder.loadTexts: dnsServZoneSrcEntry.setDescription('An entry in the name server zone source table.')
dnsServZoneSrcName = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 2, 1, 1), DnsNameAsIndex())
if mibBuilder.loadTexts: dnsServZoneSrcName.setDescription('DNS name of the zone to which this entry applies.')
dnsServZoneSrcClass = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 2, 1, 2), DnsClass())
if mibBuilder.loadTexts: dnsServZoneSrcClass.setDescription('DNS class of zone to which this entry applies.')
dnsServZoneSrcAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 2, 1, 3), IpAddress())
if mibBuilder.loadTexts: dnsServZoneSrcAddr.setDescription('IP address of name server host from which this zone\n might be obtainable.')
dnsServZoneSrcStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 32, 1, 1, 4, 2, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: dnsServZoneSrcStatus.setDescription('The status of the information represented in this row of\n the table.')
# ---------------------------------------------------------------------------
# Conformance section: object groups (…32.1.2.*) and the single module
# compliance statement (…32.1.3.1) that references all four groups.
# ---------------------------------------------------------------------------
dnsServMIBGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 32, 1, 2))
dnsServConfigGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 32, 1, 2, 1)).setObjects(*(("DNS-SERVER-MIB", "dnsServConfigImplementIdent"), ("DNS-SERVER-MIB", "dnsServConfigRecurs"), ("DNS-SERVER-MIB", "dnsServConfigUpTime"), ("DNS-SERVER-MIB", "dnsServConfigResetTime"), ("DNS-SERVER-MIB", "dnsServConfigReset"),))
if mibBuilder.loadTexts: dnsServConfigGroup.setDescription('A collection of objects providing basic configuration\n control of a DNS name server.')
dnsServCounterGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 32, 1, 2, 2)).setObjects(*(("DNS-SERVER-MIB", "dnsServCounterAuthAns"), ("DNS-SERVER-MIB", "dnsServCounterAuthNoNames"), ("DNS-SERVER-MIB", "dnsServCounterAuthNoDataResps"), ("DNS-SERVER-MIB", "dnsServCounterNonAuthDatas"), ("DNS-SERVER-MIB", "dnsServCounterNonAuthNoDatas"), ("DNS-SERVER-MIB", "dnsServCounterReferrals"), ("DNS-SERVER-MIB", "dnsServCounterErrors"), ("DNS-SERVER-MIB", "dnsServCounterRelNames"), ("DNS-SERVER-MIB", "dnsServCounterReqRefusals"), ("DNS-SERVER-MIB", "dnsServCounterReqUnparses"), ("DNS-SERVER-MIB", "dnsServCounterOtherErrors"), ("DNS-SERVER-MIB", "dnsServCounterOpCode"), ("DNS-SERVER-MIB", "dnsServCounterQClass"), ("DNS-SERVER-MIB", "dnsServCounterQType"), ("DNS-SERVER-MIB", "dnsServCounterTransport"), ("DNS-SERVER-MIB", "dnsServCounterRequests"), ("DNS-SERVER-MIB", "dnsServCounterResponses"),))
if mibBuilder.loadTexts: dnsServCounterGroup.setDescription('A collection of objects providing basic instrumentation\n of a DNS name server.')
dnsServOptCounterGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 32, 1, 2, 3)).setObjects(*(("DNS-SERVER-MIB", "dnsServOptCounterSelfAuthAns"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfAuthNoNames"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfAuthNoDataResps"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfNonAuthDatas"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfNonAuthNoDatas"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfReferrals"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfErrors"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfRelNames"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfReqRefusals"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfReqUnparses"), ("DNS-SERVER-MIB", "dnsServOptCounterSelfOtherErrors"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsAuthAns"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsAuthNoNames"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsAuthNoDataResps"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsNonAuthDatas"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsNonAuthNoDatas"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsReferrals"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsErrors"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsRelNames"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsReqRefusals"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsReqUnparses"), ("DNS-SERVER-MIB", "dnsServOptCounterFriendsOtherErrors"),))
if mibBuilder.loadTexts: dnsServOptCounterGroup.setDescription('A collection of objects providing extended\n instrumentation of a DNS name server.')
dnsServZoneGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 32, 1, 2, 4)).setObjects(*(("DNS-SERVER-MIB", "dnsServZoneName"), ("DNS-SERVER-MIB", "dnsServZoneClass"), ("DNS-SERVER-MIB", "dnsServZoneLastReloadSuccess"), ("DNS-SERVER-MIB", "dnsServZoneLastReloadAttempt"), ("DNS-SERVER-MIB", "dnsServZoneLastSourceAttempt"), ("DNS-SERVER-MIB", "dnsServZoneLastSourceSuccess"), ("DNS-SERVER-MIB", "dnsServZoneStatus"), ("DNS-SERVER-MIB", "dnsServZoneSerial"), ("DNS-SERVER-MIB", "dnsServZoneCurrent"), ("DNS-SERVER-MIB", "dnsServZoneSrcName"), ("DNS-SERVER-MIB", "dnsServZoneSrcClass"), ("DNS-SERVER-MIB", "dnsServZoneSrcAddr"), ("DNS-SERVER-MIB", "dnsServZoneSrcStatus"),))
if mibBuilder.loadTexts: dnsServZoneGroup.setDescription('A collection of objects providing configuration control\n of a DNS name server which loads authoritative zones.')
dnsServMIBCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 32, 1, 3))
dnsServMIBCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 32, 1, 3, 1)).setObjects(*(("DNS-SERVER-MIB", "dnsServConfigGroup"), ("DNS-SERVER-MIB", "dnsServCounterGroup"), ("DNS-SERVER-MIB", "dnsServOptCounterGroup"), ("DNS-SERVER-MIB", "dnsServZoneGroup"),))
if mibBuilder.loadTexts: dnsServMIBCompliance.setDescription('The compliance statement for agents implementing the DNS\n name server MIB extensions.')
# Export every symbol defined above so that other pysnmp MIB modules can
# import them by name; PYSNMP_MODULE_ID identifies the MODULE-IDENTITY
# object (dnsServMIB) for this module.
mibBuilder.exportSymbols("DNS-SERVER-MIB", dnsServOptCounterSelfAuthNoDataResps=dnsServOptCounterSelfAuthNoDataResps, dnsServMIBCompliances=dnsServMIBCompliances, dnsServOptCounterSelfAuthNoNames=dnsServOptCounterSelfAuthNoNames, dnsServOptCounterSelfReqUnparses=dnsServOptCounterSelfReqUnparses, dnsServConfig=dnsServConfig, dnsServCounterReqUnparses=dnsServCounterReqUnparses, dnsServOptCounterSelfRelNames=dnsServOptCounterSelfRelNames, DnsName=DnsName, dnsServConfigRecurs=dnsServConfigRecurs, dnsServCounterTable=dnsServCounterTable, dnsServCounterOtherErrors=dnsServCounterOtherErrors, dnsServOptCounterFriendsRelNames=dnsServOptCounterFriendsRelNames, dnsServOptCounterSelfNonAuthNoDatas=dnsServOptCounterSelfNonAuthNoDatas, dnsServZoneEntry=dnsServZoneEntry, dnsServOptCounterGroup=dnsServOptCounterGroup, dnsServCounterReqRefusals=dnsServCounterReqRefusals, dnsServZoneSrcEntry=dnsServZoneSrcEntry, DnsType=DnsType, dnsServZoneLastSourceAttempt=dnsServZoneLastSourceAttempt, dnsServCounter=dnsServCounter, dnsServCounterAuthAns=dnsServCounterAuthAns, dnsServCounterEntry=dnsServCounterEntry, dnsServZoneLastReloadSuccess=dnsServZoneLastReloadSuccess, dnsServZoneLastReloadAttempt=dnsServZoneLastReloadAttempt, dnsServCounterOpCode=dnsServCounterOpCode, dnsServZone=dnsServZone, dnsServConfigReset=dnsServConfigReset, dnsServOptCounterFriendsOtherErrors=dnsServOptCounterFriendsOtherErrors, dnsServZoneTable=dnsServZoneTable, DnsClass=DnsClass, dnsServCounterRelNames=dnsServCounterRelNames, dnsServConfigGroup=dnsServConfigGroup, dnsServCounterAuthNoDataResps=dnsServCounterAuthNoDataResps, dnsServCounterQClass=dnsServCounterQClass, dnsServZoneStatus=dnsServZoneStatus, dnsServMIB=dnsServMIB, PYSNMP_MODULE_ID=dnsServMIB, dnsServMIBObjects=dnsServMIBObjects, dnsServCounterReferrals=dnsServCounterReferrals, DnsQClass=DnsQClass, dnsServZoneSrcClass=dnsServZoneSrcClass, dnsServMIBGroups=dnsServMIBGroups, dnsServOptCounterSelfAuthAns=dnsServOptCounterSelfAuthAns, 
dnsServOptCounter=dnsServOptCounter, DnsOpCode=DnsOpCode, dnsServOptCounterFriendsNonAuthNoDatas=dnsServOptCounterFriendsNonAuthNoDatas, dnsServMIBCompliance=dnsServMIBCompliance, dnsServCounterRequests=dnsServCounterRequests, dnsServOptCounterSelfReferrals=dnsServOptCounterSelfReferrals, dnsServZoneSrcAddr=dnsServZoneSrcAddr, dns=dns, dnsServCounterNonAuthDatas=dnsServCounterNonAuthDatas, dnsServZoneCurrent=dnsServZoneCurrent, dnsServConfigResetTime=dnsServConfigResetTime, dnsServCounterErrors=dnsServCounterErrors, dnsServCounterQType=dnsServCounterQType, dnsServZoneSrcStatus=dnsServZoneSrcStatus, dnsServOptCounterFriendsAuthAns=dnsServOptCounterFriendsAuthAns, dnsServZoneGroup=dnsServZoneGroup, dnsServOptCounterFriendsNonAuthDatas=dnsServOptCounterFriendsNonAuthDatas, DnsQType=DnsQType, DnsRespCode=DnsRespCode, dnsServZoneClass=dnsServZoneClass, dnsServCounterNonAuthNoDatas=dnsServCounterNonAuthNoDatas, dnsServOptCounterSelfNonAuthDatas=dnsServOptCounterSelfNonAuthDatas, dnsServOptCounterFriendsErrors=dnsServOptCounterFriendsErrors, dnsServCounterResponses=dnsServCounterResponses, DnsNameAsIndex=DnsNameAsIndex, dnsServOptCounterFriendsReqRefusals=dnsServOptCounterFriendsReqRefusals, dnsServCounterGroup=dnsServCounterGroup, dnsServOptCounterSelfReqRefusals=dnsServOptCounterSelfReqRefusals, dnsServZoneLastSourceSuccess=dnsServZoneLastSourceSuccess, dnsServOptCounterSelfErrors=dnsServOptCounterSelfErrors, dnsServCounterTransport=dnsServCounterTransport, dnsServCounterAuthNoNames=dnsServCounterAuthNoNames, dnsServOptCounterSelfOtherErrors=dnsServOptCounterSelfOtherErrors, dnsServConfigUpTime=dnsServConfigUpTime, DnsTime=DnsTime, dnsServOptCounterFriendsAuthNoDataResps=dnsServOptCounterFriendsAuthNoDataResps, dnsServOptCounterFriendsReferrals=dnsServOptCounterFriendsReferrals, dnsServZoneName=dnsServZoneName, dnsServZoneSrcName=dnsServZoneSrcName, dnsServOptCounterFriendsAuthNoNames=dnsServOptCounterFriendsAuthNoNames, dnsServZoneSrcTable=dnsServZoneSrcTable, 
dnsServZoneSerial=dnsServZoneSerial, dnsServConfigImplementIdent=dnsServConfigImplementIdent, dnsServOptCounterFriendsReqUnparses=dnsServOptCounterFriendsReqUnparses)
| 185.066327 | 4,131 | 0.733328 | 4,189 | 36,273 | 6.349009 | 0.13249 | 0.008197 | 0.008798 | 0.011581 | 0.379681 | 0.344525 | 0.294292 | 0.228155 | 0.208565 | 0.192961 | 0 | 0.04345 | 0.158024 | 36,273 | 195 | 4,132 | 186.015385 | 0.82738 | 0.008381 | 0 | 0.055866 | 0 | 0.212291 | 0.453671 | 0.039711 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.005587 | 0.03352 | 0 | 0.150838 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6b208cfd7f5c1eafd08aa2b397107073455db505 | 3,355 | py | Python | kde/applications/cantor/cantor.py | lnjX/craft-blueprints-kde | 08dc4a6c5873c362504a9454ddbf8e965e26d3c2 | [
"BSD-2-Clause"
] | null | null | null | kde/applications/cantor/cantor.py | lnjX/craft-blueprints-kde | 08dc4a6c5873c362504a9454ddbf8e965e26d3c2 | [
"BSD-2-Clause"
] | null | null | null | kde/applications/cantor/cantor.py | lnjX/craft-blueprints-kde | 08dc4a6c5873c362504a9454ddbf8e965e26d3c2 | [
"BSD-2-Clause"
] | null | null | null | import info
class subinfo(info.infoclass):
    """Craft blueprint metadata for Cantor: versions, patches, dependencies."""

    def setTargets(self):
        """Register buildable versions and the patches each version needs."""
        self.versionInfo.setDefaultValues()
        # Both 19.12 releases need the MSVC libmarkdown fix.
        for version in ("19.12.0", "19.12.1"):
            self.patchToApply[version] = [("cantor-19.12.0-MSVC-libmarkdown.diff", 1)]
        # 19.12.1 additionally carries a Windows-specific fix.
        self.patchToApply["19.12.1"] += [("cantor-19.12.1-Windows.diff", 1)]
        self.description = "Cantor"

    def setDependencies(self):
        """Declare build-time and runtime dependencies."""
        self.runtimeDependencies["virtual/base"] = None
        self.buildDependencies["kde/frameworks/extra-cmake-modules"] = None
        self.buildDependencies["dev-utils/png2ico"] = None
        self.runtimeDependencies["libs/qt5/qtbase"] = None
        self.runtimeDependencies["qt-libs/poppler"] = None
        # The R backend fails to compile with MSVC, so only pull in R elsewhere.
        if not CraftCore.compiler.isMSVC():
            self.runtimeDependencies["binary/r-base"] = None
        # Python comes from Craft itself, so no separate python3/python-libs
        # packages are declared here.
        kf5_runtime = (
            "kde/frameworks/tier1/kconfig",
            "kde/frameworks/tier2/kcrash",
            "kde/frameworks/tier2/kdoctools",
            "kde/frameworks/tier3/knewstuff",
            "kde/frameworks/tier3/kiconthemes",
            "kde/frameworks/tier3/ktexteditor",
            "kde/frameworks/tier1/kcoreaddons",
            "kde/frameworks/tier1/karchive",
            "kde/frameworks/tier3/kparts",
            "kde/frameworks/tier2/kpty",
            "kde/frameworks/tier3/ktextwidgets",
            "kde/frameworks/tier3/kio",
            "kde/frameworks/tier1/ki18n",
            "kde/frameworks/tier3/kxmlgui",
        )
        for framework in kf5_runtime:
            self.runtimeDependencies[framework] = None
        self.runtimeDependencies["kde/applications/analitza"] = None
from Package.CMakePackageBase import *
class Package(CMakePackageBase):
    """CMake-driven build package for Cantor with Windows/MSVC-specific tweaks."""

    def __init__(self):
        CMakePackageBase.__init__(self)
        if CraftCore.compiler.isWindows:
            # Parallel make is disabled on Windows for this project.
            self.subinfo.options.make.supportsMultijob = False
        # R backend fail compiling on Windows
        #self.r_dir = os.path.join(CraftCore.standardDirs.craftRoot(), "lib", "R", "bin", "x64")
        #self.subinfo.options.configure.args = "-DR_EXECUTABLE=" + OsUtils.toUnixPath(os.path.join(self.r_dir, "R.exe"))
        #self.subinfo.options.configure.args += " -DR_R_LIBRARY=" + OsUtils.toUnixPath(os.path.join(self.r_dir, "R.dll"))
        # Python (src/backends/python/pythonserver.cpp) backend fail compiling with MSVC
        if not CraftCore.compiler.isMSVC():
            # Point CMake at Craft's Python 3.6 installation for the Python 3 backend.
            pythonPath = CraftCore.settings.get("Paths", "PYTHON")
            self.subinfo.options.configure.args += f" -DPYTHONLIBS3_LIBRARY={pythonPath}/python36.dll -DPYTHONLIBS3_INCLUDE_DIR={pythonPath}/include"
            # The Python 2 backend uses a separate Python 2.7 installation.
            python27Path = CraftCore.settings.get("Paths", "PYTHON27")
            self.subinfo.options.configure.args += f" -DPYTHON_LIBRARIES_DIR={python27Path}/libs -DPYTHON_INCLUDE_DIR={python27Path}/include"
| 54.112903 | 153 | 0.679881 | 359 | 3,355 | 6.295265 | 0.342618 | 0.213717 | 0.215044 | 0.199115 | 0.446903 | 0.438053 | 0.10177 | 0.10177 | 0.061947 | 0 | 0 | 0.022067 | 0.189568 | 3,355 | 61 | 154 | 55 | 0.809121 | 0.174665 | 0 | 0.046512 | 0 | 0 | 0.300834 | 0.254802 | 0 | 0 | 0 | 0 | 0 | 1 | 0.069767 | false | 0 | 0.046512 | 0 | 0.162791 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6b235d3e6bb78e9af5067e3a41f0c940125919b3 | 222 | py | Python | src/web/dev/db-02.py | momacs/pram | d2de43ea447d13a65d814f781ec86889754f76fe | [
"BSD-3-Clause"
] | 10 | 2019-01-18T19:11:54.000Z | 2022-03-16T08:39:36.000Z | src/web/dev/db-02.py | momacs/pram | d2de43ea447d13a65d814f781ec86889754f76fe | [
"BSD-3-Clause"
] | 2 | 2019-02-19T15:10:44.000Z | 2019-02-26T04:26:24.000Z | src/web/dev/db-02.py | momacs/pram | d2de43ea447d13a65d814f781ec86889754f76fe | [
"BSD-3-Clause"
] | 3 | 2019-02-19T15:11:08.000Z | 2021-08-20T11:51:04.000Z | ''' Generate an ER diagram from a SQLite DB. '''
from eralchemy import render_er
import os

# Path to the SQLite database, resolved relative to this script's directory.
fpath_db = os.path.join(os.path.dirname(__file__), '..', 'db', 'allegheny-students.sqlite3')

# The f-prefix is required so fpath_db is interpolated into the URI; the
# original passed the literal text 'sqlite:///{fpath_db}' to render_er.
# (``import os`` was also missing, making the path line a NameError.)
render_er(f'sqlite:///{fpath_db}', 'erd.png')
| 27.75 | 92 | 0.693694 | 33 | 222 | 4.424242 | 0.666667 | 0.109589 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005076 | 0.112613 | 222 | 7 | 93 | 31.714286 | 0.736041 | 0.18018 | 0 | 0 | 1 | 0 | 0.327586 | 0.149425 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
6b2eb87f1538c7542e37173175e91ca6381afd3f | 14,534 | py | Python | tests/components/xiaomi_miio/test_vacuum.py | mfaraco/home-assistant | a91b0058229974aed578fb20e833df4454d007f9 | [
"Apache-2.0"
] | 23 | 2017-11-15T21:03:53.000Z | 2021-03-29T21:33:48.000Z | tests/components/xiaomi_miio/test_vacuum.py | mfaraco/home-assistant | a91b0058229974aed578fb20e833df4454d007f9 | [
"Apache-2.0"
] | 6 | 2021-02-08T20:59:36.000Z | 2022-03-12T00:52:11.000Z | tests/components/xiaomi_miio/test_vacuum.py | ajk12345-code/home-assistant | 85624e80312e0487f51653ce24b398320972cbec | [
"Apache-2.0"
] | 10 | 2018-01-01T00:12:51.000Z | 2021-12-21T23:08:05.000Z | """The tests for the Xiaomi vacuum platform."""
import asyncio
from datetime import time, timedelta
from unittest import mock
import pytest
from homeassistant.components.vacuum import (
ATTR_BATTERY_ICON,
ATTR_FAN_SPEED,
ATTR_FAN_SPEED_LIST,
DOMAIN,
SERVICE_CLEAN_SPOT,
SERVICE_LOCATE,
SERVICE_RETURN_TO_BASE,
SERVICE_SEND_COMMAND,
SERVICE_SET_FAN_SPEED,
SERVICE_START,
SERVICE_STOP,
STATE_CLEANING,
STATE_ERROR,
)
from homeassistant.components.xiaomi_miio.vacuum import (
ATTR_CLEANED_AREA,
ATTR_CLEANED_TOTAL_AREA,
ATTR_CLEANING_COUNT,
ATTR_CLEANING_TIME,
ATTR_CLEANING_TOTAL_TIME,
ATTR_DO_NOT_DISTURB,
ATTR_DO_NOT_DISTURB_END,
ATTR_DO_NOT_DISTURB_START,
ATTR_ERROR,
ATTR_FILTER_LEFT,
ATTR_MAIN_BRUSH_LEFT,
ATTR_SIDE_BRUSH_LEFT,
CONF_HOST,
CONF_NAME,
CONF_TOKEN,
DOMAIN as XIAOMI_DOMAIN,
SERVICE_CLEAN_ZONE,
SERVICE_MOVE_REMOTE_CONTROL,
SERVICE_MOVE_REMOTE_CONTROL_STEP,
SERVICE_START_REMOTE_CONTROL,
SERVICE_STOP_REMOTE_CONTROL,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_PLATFORM,
STATE_OFF,
STATE_ON,
)
from homeassistant.setup import async_setup_component
# Platform key used when setting up the vacuum component in tests.
PLATFORM = "xiaomi_miio"
# calls made when device status is requested
STATUS_CALLS = [
    mock.call.status(),
    mock.call.consumable_status(),
    mock.call.clean_history(),
    mock.call.dnd_status(),
]
@pytest.fixture(name="mock_mirobo_is_got_error")
def mirobo_is_got_error_fixture():
    """Mock a vacuum that is docked ("Charging") and reporting an error.

    Patches the ``Vacuum`` class so the platform talks to this MagicMock
    instead of a real device.
    """
    mock_vacuum = mock.MagicMock()
    mock_vacuum.status().data = {"test": "raw"}
    mock_vacuum.status().is_on = False
    mock_vacuum.status().fanspeed = 38
    mock_vacuum.status().got_error = True
    mock_vacuum.status().error = "Error message"
    mock_vacuum.status().battery = 82
    mock_vacuum.status().clean_area = 123.43218
    mock_vacuum.status().clean_time = timedelta(hours=2, minutes=35, seconds=34)
    mock_vacuum.consumable_status().main_brush_left = timedelta(
        hours=12, minutes=35, seconds=34
    )
    mock_vacuum.consumable_status().side_brush_left = timedelta(
        hours=12, minutes=35, seconds=34
    )
    mock_vacuum.consumable_status().filter_left = timedelta(
        hours=12, minutes=35, seconds=34
    )
    mock_vacuum.clean_history().count = "35"
    mock_vacuum.clean_history().total_area = 123.43218
    mock_vacuum.clean_history().total_duration = timedelta(
        hours=11, minutes=35, seconds=34
    )
    mock_vacuum.status().state = "Test Xiaomi Charging"
    # Do-not-disturb window: 22:00 to 06:00, enabled.
    mock_vacuum.dnd_status().enabled = True
    mock_vacuum.dnd_status().start = time(hour=22, minute=0)
    mock_vacuum.dnd_status().end = time(hour=6, minute=0)
    with mock.patch(
        "homeassistant.components.xiaomi_miio.vacuum.Vacuum"
    ) as mock_vaccum_cls:
        mock_vaccum_cls.return_value = mock_vacuum
        yield mock_vacuum
@pytest.fixture(name="mock_mirobo_is_on")
def mirobo_is_on_fixture():
    """Mock a vacuum that is actively cleaning (state_code 5) with no error.

    Patches the ``Vacuum`` class so the platform talks to this MagicMock
    instead of a real device.
    """
    mock_vacuum = mock.MagicMock()
    mock_vacuum.status().data = {"test": "raw"}
    mock_vacuum.status().is_on = True
    mock_vacuum.status().fanspeed = 99
    mock_vacuum.status().got_error = False
    mock_vacuum.status().battery = 32
    mock_vacuum.status().clean_area = 133.43218
    mock_vacuum.status().clean_time = timedelta(hours=2, minutes=55, seconds=34)
    mock_vacuum.consumable_status().main_brush_left = timedelta(
        hours=11, minutes=35, seconds=34
    )
    mock_vacuum.consumable_status().side_brush_left = timedelta(
        hours=11, minutes=35, seconds=34
    )
    mock_vacuum.consumable_status().filter_left = timedelta(
        hours=11, minutes=35, seconds=34
    )
    mock_vacuum.clean_history().count = "41"
    mock_vacuum.clean_history().total_area = 323.43218
    mock_vacuum.clean_history().total_duration = timedelta(
        hours=11, minutes=15, seconds=34
    )
    mock_vacuum.status().state = "Test Xiaomi Cleaning"
    mock_vacuum.status().state_code = 5
    mock_vacuum.dnd_status().enabled = False
    with mock.patch(
        "homeassistant.components.xiaomi_miio.vacuum.Vacuum"
    ) as mock_vaccum_cls:
        mock_vaccum_cls.return_value = mock_vacuum
        yield mock_vacuum
@pytest.fixture(name="mock_mirobo_errors")
def mirobo_errors_fixture():
    """Mock mock_mirobo_errors to simulate a bad vacuum status request."""
    mock_vacuum = mock.MagicMock()
    # Any status() call raises, mimicking a device that cannot be reached.
    mock_vacuum.status.side_effect = OSError()
    with mock.patch(
        "homeassistant.components.xiaomi_miio.vacuum.Vacuum"
    ) as mock_vaccum_cls:
        mock_vaccum_cls.return_value = mock_vacuum
        yield mock_vacuum
@asyncio.coroutine
def test_xiaomi_exceptions(hass, caplog, mock_mirobo_errors):
    """Test that an OSError during the status fetch is logged, not raised."""
    entity_name = "test_vacuum_cleaner_error"
    yield from async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_PLATFORM: PLATFORM,
                CONF_HOST: "127.0.0.1",
                CONF_NAME: entity_name,
                CONF_TOKEN: "12345678901234567890123456789012",
            }
        },
    )
    yield from hass.async_block_till_done()
    assert "Initializing with host 127.0.0.1 (token 12345...)" in caplog.text
    # Setup should have attempted exactly one status poll and logged the failure.
    assert mock_mirobo_errors.status.call_count == 1
    assert "ERROR" in caplog.text
    assert "Got OSError while fetching the state" in caplog.text
@asyncio.coroutine
def test_xiaomi_vacuum_services(hass, caplog, mock_mirobo_is_got_error):
    """Test state attributes and the standard vacuum services.

    Uses the "got error" fixture, so the entity starts in STATE_ERROR;
    each service call is checked against the mocked device methods.
    """
    entity_name = "test_vacuum_cleaner_1"
    entity_id = "{}.{}".format(DOMAIN, entity_name)
    yield from async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_PLATFORM: PLATFORM,
                CONF_HOST: "127.0.0.1",
                CONF_NAME: entity_name,
                CONF_TOKEN: "12345678901234567890123456789012",
            }
        },
    )
    yield from hass.async_block_till_done()
    assert "Initializing with host 127.0.0.1 (token 12345...)" in caplog.text
    # Check state attributes
    state = hass.states.get(entity_id)
    assert state.state == STATE_ERROR
    assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 14204
    assert state.attributes.get(ATTR_DO_NOT_DISTURB) == STATE_ON
    assert state.attributes.get(ATTR_DO_NOT_DISTURB_START) == "22:00:00"
    assert state.attributes.get(ATTR_DO_NOT_DISTURB_END) == "06:00:00"
    assert state.attributes.get(ATTR_ERROR) == "Error message"
    assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-80"
    # Durations are reported in minutes, areas rounded to whole m².
    assert state.attributes.get(ATTR_CLEANING_TIME) == 155
    assert state.attributes.get(ATTR_CLEANED_AREA) == 123
    assert state.attributes.get(ATTR_FAN_SPEED) == "Quiet"
    assert state.attributes.get(ATTR_FAN_SPEED_LIST) == [
        "Quiet",
        "Balanced",
        "Turbo",
        "Max",
        "Gentle",
    ]
    assert state.attributes.get(ATTR_MAIN_BRUSH_LEFT) == 12
    assert state.attributes.get(ATTR_SIDE_BRUSH_LEFT) == 12
    assert state.attributes.get(ATTR_FILTER_LEFT) == 12
    assert state.attributes.get(ATTR_CLEANING_COUNT) == 35
    assert state.attributes.get(ATTR_CLEANED_TOTAL_AREA) == 123
    assert state.attributes.get(ATTR_CLEANING_TOTAL_TIME) == 695
    # Call services
    yield from hass.services.async_call(
        DOMAIN, SERVICE_START, {"entity_id": entity_id}, blocking=True
    )
    mock_mirobo_is_got_error.assert_has_calls(
        [mock.call.resume_or_start()], any_order=True
    )
    mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_got_error.reset_mock()
    yield from hass.services.async_call(
        DOMAIN, SERVICE_STOP, {"entity_id": entity_id}, blocking=True
    )
    mock_mirobo_is_got_error.assert_has_calls([mock.call.stop()], any_order=True)
    mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_got_error.reset_mock()
    yield from hass.services.async_call(
        DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": entity_id}, blocking=True
    )
    mock_mirobo_is_got_error.assert_has_calls([mock.call.home()], any_order=True)
    mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_got_error.reset_mock()
    yield from hass.services.async_call(
        DOMAIN, SERVICE_LOCATE, {"entity_id": entity_id}, blocking=True
    )
    mock_mirobo_is_got_error.assert_has_calls([mock.call.find()], any_order=True)
    mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_got_error.reset_mock()
    yield from hass.services.async_call(
        DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": entity_id}, blocking=True
    )
    mock_mirobo_is_got_error.assert_has_calls([mock.call.spot()], any_order=True)
    mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_got_error.reset_mock()
    # Set speed service:
    # Raw integer percentages are forwarded as-is.
    yield from hass.services.async_call(
        DOMAIN,
        SERVICE_SET_FAN_SPEED,
        {"entity_id": entity_id, "fan_speed": 60},
        blocking=True,
    )
    mock_mirobo_is_got_error.assert_has_calls(
        [mock.call.set_fan_speed(60)], any_order=True
    )
    mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_got_error.reset_mock()
    # Named presets are translated to their percentage ("turbo" -> 77).
    yield from hass.services.async_call(
        DOMAIN,
        SERVICE_SET_FAN_SPEED,
        {"entity_id": entity_id, "fan_speed": "turbo"},
        blocking=True,
    )
    mock_mirobo_is_got_error.assert_has_calls(
        [mock.call.set_fan_speed(77)], any_order=True
    )
    mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_got_error.reset_mock()
    assert "ERROR" not in caplog.text
    # An unknown preset name is rejected and logged as an error.
    yield from hass.services.async_call(
        DOMAIN,
        SERVICE_SET_FAN_SPEED,
        {"entity_id": entity_id, "fan_speed": "invent"},
        blocking=True,
    )
    assert "ERROR" in caplog.text
    yield from hass.services.async_call(
        DOMAIN,
        SERVICE_SEND_COMMAND,
        {"entity_id": entity_id, "command": "raw"},
        blocking=True,
    )
    mock_mirobo_is_got_error.assert_has_calls(
        [mock.call.raw_command("raw", None)], any_order=True
    )
    mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_got_error.reset_mock()
    yield from hass.services.async_call(
        DOMAIN,
        SERVICE_SEND_COMMAND,
        {"entity_id": entity_id, "command": "raw", "params": {"k1": 2}},
        blocking=True,
    )
    mock_mirobo_is_got_error.assert_has_calls(
        [mock.call.raw_command("raw", {"k1": 2})], any_order=True
    )
    mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_got_error.reset_mock()
@asyncio.coroutine
def test_xiaomi_specific_services(hass, caplog, mock_mirobo_is_on):
    """Test the Xiaomi-specific services (remote control, zoned clean).

    Uses the "is on" fixture, so the entity starts in STATE_CLEANING.
    """
    entity_name = "test_vacuum_cleaner_2"
    entity_id = "{}.{}".format(DOMAIN, entity_name)
    yield from async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_PLATFORM: PLATFORM,
                CONF_HOST: "192.168.1.100",
                CONF_NAME: entity_name,
                CONF_TOKEN: "12345678901234567890123456789012",
            }
        },
    )
    yield from hass.async_block_till_done()
    assert "Initializing with host 192.168.1.100 (token 12345" in caplog.text
    # Check state attributes
    state = hass.states.get(entity_id)
    assert state.state == STATE_CLEANING
    assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 14204
    assert state.attributes.get(ATTR_DO_NOT_DISTURB) == STATE_OFF
    assert state.attributes.get(ATTR_ERROR) is None
    assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-30"
    assert state.attributes.get(ATTR_CLEANING_TIME) == 175
    assert state.attributes.get(ATTR_CLEANED_AREA) == 133
    # fanspeed 99 has no preset name, so the raw value is exposed.
    assert state.attributes.get(ATTR_FAN_SPEED) == 99
    assert state.attributes.get(ATTR_FAN_SPEED_LIST) == [
        "Quiet",
        "Balanced",
        "Turbo",
        "Max",
        "Gentle",
    ]
    assert state.attributes.get(ATTR_MAIN_BRUSH_LEFT) == 11
    assert state.attributes.get(ATTR_SIDE_BRUSH_LEFT) == 11
    assert state.attributes.get(ATTR_FILTER_LEFT) == 11
    assert state.attributes.get(ATTR_CLEANING_COUNT) == 41
    assert state.attributes.get(ATTR_CLEANED_TOTAL_AREA) == 323
    assert state.attributes.get(ATTR_CLEANING_TOTAL_TIME) == 675
    # Xiaomi vacuum specific services:
    yield from hass.services.async_call(
        XIAOMI_DOMAIN,
        SERVICE_START_REMOTE_CONTROL,
        {ATTR_ENTITY_ID: entity_id},
        blocking=True,
    )
    mock_mirobo_is_on.assert_has_calls([mock.call.manual_start()], any_order=True)
    mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_on.reset_mock()
    control = {"duration": 1000, "rotation": -40, "velocity": -0.1}
    yield from hass.services.async_call(
        XIAOMI_DOMAIN, SERVICE_MOVE_REMOTE_CONTROL, control, blocking=True
    )
    mock_mirobo_is_on.manual_control.assert_has_calls(
        [mock.call(**control)], any_order=True
    )
    mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_on.reset_mock()
    yield from hass.services.async_call(
        XIAOMI_DOMAIN, SERVICE_STOP_REMOTE_CONTROL, {}, blocking=True
    )
    mock_mirobo_is_on.assert_has_calls([mock.call.manual_stop()], any_order=True)
    mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_on.reset_mock()
    control_once = {"duration": 2000, "rotation": 120, "velocity": 0.1}
    yield from hass.services.async_call(
        XIAOMI_DOMAIN, SERVICE_MOVE_REMOTE_CONTROL_STEP, control_once, blocking=True
    )
    mock_mirobo_is_on.manual_control_once.assert_has_calls(
        [mock.call(**control_once)], any_order=True
    )
    mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_on.reset_mock()
    # Zoned clean appends the repeat count to each zone rectangle.
    control = {"zone": [[123, 123, 123, 123]], "repeats": 2}
    yield from hass.services.async_call(
        XIAOMI_DOMAIN, SERVICE_CLEAN_ZONE, control, blocking=True
    )
    mock_mirobo_is_on.zoned_clean.assert_has_calls(
        [mock.call([[123, 123, 123, 123, 2]])], any_order=True
    )
    mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
    mock_mirobo_is_on.reset_mock()
| 34.9375 | 84 | 0.70256 | 1,947 | 14,534 | 4.873652 | 0.105804 | 0.0548 | 0.058173 | 0.070819 | 0.807145 | 0.755401 | 0.72916 | 0.676467 | 0.614501 | 0.564127 | 0 | 0.033091 | 0.195335 | 14,534 | 415 | 85 | 35.021687 | 0.778281 | 0.027109 | 0 | 0.430556 | 0 | 0 | 0.073197 | 0.023902 | 0 | 0 | 0 | 0 | 0.188889 | 1 | 0.016667 | false | 0 | 0.022222 | 0 | 0.038889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6b2f81138ccbc824d6a2a562dfa045efc46fc3ba | 671 | py | Python | tests/test_merge_rdf.py | ctnitschke/feedcollector | 6e8bd67f34177d142131aa8306fb5cc03bdd55df | [
"Apache-2.0"
] | null | null | null | tests/test_merge_rdf.py | ctnitschke/feedcollector | 6e8bd67f34177d142131aa8306fb5cc03bdd55df | [
"Apache-2.0"
] | null | null | null | tests/test_merge_rdf.py | ctnitschke/feedcollector | 6e8bd67f34177d142131aa8306fb5cc03bdd55df | [
"Apache-2.0"
] | null | null | null | import pytest
try:
from lxml.etree import ElementTree as ET
except ImportError:
from xml.etree import ElementTree as ET
import feedcollector
def test_merge():
    """Merging a new RDF feed into an old one keeps the items of both."""
    previous = ET.parse('tests/data/rdf-old.rss')
    latest = ET.parse('tests/data/rdf-new.rss')
    merged = feedcollector.rdf.merge_feeds(latest, previous)
    serialized = ET.tostring(merged.getroot(), encoding='unicode')
    # Every item from either source feed must survive the merge.
    expected_items = ('first', 'second', 'third', 'fourth',
                      'fifth', 'sixth', 'seventh')
    for item in expected_items:
        assert item in serialized
| 27.958333 | 74 | 0.734724 | 96 | 671 | 4.96875 | 0.427083 | 0.201258 | 0.205451 | 0.251572 | 0.205451 | 0.096436 | 0 | 0 | 0 | 0 | 0 | 0 | 0.178838 | 671 | 23 | 75 | 29.173913 | 0.865699 | 0 | 0 | 0 | 0 | 0 | 0.134128 | 0.065574 | 0 | 0 | 0 | 0 | 0.388889 | 1 | 0.055556 | false | 0 | 0.277778 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
6b30778326365c545d2dee1806d2c5e35f11f426 | 124 | py | Python | expert_tourist/models/__init__.py | richin13/expert-tourist | 6a7827d9216fea1712aa129001715f6fcb1677f2 | [
"MIT"
] | null | null | null | expert_tourist/models/__init__.py | richin13/expert-tourist | 6a7827d9216fea1712aa129001715f6fcb1677f2 | [
"MIT"
] | 5 | 2017-06-11T04:36:15.000Z | 2021-06-01T21:55:05.000Z | expert_tourist/models/__init__.py | richin13/expert-tourist | 6a7827d9216fea1712aa129001715f6fcb1677f2 | [
"MIT"
] | null | null | null | from flask_mongoengine import MongoEngine
# Shared MongoEngine instance; model modules below import this for DB access.
db = MongoEngine()
from .user import User
from .place import Place, PlaceLoader
| 17.714286 | 41 | 0.806452 | 16 | 124 | 6.1875 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.145161 | 124 | 6 | 42 | 20.666667 | 0.933962 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
861a07171757958d2ef1f3b4bba93c91766ec7e8 | 4,675 | py | Python | pandasticsearch/client.py | macthestack/pandasticsearch | 9fa03497886a8d1a1ca316b837def9fc072320b5 | [
"MIT"
] | null | null | null | pandasticsearch/client.py | macthestack/pandasticsearch | 9fa03497886a8d1a1ca316b837def9fc072320b5 | [
"MIT"
] | null | null | null | pandasticsearch/client.py | macthestack/pandasticsearch | 9fa03497886a8d1a1ca316b837def9fc072320b5 | [
"MIT"
] | null | null | null | # -*- coding: UTF-8 -*-
import json
import sys
import base64
import ssl
from six.moves import urllib
from pandasticsearch.errors import ServerDefinedException
class RestClient(object):
    """
    RestClient talks to Elasticsearch cluster through native RESTful API.
    """

    def __init__(self, url, endpoint='', username=None, password=None, verify_ssl=True):
        """
        Initialize the RESTful client from the keyword arguments.

        :param str url: URL of Broker node in the Elasticsearch cluster
        :param str endpoint: Endpoint that Broker listens for queries on
        :param username: optional username for HTTP Basic authentication
        :param password: optional password for HTTP Basic authentication
        :param bool verify_ssl: verify the server's TLS certificate (default True)
        """
        self.url = url
        self.endpoint = endpoint
        self.username = username
        self.password = password
        self.verify_ssl = verify_ssl

    def _prepare_url(self):
        """Join base URL and endpoint with exactly one '/' between them."""
        if self.url.endswith('/'):
            url = self.url + self.endpoint
        else:
            url = self.url + '/' + self.endpoint
        return url

    def _send(self, body=None, params=None):
        """
        Send an HTTP request and return the decoded JSON response.

        Issues a GET when *body* is None, otherwise a POST with *body*
        serialized as JSON. Shared by :meth:`get` and :meth:`post`,
        which previously duplicated this logic.

        :param optional body: JSON-serializable object for the request body.
        :param optional params: Dictionary to be sent in the query string.
        :raises ServerDefinedException: when the server responds with an error.
        """
        try:
            url = self._prepare_url()
            if params is not None:
                url = '{0}?{1}'.format(url, urllib.parse.urlencode(params))

            if body is None:
                req = urllib.request.Request(url=url)
            else:
                req = urllib.request.Request(
                    url=url,
                    data=json.dumps(body).encode('utf-8'),
                    headers={'Content-Type': 'application/json'})

            if self.username is not None and self.password is not None:
                # Encode str credentials to bytes first: on Python 3,
                # b'%s' % <str> raises TypeError (the original bug).
                username = self.username
                password = self.password
                if isinstance(username, str):
                    username = username.encode('utf-8')
                if isinstance(password, str):
                    password = password.encode('utf-8')
                base64creds = base64.b64encode(b'%s:%s' % (username, password)).decode("ascii")
                req.add_header("Authorization", "Basic %s" % base64creds)

            if self.verify_ssl is False:
                # Deliberately skip certificate verification (self-signed certs).
                context = ssl._create_unverified_context()
                res = urllib.request.urlopen(req, context=context)
            else:
                res = urllib.request.urlopen(req)

            data = res.read().decode("utf-8")
            res.close()
        except urllib.error.HTTPError:
            _, e, _ = sys.exc_info()
            reason = None
            if e.code != 200:
                # Try to extract the server-defined error message from the body.
                try:
                    reason = json.loads(e.read().decode("utf-8"))
                except (ValueError, AttributeError, KeyError):
                    pass
                else:
                    reason = reason.get('error', None)
            raise ServerDefinedException(reason)
        else:
            return json.loads(data)

    def get(self, params=None):
        """
        Sends a GET request to Elasticsearch.

        :param optional params: Dictionary to be sent in the query string.
        :return: The response as a dictionary.

        >>> from pandasticsearch import RestClient
        >>> client = RestClient('http://localhost:9200', '_mapping/index')
        >>> print(client.get())
        """
        return self._send(params=params)

    def post(self, data, params=None):
        """
        Sends a POST request to Elasticsearch.

        :param data: The json data to send in the body of the request.
        :param optional params: Dictionary to be sent in the query string.
        :return: The response as a dictionary.

        >>> from pandasticsearch import RestClient
        >>> client = RestClient('http://localhost:9200', 'index/type/_search')
        >>> print(client.post(data={"query":{"match_all":{}}}))
        """
        return self._send(body=data, params=params)
| 34.375 | 94 | 0.548663 | 498 | 4,675 | 5.078313 | 0.26506 | 0.032028 | 0.021352 | 0.036378 | 0.667457 | 0.650059 | 0.650059 | 0.650059 | 0.650059 | 0.650059 | 0 | 0.013739 | 0.346096 | 4,675 | 135 | 95 | 34.62963 | 0.813543 | 0.199572 | 0 | 0.72619 | 0 | 0 | 0.039685 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0.119048 | 0.071429 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
861ee5a51af9d9c3fd957ddd0c7d7812c8ec801c | 18 | py | Python | codepunks/version.py | redshodan/codepunks | e07e7aef5fa4748002329c5bb17ed4d51b6292c1 | [
"Apache-2.0"
] | null | null | null | codepunks/version.py | redshodan/codepunks | e07e7aef5fa4748002329c5bb17ed4d51b6292c1 | [
"Apache-2.0"
] | null | null | null | codepunks/version.py | redshodan/codepunks | e07e7aef5fa4748002329c5bb17ed4d51b6292c1 | [
"Apache-2.0"
] | null | null | null | VERSION = "0.1b2"
| 9 | 17 | 0.611111 | 3 | 18 | 3.666667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 0.166667 | 18 | 1 | 18 | 18 | 0.533333 | 0 | 0 | 0 | 0 | 0 | 0.277778 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
864398ec85b568a503f899aeba043c7c53150d41 | 283 | py | Python | tests/test_dd_finviz_api.py | ooaj/GamestonkTerminal | 6b5b494458b0e01a2db7a06890610454220d8a8f | [
"MIT"
] | 2 | 2021-04-26T14:13:49.000Z | 2021-05-24T04:19:44.000Z | tests/test_dd_finviz_api.py | ooaj/GamestonkTerminal | 6b5b494458b0e01a2db7a06890610454220d8a8f | [
"MIT"
] | null | null | null | tests/test_dd_finviz_api.py | ooaj/GamestonkTerminal | 6b5b494458b0e01a2db7a06890610454220d8a8f | [
"MIT"
] | 2 | 2021-07-08T19:24:29.000Z | 2021-07-12T23:57:24.000Z | """ due_diligence/finviz_api.py tests """
# noqa: F401
import unittest
# pylint: disable=unused-import
import pytest
from gamestonk_terminal.due_diligence.finviz_api import analyst
class TestDdFinvizApi(unittest.TestCase):
    """Smoke tests for the finviz due-diligence analyst wrapper."""

    def test_analyst(self):
        # Fetch analyst ratings for PLTR; passing should not raise.
        # NOTE(review): appears to require live network access to finviz — confirm.
        analyst([], "PLTR")
| 20.214286 | 63 | 0.75265 | 35 | 283 | 5.914286 | 0.714286 | 0.115942 | 0.173913 | 0.202899 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012346 | 0.141343 | 283 | 13 | 64 | 21.769231 | 0.839506 | 0.268551 | 0 | 0 | 0 | 0 | 0.020101 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.5 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
8645308d3b6f14391a89c305f983da726437cbe9 | 313 | gyp | Python | binding.gyp | dxg/node-expat-json | c88c56b90fd7a3831cee4cddc27ccf5e6a5b5e15 | [
"MIT"
] | null | null | null | binding.gyp | dxg/node-expat-json | c88c56b90fd7a3831cee4cddc27ccf5e6a5b5e15 | [
"MIT"
] | null | null | null | binding.gyp | dxg/node-expat-json | c88c56b90fd7a3831cee4cddc27ccf5e6a5b5e15 | [
"MIT"
] | null | null | null | {
'targets': [
{
'target_name': 'node_expat_object',
'sources': [
'src/parse.cc',
'src/node-expat-object.cc'
],
'include_dirs': [
'<!(node -e "require(\'nan\')")'
],
'dependencies': [
'deps/libexpat/libexpat.gyp:expat'
]
}
]
}
| 17.388889 | 42 | 0.444089 | 27 | 313 | 5 | 0.703704 | 0.133333 | 0.222222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.351438 | 313 | 17 | 43 | 18.411765 | 0.665025 | 0 | 0 | 0.117647 | 0 | 0 | 0.504792 | 0.178914 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
864905096562e93c9bfa2f614ed3c0e52781a918 | 276 | py | Python | bot/DataBase/models/base.py | ehsanbarkhordar/bot_141 | 6104411339a2607efe477077e9ddab6c441573da | [
"Apache-2.0"
] | null | null | null | bot/DataBase/models/base.py | ehsanbarkhordar/bot_141 | 6104411339a2607efe477077e9ddab6c441573da | [
"Apache-2.0"
] | null | null | null | bot/DataBase/models/base.py | ehsanbarkhordar/bot_141 | 6104411339a2607efe477077e9ddab6c441573da | [
"Apache-2.0"
] | null | null | null | from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from main_config import DbConfig
# SQLAlchemy engine bound to the application's configured database URL.
engine = create_engine(DbConfig.database_url)
# Session factory; call Session() to get a new session bound to the engine.
Session = sessionmaker(bind=engine)
# Declarative base class all ORM models should inherit from.
Base = declarative_base()
| 27.6 | 55 | 0.851449 | 35 | 276 | 6.542857 | 0.485714 | 0.183406 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097826 | 276 | 9 | 56 | 30.666667 | 0.919679 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.571429 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
865b774e2fdf270352cdfdf8b15b38265872755e | 298 | py | Python | workflow/lib/idds/workflow/__init__.py | SergeyPod/iDDS | 062acb2414bff31c196f819db896809365246ede | [
"Apache-2.0"
] | null | null | null | workflow/lib/idds/workflow/__init__.py | SergeyPod/iDDS | 062acb2414bff31c196f819db896809365246ede | [
"Apache-2.0"
] | 1 | 2020-10-01T15:47:50.000Z | 2020-10-28T17:55:01.000Z | workflow/lib/idds/workflow/__init__.py | SergeyPod/iDDS | 062acb2414bff31c196f819db896809365246ede | [
"Apache-2.0"
] | 5 | 2019-09-14T20:34:41.000Z | 2021-12-18T10:46:58.000Z | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0OA
#
# Authors:
# - Wen Guan, <wen.guan@cern.ch>, 2019
| 29.8 | 66 | 0.711409 | 51 | 298 | 4.156863 | 0.745098 | 0.141509 | 0.122642 | 0.150943 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.031873 | 0.157718 | 298 | 9 | 67 | 33.111111 | 0.812749 | 0.939597 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
86608c45fdde4fa64da797301e6f6679a2660dce | 1,585 | py | Python | release/stubs.min/System/Diagnostics/__init___parts/EventTypeFilter.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | [
"MIT"
] | null | null | null | release/stubs.min/System/Diagnostics/__init___parts/EventTypeFilter.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | [
"MIT"
] | null | null | null | release/stubs.min/System/Diagnostics/__init___parts/EventTypeFilter.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | [
"MIT"
] | null | null | null | class EventTypeFilter(TraceFilter):
"""
Indicates whether a listener should trace based on the event type.
EventTypeFilter(level: SourceLevels)
"""
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return EventTypeFilter()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def ShouldTrace(self,cache,source,eventType,id,formatOrMessage,args,data1,data):
"""
ShouldTrace(self: EventTypeFilter,cache: TraceEventCache,source: str,eventType: TraceEventType,id: int,formatOrMessage: str,args: Array[object],data1: object,data: Array[object]) -> bool
Determines whether the trace listener should trace the event.
cache: A System.Diagnostics.TraceEventCache that represents the information cache for the trace event.
source: The name of the source.
eventType: One of the System.Diagnostics.TraceEventType values.
id: A trace identifier number.
formatOrMessage: The format to use for writing an array of arguments,or a message to write.
args: An array of argument objects.
data1: A trace data object.
data: An array of trace data objects.
Returns: trueif the trace should be produced; otherwise,false.
"""
pass
 @staticmethod
 def __new__(self,level):
  """ __new__(cls: type,level: SourceLevels) """
  # Stub constructor signature; per the class docstring, `level` is the
  # SourceLevels value the filter traces on.
  pass
EventType=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the event type of the messages to trace.
Get: EventType(self: EventTypeFilter) -> SourceLevels
Set: EventType(self: EventTypeFilter)=value
"""
| 37.738095 | 190 | 0.720505 | 200 | 1,585 | 5.67 | 0.415 | 0.022046 | 0.02381 | 0.031746 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002326 | 0.18612 | 1,585 | 41 | 191 | 38.658537 | 0.876744 | 0.594322 | 0 | 0.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.3 | false | 0.2 | 0 | 0 | 0.7 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 3 |
867561ff1e21103f9974f0b9b91a87f8b6ad4441 | 2,345 | py | Python | src/click/__init__.py | tucked/click | 93551dd44ced28fae4bd404cf24ec89b589e6568 | [
"BSD-3-Clause"
] | null | null | null | src/click/__init__.py | tucked/click | 93551dd44ced28fae4bd404cf24ec89b589e6568 | [
"BSD-3-Clause"
] | null | null | null | src/click/__init__.py | tucked/click | 93551dd44ced28fae4bd404cf24ec89b589e6568 | [
"BSD-3-Clause"
] | null | null | null | """
Click is a simple Python module inspired by the stdlib optparse to make
writing command line scripts fun. Unlike other modules, it's based
around a simple API that does not come with too much magic and is
composable.
"""
from .core import Argument
from .core import BaseCommand
from .core import Command
from .core import CommandCollection
from .core import Context
from .core import Group
from .core import MultiCommand
from .core import Option
from .core import Parameter
from .decorators import argument
from .decorators import command
from .decorators import confirmation_option
from .decorators import group
from .decorators import help_option
from .decorators import make_pass_decorator
from .decorators import option
from .decorators import pass_context
from .decorators import pass_obj
from .decorators import password_option
from .decorators import version_option
from .exceptions import Abort
from .exceptions import BadArgumentUsage
from .exceptions import BadOptionUsage
from .exceptions import BadParameter
from .exceptions import ClickException
from .exceptions import FileError
from .exceptions import MissingParameter
from .exceptions import NoSuchOption
from .exceptions import UsageError
from .formatting import HelpFormatter
from .formatting import wrap_text
from .globals import get_current_context
from .parser import OptionParser
from .termui import clear
from .termui import confirm
from .termui import echo_via_pager
from .termui import edit
from .termui import get_terminal_size
from .termui import getchar
from .termui import launch
from .termui import pause
from .termui import progressbar
from .termui import prompt
from .termui import secho
from .termui import style
from .termui import unstyle
from .types import BOOL
from .types import Choice
from .types import DateTime
from .types import File
from .types import FLOAT
from .types import FloatRange
from .types import INT
from .types import IntRange
from .types import ParamType
from .types import Path
from .types import STRING
from .types import Tuple
from .types import UNPROCESSED
from .types import UUID
from .utils import echo
from .utils import format_filename
from .utils import get_app_dir
from .utils import get_binary_stream
from .utils import get_os_args
from .utils import get_text_stream
from .utils import open_file
__version__ = "8.1.0.dev0"
| 30.855263 | 71 | 0.831557 | 337 | 2,345 | 5.700297 | 0.356083 | 0.065591 | 0.109318 | 0.054138 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001968 | 0.133049 | 2,345 | 75 | 72 | 31.266667 | 0.942941 | 0.092111 | 0 | 0 | 0 | 0 | 0.004715 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.058824 | 0.985294 | 0 | 0.985294 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 3 |
8676d3f49a97df7bcfcea4a7d384773673ae3382 | 819 | py | Python | utils/scheduler.py | ivclab/Multistage_Pruning | 0fb7e084f56d565c27dd9c4536cc95204eecf926 | [
"BSD-3-Clause"
] | 11 | 2020-04-07T02:23:53.000Z | 2021-09-02T13:54:47.000Z | utils/scheduler.py | van-hub/Multistage_Pruning | 0fb7e084f56d565c27dd9c4536cc95204eecf926 | [
"BSD-3-Clause"
] | null | null | null | utils/scheduler.py | van-hub/Multistage_Pruning | 0fb7e084f56d565c27dd9c4536cc95204eecf926 | [
"BSD-3-Clause"
] | 3 | 2020-11-07T17:10:08.000Z | 2021-01-18T02:26:56.000Z | import math
from torch.optim import lr_scheduler
class EpochBasedExponentialLR(lr_scheduler._LRScheduler):
    """Exponential learning-rate decay applied once per epoch.

    The rate for each parameter group is ``base_lr * decay ** epoch``,
    where ``epoch`` is the scheduler's ``last_epoch`` counter.
    """

    def __init__(self, optimizer, decay=0.96, last_epoch=-1):
        """Store the per-epoch decay factor and initialise the base class."""
        self.decay = decay
        super(EpochBasedExponentialLR, self).__init__(optimizer, last_epoch)

    def get_lr(self):
        """Return the decayed learning rate for every parameter group."""
        factor = self.decay ** self.last_epoch
        return [factor * base_lr for base_lr in self.base_lrs]
class EpochBasedCosineLR(lr_scheduler._LRScheduler):
    """Cosine learning-rate annealing over ``max_epochs`` epochs.

    The rate for each parameter group follows
    ``0.5 * base_lr * (1 + cos(pi * epoch / max_epochs))`` — i.e. it
    starts at ``base_lr`` and anneals to 0 at ``max_epochs``.
    """

    def __init__(self, optimizer, max_epochs, last_epoch=-1):
        """Store the annealing horizon and initialise the base class."""
        self.max_epochs = max_epochs
        super(EpochBasedCosineLR, self).__init__(optimizer, last_epoch)

    def get_lr(self):
        """Return the cosine-annealed learning rate for every group."""
        phase = math.pi * self.last_epoch / self.max_epochs
        scale = 0.5 * (1 + math.cos(phase))
        return [scale * base_lr for base_lr in self.base_lrs]
| 32.76 | 91 | 0.68254 | 107 | 819 | 4.859813 | 0.308411 | 0.103846 | 0.084615 | 0.096154 | 0.438462 | 0.438462 | 0.438462 | 0.192308 | 0.192308 | 0.192308 | 0 | 0.012559 | 0.222222 | 819 | 24 | 92 | 34.125 | 0.803768 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0.111111 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
867d69547b2c145c03432071b7232bc477e0306f | 314 | py | Python | 01_Language/01_Functions/python/array_pad.py | cliff363825/TwentyFour | 09df59bd5d275e66463e343647f46027397d1233 | [
"MIT"
] | 3 | 2020-06-28T07:42:51.000Z | 2021-01-15T10:32:11.000Z | 01_Language/01_Functions/python/array_pad.py | cliff363825/TwentyFour | 09df59bd5d275e66463e343647f46027397d1233 | [
"MIT"
] | 9 | 2021-03-10T22:45:40.000Z | 2022-02-27T06:53:20.000Z | 01_Language/01_Functions/python/array_pad.py | cliff363825/TwentyFour | 09df59bd5d275e66463e343647f46027397d1233 | [
"MIT"
] | 1 | 2021-01-15T10:51:24.000Z | 2021-01-15T10:51:24.000Z | # coding: utf-8
def array_pad(l, size, value):
    """Pad list *l* with copies of *value* to length ``abs(size)``.

    Mirrors PHP's ``array_pad``: a positive *size* pads on the right, a
    negative *size* pads on the left.  When ``abs(size) <= len(l)`` the
    multiplier is non-positive, so no padding is added and a copy of the
    original list is returned.
    """
    if size < 0:
        left_pad = [value] * (-size - len(l))
        return left_pad + l
    return l + [value] * (size - len(l))
if __name__ == '__main__':
    # Demonstrate right-padding, left-padding, and the no-op case.
    sample = [12, 10, 9]
    for size, fill in ((5, 0), (-7, -1), (2, 999)):
        print(array_pad(sample, size, fill))
| 20.933333 | 49 | 0.512739 | 51 | 314 | 2.921569 | 0.490196 | 0.214765 | 0.241611 | 0.281879 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.071749 | 0.289809 | 314 | 14 | 50 | 22.428571 | 0.596413 | 0.041401 | 0 | 0 | 0 | 0 | 0.026756 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0 | 0 | 0.3 | 0.3 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
86cbf2291d6e0c6ec1c3090a2a5ffb27b020a3ae | 6,135 | py | Python | tests/farm/views/_test_template_resource.py | szkkteam/agrosys | a390332202f7200632d2ff3816e1b0f3cc76f586 | [
"MIT"
] | null | null | null | tests/farm/views/_test_template_resource.py | szkkteam/agrosys | a390332202f7200632d2ff3816e1b0f3cc76f586 | [
"MIT"
] | null | null | null | tests/farm/views/_test_template_resource.py | szkkteam/agrosys | a390332202f7200632d2ff3816e1b0f3cc76f586 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Common Python library imports
# Pip package imports
import pytest
from flask import url_for
from flask_security import AnonymousUser, current_user
# Internal package imports
from backend.farm.models import Production, Farm, Template
# ---------------------------------------------------------------------------
# Request fixtures: task payloads in the wire format the templates API
# accepts (camelCase keys, ISO-8601 date strings, costs nullable).
# ---------------------------------------------------------------------------
TASK_GENERAL_1 = {
    'title': 'task general',
    'taskType': 'TaskGeneral',
    'status': 'Pending',
    'description': 'some text',
    'dates': {
        'startDate': '2020-07-21T20:00:00',
        'endDate': '2020-07-22T20:00:00',
    },
    'predictedCost': 1,
    'actualCost': None,
}
# NOTE(review): TASK_GENERAL_2 is not referenced anywhere in this module —
# candidate for removal, or for use in a future test.
TASK_GENERAL_2 = {
    'title': 'task general 2',
    'taskType': 'TaskGeneral',
    'status': 'Pending',
    'description': 'some text',
    'dates': {
        'startDate': '2020-07-21T20:00:00',
        'endDate': '2020-07-22T20:00:00',
    },
    'predictedCost': 1,
    'actualCost': None,
}
TASK_PRUNING_1 = {
    'title': 'task pruning',
    'taskType': 'TaskPruning',
    'status': 'Pending',
    'description': 'some text',
    'dates': {
        'startDate': '2020-07-21T20:00:00',
        'endDate': '2020-07-22T20:00:00',
    },
    'predictedCost': None,
    'actualCost': None,
}
# Two-task payload used as the `tasks` list of a new template/production.
VALID_INPUT_DATA = [
    TASK_PRUNING_1, TASK_GENERAL_1
]
NEW_PRODUCTION_DATA = {
    'title': 'Production 1',
    'tasks': VALID_INPUT_DATA,
}
def get_input_data(input):
    """Return a deep copy of *input*, isolating tests from the fixtures.

    The previous implementation used a shallow ``.copy()``, which left the
    nested ``tasks`` dicts shared with the module-level fixture constants —
    any test that mutated ``data['tasks'][i]`` silently corrupted the
    fixtures for every later test.  A deep copy keeps each test isolated.
    """
    import copy
    return copy.deepcopy(input)
class TestTemplateResource:
    """CRUD tests for the template endpoints, driven by the logged-in
    `farm_owner` fixture and the `api_client` test client."""
    def test_create(self, api_client, farm_owner):
        # POST a full template payload under the owner's first farm.
        api_client.login_as(farm_owner)
        data = get_input_data(NEW_PRODUCTION_DATA)
        farm = Farm.all()[0]
        r = api_client.post(url_for('api.templates_resource', farm_id=farm.id), data=data)
        assert r.status_code == 201
        assert 'id' in r.json
        assert 'title' in r.json
        assert 'tasks' in r.json
        assert len(r.json['tasks'])
    def test_get(self, api_client, farm_owner):
        # Fetch a single existing template and check its serialized shape.
        api_client.login_as(farm_owner)
        template = Template.all()[0]
        r = api_client.get(url_for('api.template_resource', id=template.id))
        assert r.status_code == 200
        assert 'id' in r.json
        assert 'title' in r.json
        assert 'tasks' in r.json
        assert len(r.json['tasks'])
    def test_list(self, api_client, farm_owner):
        # List all templates of a farm; every entry must carry tasks.
        api_client.login_as(farm_owner)
        farm = Farm.all()[0]
        r = api_client.get(url_for('api.templates_resource', farm_id=farm.id))
        assert r.status_code == 200
        assert len(r.json)
        for e in r.json:
            assert 'id' in e
            assert 'title' in e
            assert 'tasks' in e
            assert len(e['tasks'])
    @pytest.mark.parametrize("models", ['Template(TEMPLATE_3)'], indirect=True)
    def test_list_default(self, api_client, farm_owner, models):
        # The default-template list is farm-independent (no farm_id in URL).
        api_client.login_as(farm_owner)
        print("Templates: ", Template.all())
        r = api_client.get(url_for('api.farm_template_default_resource'))
        assert r.status_code == 200
        assert len(r.json)
        for e in r.json:
            assert 'id' in e
            assert 'title' in e
            assert 'tasks' in e
    def test_patch(self, api_client, farm_owner):
        # Partial update: only the title is sent and must be echoed back.
        api_client.login_as(farm_owner)
        template = Template.all()[0]
        new_name = "New template name"
        r = api_client.patch(url_for('api.template_resource', id=template.id), data=dict(title=new_name))
        assert r.status_code == 200
        assert r.json['title'] == new_name
        assert 'id' in r.json
    def test_put(self, api_client, farm_owner):
        # Full replace with a fresh payload; title change must round-trip.
        api_client.login_as(farm_owner)
        template = Template.all()[0]
        data = get_input_data(NEW_PRODUCTION_DATA)
        data['title'] = "New template name"
        r = api_client.put(url_for('api.template_resource', id=template.id), data=data)
        assert r.status_code == 200
        assert r.json['title'] == data['title']
        assert 'id' in r.json
    def test_put_modify_task(self, api_client, farm_owner):
        # NOTE(review): Task is imported but never used in this test.
        from backend.farm.models import Task
        api_client.login_as(farm_owner)
        template = Template.all()[0]
        old_tasks = template.tasks
        data = get_input_data(NEW_PRODUCTION_DATA)
        data['title'] = "New template name"
        data['tasks'][0]['title'] = 'Updated task name'
        data['tasks'][1]['title'] = 'Updated task name'
        r = api_client.put(url_for('api.template_resource', id=template.id), data=data)
        assert r.status_code == 200
        assert r.json['title'] == data['title']
        assert 'id' in r.json
        # A PUT is expected to replace the tasks: new titles everywhere and
        # none of the old task ids surviving.
        for task in r.json['tasks']:
            assert 'Updated task name' == task['title']
            for ot in old_tasks:
                assert ot.id != task['id']
    @pytest.mark.skip(reason="Database cascades not be reworked, because related objects are not deleted.")
    def test_delete(self, api_client, farm_owner):
        api_client.login_as(farm_owner)
        template = Template.all()[0]
        r = api_client.delete(url_for('api.template_resource', id=template.id))
        assert r.status_code == 204
        assert not Template.get(template.id)
class TestFarmTemplateResource:
    """Tests for attaching/detaching an existing template to/from a farm."""
    @pytest.mark.parametrize("models", ['Template(TEMPLATE_3)'], indirect=True)
    def test_put(self, api_client, farm_owner, models):
        api_client.login_as(farm_owner)
        farm = Farm.all()[0]
        template = models.TEMPLATE_3
        len_templates = len(farm.templates)
        r = api_client.put(url_for('api.farm_template_resource', farm_id=farm.id, template_id=template.id))
        assert r.status_code == 200
        assert 'id' in r.json
        # Attaching must grow the farm's template list by exactly one.
        assert (len_templates + 1) == len(Farm.get(farm.id).templates)
    def test_delete(self, api_client, farm_owner):
        api_client.login_as(farm_owner)
        farm = Farm.all()[0]
        template = Template.all()[0]
        len_templates = len(farm.templates)
        r = api_client.delete(url_for('api.farm_template_resource', farm_id=farm.id, template_id=template.id))
        assert r.status_code == 204
        # Detaching must shrink the farm's template list by exactly one.
        assert (len_templates - 1) == len(Farm.get(farm.id).templates)
| 30.221675 | 110 | 0.622331 | 825 | 6,135 | 4.436364 | 0.14303 | 0.07377 | 0.024863 | 0.046448 | 0.743716 | 0.719672 | 0.712842 | 0.680874 | 0.649727 | 0.600273 | 0 | 0.0305 | 0.246455 | 6,135 | 202 | 111 | 30.371287 | 0.761194 | 0.019071 | 0 | 0.578947 | 0 | 0 | 0.181304 | 0.039088 | 0 | 0 | 0 | 0 | 0.256579 | 1 | 0.072368 | false | 0 | 0.032895 | 0 | 0.125 | 0.006579 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
86d010eaf657aedad80f308c9b60023add60128d | 261 | py | Python | tests/io/file_readline.py | sebastien-riou/micropython | 116c15842fd48ddb77b0bc016341d936a0756573 | [
"MIT"
] | 13,648 | 2015-01-01T01:34:51.000Z | 2022-03-31T16:19:53.000Z | tests/io/file_readline.py | sebastien-riou/micropython | 116c15842fd48ddb77b0bc016341d936a0756573 | [
"MIT"
] | 7,092 | 2015-01-01T07:59:11.000Z | 2022-03-31T23:52:18.000Z | tests/io/file_readline.py | sebastien-riou/micropython | 116c15842fd48ddb77b0bc016341d936a0756573 | [
"MIT"
] | 4,942 | 2015-01-02T11:48:50.000Z | 2022-03-31T19:57:10.000Z | f = open("io/data/file1")
print(f.readline())
print(f.readline(3))
print(f.readline(4))
print(f.readline(5))
print(f.readline())
# readline() on writable file
f = open("io/data/file1", "ab")
try:
f.readline()
except OSError:
print("OSError")
f.close()
| 17.4 | 31 | 0.659004 | 42 | 261 | 4.095238 | 0.428571 | 0.313953 | 0.406977 | 0.127907 | 0.186047 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021834 | 0.122605 | 261 | 14 | 32 | 18.642857 | 0.729258 | 0.103448 | 0 | 0.166667 | 0 | 0 | 0.150862 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 3 |
86e7370c18061dfa6f443976ebea18674e94a920 | 700 | py | Python | blog/pages/models.py | Jamilur-rahman-470/Django_blog | 2d605ff5c4c77a3b065fbc5579b2d90b8287e432 | [
"MIT"
] | null | null | null | blog/pages/models.py | Jamilur-rahman-470/Django_blog | 2d605ff5c4c77a3b065fbc5579b2d90b8287e432 | [
"MIT"
] | null | null | null | blog/pages/models.py | Jamilur-rahman-470/Django_blog | 2d605ff5c4c77a3b065fbc5579b2d90b8287e432 | [
"MIT"
] | null | null | null | from django.db import models
from ckeditor.fields import RichTextField
# Create your models here.
class HeaderText(models.Model):
    """Text fragments for a page header."""
    # NOTE(review): field purposes inferred from names only — `name` looks
    # like the identifier of the header entry and txt1..txt3 its three
    # display strings; confirm against the templates that render them.
    name = models.CharField(max_length=50)
    txt1 = models.CharField(max_length=50)
    txt2 = models.CharField(max_length=50)
    txt3 = models.CharField(max_length=50)
class Posts(models.Model):
    """A blog post: title, excerpt, slug, rich-text body, and thumbnail."""
    title = models.CharField(max_length=70)
    # NOTE(review): "excrept" is presumably a typo for "excerpt"; renaming
    # the field would require a migration, so it is only flagged here.
    excrept = models.CharField(max_length=125)
    slug = models.SlugField(max_length=70)
    body = RichTextField()
    # auto_now=True refreshes the date on every save, not just creation —
    # use auto_now_add if "publication date" semantics are intended.
    date = models.DateField(auto_now=True)
    author = models.CharField(max_length=50)
    # NOTE(review): default=False is an unusual default for an ImageField
    # (a falsy non-path value) — confirm intent.
    thumb = models.ImageField(default= False, upload_to= 'media/')
    def __str__(self):
        return self.title
86f57f63d4d34b0d0e176e8e719d361e3e250b7a | 555 | py | Python | RecoHI/HiTracking/python/HIPixelTripletSeeds_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | RecoHI/HiTracking/python/HIPixelTripletSeeds_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | RecoHI/HiTracking/python/HIPixelTripletSeeds_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
# pixel track producer (with vertex)
from RecoHI.HiTracking.HIPixel3PrimTracks_cfi import *
from RecoTracker.TkSeedingLayers.PixelLayerTriplets_cfi import *
# pixel seeds
import RecoPixelVertexing.PixelLowPtUtilities.TrackSeeds_cfi
# Clone the generic pixel-track seed producer, pointing it at the
# heavy-ion primary pixel tracks as its input collection.
hiPixelTrackSeeds = RecoPixelVertexing.PixelLowPtUtilities.TrackSeeds_cfi.pixelTrackSeeds.clone(
    InputCollection = 'hiPixel3PrimTracks'
)
# Task/Sequence wrappers so the whole seeding step can be scheduled as a unit.
hiPrimSeedsTask = cms.Task( PixelLayerTriplets , hiPixel3PrimTracksTask , hiPixelTrackSeeds )
hiPrimSeeds = cms.Sequence(hiPrimSeedsTask)
| 37 | 96 | 0.852252 | 49 | 555 | 9.571429 | 0.653061 | 0.03838 | 0.200426 | 0.21322 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005917 | 0.086486 | 555 | 14 | 97 | 39.642857 | 0.919132 | 0.082883 | 0 | 0 | 0 | 0 | 0.035573 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.444444 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
86fee2c8abbf44a8074c14c1e73dc66f69b4ba71 | 412 | py | Python | pyautogui/find_click.py | kanokkorn/python-sktch | c43012ba3cbca0d9161eff26e8ccc3e5d2cb3aab | [
"MIT"
] | null | null | null | pyautogui/find_click.py | kanokkorn/python-sktch | c43012ba3cbca0d9161eff26e8ccc3e5d2cb3aab | [
"MIT"
] | 12 | 2018-11-19T02:50:16.000Z | 2020-03-01T15:04:31.000Z | pyautogui/find_click.py | kanokkorn/python-sktch | c43012ba3cbca0d9161eff26e8ccc3e5d2cb3aab | [
"MIT"
] | null | null | null | import pyautogui as pg
spotify_icon = pg.locateCenterOnScreen("./pyautogui/image/spotify.png", confidence=0.9)
pg.click(spotify_icon)
search_bar = pg.locateCenterOnScreen("./pyautogui/image/search_bar.png", confidence=0.9)
pg.click(search_bar)
pg.typewrite("narrative", interval=0.2)
pg.hotkey("enter")
narrative = pg.locateCenterOnScreen("./pyautogui/image/narrative.png", confidence=0.9)
pg.click(narrative)
| 34.333333 | 88 | 0.786408 | 57 | 412 | 5.596491 | 0.368421 | 0.206897 | 0.291536 | 0.338558 | 0.206897 | 0.206897 | 0 | 0 | 0 | 0 | 0 | 0.020619 | 0.058252 | 412 | 11 | 89 | 37.454545 | 0.801546 | 0 | 0 | 0 | 0 | 0 | 0.257908 | 0.223844 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
8106b69b95e8178a305441191f4c79c07bf50884 | 425 | py | Python | recompile/decorators.py | NiumXp/recompile | c92c46bcc9dda2983c2a0b6ea454271c471d3eb8 | [
"MIT"
] | 1 | 2022-03-25T18:48:24.000Z | 2022-03-25T18:48:24.000Z | recompile/decorators.py | NiumXp/recompile | c92c46bcc9dda2983c2a0b6ea454271c471d3eb8 | [
"MIT"
] | null | null | null | recompile/decorators.py | NiumXp/recompile | c92c46bcc9dda2983c2a0b6ea454271c471d3eb8 | [
"MIT"
] | null | null | null | from .utils import (
isint
)
def swap(
    idx,
    idx_or_op,
    **options,
):
    """Swap the entry at *idx* with the entry identified by *idx_or_op*.

    *idx_or_op* may be either a second index or an operation identifier;
    only the both-indices validation is implemented so far (the body is
    otherwise a stub, as in the original).

    Raises
    ------
    ValueError
        If both arguments are indices and refer to the same position —
        swapping an entry with itself is almost certainly a caller bug.
    """
    if isint(idx_or_op):
        if idx == idx_or_op:
            # Previously `raise ValueError()` with no message, which made
            # the failure impossible to diagnose from a traceback alone.
            raise ValueError(
                "cannot swap index %r with itself" % (idx,)
            )
    ...
def replace(
    idx_or_op_or_func,
    idx_or_op_or_seq=None,
    **options,
):
    """Replace one entry with another.

    When both positional arguments are plain indices the call is a swap
    and is delegated to ``swap`` unchanged; the remaining dispatch
    variants are not implemented yet.
    """
    # Short-circuit `and` preserved: isint() is only called on the second
    # argument when the first one is an index.
    if isint(idx_or_op_or_func) and isint(idx_or_op_or_seq):
        return swap(idx_or_op_or_func, idx_or_op_or_seq)
    ...
| 14.655172 | 60 | 0.555294 | 63 | 425 | 3.269841 | 0.301587 | 0.218447 | 0.305825 | 0.262136 | 0.558252 | 0.446602 | 0.242718 | 0.242718 | 0.242718 | 0.242718 | 0 | 0 | 0.343529 | 425 | 28 | 61 | 15.178571 | 0.738351 | 0 | 0 | 0.347826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.086957 | false | 0 | 0.043478 | 0 | 0.173913 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
8108e5ea7d8d70f11bc80a66ef723e99f9cf5848 | 171 | py | Python | thenewboston_node/project/settings/blockchain.py | nishp77/thenewboston-node | 158b1f1739b2c6c9c21c80e9da854ca141f1cf8f | [
"MIT"
] | 30 | 2021-03-05T22:08:17.000Z | 2021-09-23T02:45:45.000Z | thenewboston_node/project/settings/blockchain.py | nishp77/thenewboston-node | 158b1f1739b2c6c9c21c80e9da854ca141f1cf8f | [
"MIT"
] | 148 | 2021-03-05T23:37:50.000Z | 2021-11-02T02:18:58.000Z | thenewboston_node/project/settings/blockchain.py | nishp77/thenewboston-node | 158b1f1739b2c6c9c21c80e9da854ca141f1cf8f | [
"MIT"
] | 14 | 2021-03-05T21:58:46.000Z | 2021-10-15T17:27:52.000Z | BLOCKCHAIN = {
'class': 'thenewboston_node.business_logic.blockchain.file_blockchain.FileBlockchain',
'kwargs': {},
}
BLOCKCHAIN_URL_PATH_PREFIX = '/blockchain/'
| 24.428571 | 90 | 0.74269 | 16 | 171 | 7.5625 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116959 | 171 | 6 | 91 | 28.5 | 0.801325 | 0 | 0 | 0 | 0 | 0 | 0.567251 | 0.432749 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
810e67f544272b1e549fdda20a6add16946172f8 | 87,646 | py | Python | uhd_restpy/testplatform/sessions/ixnetwork/topology/pppoxclient_57c51b5ca094121e33c3a9ba5033980f.py | rfrye-github/ixnetwork_restpy | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | [
"MIT"
] | null | null | null | uhd_restpy/testplatform/sessions/ixnetwork/topology/pppoxclient_57c51b5ca094121e33c3a9ba5033980f.py | rfrye-github/ixnetwork_restpy | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | [
"MIT"
] | null | null | null | uhd_restpy/testplatform/sessions/ixnetwork/topology/pppoxclient_57c51b5ca094121e33c3a9ba5033980f.py | rfrye-github/ixnetwork_restpy | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | [
"MIT"
] | null | null | null | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class Pppoxclient(Base):
"""PPPoX Client
The Pppoxclient class encapsulates a list of pppoxclient resources that are managed by the user.
A list of resources can be retrieved from the server using the Pppoxclient.find() method.
The list can be managed by using the Pppoxclient.add() and Pppoxclient.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'pppoxclient'
_SDM_ATT_MAP = {
'AcMatchMac': 'acMatchMac',
'AcMatchName': 'acMatchName',
'AcOptions': 'acOptions',
'ActualRateDownstream': 'actualRateDownstream',
'ActualRateUpstream': 'actualRateUpstream',
'AgentAccessAggregationCircuitId': 'agentAccessAggregationCircuitId',
'AgentCircuitId': 'agentCircuitId',
'AgentRemoteId': 'agentRemoteId',
'AuthRetries': 'authRetries',
'AuthTimeout': 'authTimeout',
'AuthType': 'authType',
'ChapName': 'chapName',
'ChapSecret': 'chapSecret',
'ClientDnsOptions': 'clientDnsOptions',
'ClientLocalIp': 'clientLocalIp',
'ClientLocalIpv6Iid': 'clientLocalIpv6Iid',
'ClientNcpOptions': 'clientNcpOptions',
'ClientNetmask': 'clientNetmask',
'ClientNetmaskOptions': 'clientNetmaskOptions',
'ClientPrimaryDnsAddress': 'clientPrimaryDnsAddress',
'ClientSecondaryDnsAddress': 'clientSecondaryDnsAddress',
'ClientSignalIWF': 'clientSignalIWF',
'ClientSignalLoopChar': 'clientSignalLoopChar',
'ClientSignalLoopEncapsulation': 'clientSignalLoopEncapsulation',
'ClientSignalLoopId': 'clientSignalLoopId',
'ClientV6NcpOptions': 'clientV6NcpOptions',
'ClientWinsOptions': 'clientWinsOptions',
'ClientWinsPrimaryAddress': 'clientWinsPrimaryAddress',
'ClientWinsSecondaryAddress': 'clientWinsSecondaryAddress',
'ConnectSpeedUpdateEnable': 'connectSpeedUpdateEnable',
'ConnectedVia': 'connectedVia',
'Count': 'count',
'DataLink': 'dataLink',
'DescriptiveName': 'descriptiveName',
'DiscoveredIpv4Addresses': 'discoveredIpv4Addresses',
'DiscoveredIpv6Addresses': 'discoveredIpv6Addresses',
'DiscoveredMacs': 'discoveredMacs',
'DiscoveredRemoteSessionIds': 'discoveredRemoteSessionIds',
'DiscoveredRemoteTunnelIds': 'discoveredRemoteTunnelIds',
'DiscoveredSessionIds': 'discoveredSessionIds',
'DiscoveredTunnelIPs': 'discoveredTunnelIPs',
'DiscoveredTunnelIds': 'discoveredTunnelIds',
'DomainList': 'domainList',
'DslTypeTlv': 'dslTypeTlv',
'EchoReqInterval': 'echoReqInterval',
'EnableDomainGroups': 'enableDomainGroups',
'EnableEchoReq': 'enableEchoReq',
'EnableEchoRsp': 'enableEchoRsp',
'EnableHostUniq': 'enableHostUniq',
'EnableMaxPayload': 'enableMaxPayload',
'EnableRedial': 'enableRedial',
'Encaps1': 'encaps1',
'Encaps2': 'encaps2',
'EndpointDiscNegotiation': 'endpointDiscNegotiation',
'EndpointDiscriminatorClass': 'endpointDiscriminatorClass',
'Errors': 'errors',
'HostUniq': 'hostUniq',
'HostUniqLength': 'hostUniqLength',
'LcpAccm': 'lcpAccm',
'LcpEnableAccm': 'lcpEnableAccm',
'LcpMaxFailure': 'lcpMaxFailure',
'LcpRetries': 'lcpRetries',
'LcpStartDelay': 'lcpStartDelay',
'LcpTermRetries': 'lcpTermRetries',
'LcpTimeout': 'lcpTimeout',
'MaxPayload': 'maxPayload',
'MlpppIPAddress': 'mlpppIPAddress',
'MlpppMACAddress': 'mlpppMACAddress',
'Mrru': 'mrru',
'MrruNegotiation': 'mrruNegotiation',
'MruNegotiation': 'mruNegotiation',
'Mtu': 'mtu',
'Multiplier': 'multiplier',
'Name': 'name',
'NcpRetries': 'ncpRetries',
'NcpTimeout': 'ncpTimeout',
'NcpType': 'ncpType',
'PadiRetries': 'padiRetries',
'PadiTimeout': 'padiTimeout',
'PadrRetries': 'padrRetries',
'PadrTimeout': 'padrTimeout',
'PapPassword': 'papPassword',
'PapUser': 'papUser',
'PonTypeTlv': 'ponTypeTlv',
'RedialMax': 'redialMax',
'RedialTimeout': 'redialTimeout',
'RxConnectSpeed': 'rxConnectSpeed',
'ServiceName': 'serviceName',
'ServiceOptions': 'serviceOptions',
'SessionInfo': 'sessionInfo',
'SessionStatus': 'sessionStatus',
'StackedLayers': 'stackedLayers',
'StateCounts': 'stateCounts',
'Status': 'status',
'TxConnectSpeed': 'txConnectSpeed',
'UnlimitedRedialAttempts': 'unlimitedRedialAttempts',
'UserDefinedDslType': 'userDefinedDslType',
'UserDefinedPonType': 'userDefinedPonType',
}
    def __init__(self, parent):
        # Forward `parent` to the Base initializer; Base handles the node's
        # placement in the REST resource hierarchy.
        super(Pppoxclient, self).__init__(parent)
    @property
    def Bfdv4Interface(self):
        """
        Returns
        -------
        - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.bfdv4interface_91b557a3f744baf442dbe21ac75e8f2e.Bfdv4Interface): An instance of the Bfdv4Interface class

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Deferred import: the child-resource module is loaded only when the
        # accessor is used.  NOTE(review): presumably to avoid import cycles
        # among the generated topology modules — confirm.
        from uhd_restpy.testplatform.sessions.ixnetwork.topology.bfdv4interface_91b557a3f744baf442dbe21ac75e8f2e import Bfdv4Interface
        return Bfdv4Interface(self)
@property
def Bfdv6Interface(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.bfdv6interface_b9a91920db1b70c8c6410d2de0b438d3.Bfdv6Interface): An instance of the Bfdv6Interface class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.bfdv6interface_b9a91920db1b70c8c6410d2de0b438d3 import Bfdv6Interface
return Bfdv6Interface(self)
@property
def BgpIpv4Peer(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.bgpipv4peer_9dd9eddcf2bd784d82d8a016e392f035.BgpIpv4Peer): An instance of the BgpIpv4Peer class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.bgpipv4peer_9dd9eddcf2bd784d82d8a016e392f035 import BgpIpv4Peer
return BgpIpv4Peer(self)
@property
def BgpIpv6Peer(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.bgpipv6peer_d4ac277d9da759fd5a152b8e6eb0ab20.BgpIpv6Peer): An instance of the BgpIpv6Peer class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.bgpipv6peer_d4ac277d9da759fd5a152b8e6eb0ab20 import BgpIpv6Peer
return BgpIpv6Peer(self)
@property
def Connector(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b.Connector): An instance of the Connector class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b import Connector
return Connector(self)
# -- Child node accessors (auto-generated) -----------------------------------
# Each property below lazily imports its generated wrapper class and returns
# an instance rooted at this node (the wrapper receives `self` as parent).
# The hashed module names are emitted by the SDM code generator; do not edit
# them by hand.
@property
def Dhcpv6client(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.dhcpv6client_355391ba11ab3c1555c827e2e4ac3c4c.Dhcpv6client): An instance of the Dhcpv6client class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.dhcpv6client_355391ba11ab3c1555c827e2e4ac3c4c import Dhcpv6client
    return Dhcpv6client(self)
@property
def ECpriRe(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.ecprire_51f1030cbafd2e567d3b517032a1b011.ECpriRe): An instance of the ECpriRe class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.ecprire_51f1030cbafd2e567d3b517032a1b011 import ECpriRe
    return ECpriRe(self)
@property
def ECpriRec(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.ecprirec_129f1d43f285a4f806ade4e0df814255.ECpriRec): An instance of the ECpriRec class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.ecprirec_129f1d43f285a4f806ade4e0df814255 import ECpriRec
    return ECpriRec(self)
@property
def Geneve(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.geneve_14ab6f140956b4fc77d1d0f03c5e7514.Geneve): An instance of the Geneve class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.geneve_14ab6f140956b4fc77d1d0f03c5e7514 import Geneve
    return Geneve(self)
@property
def IgmpHost(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.igmphost_8940887674c0387469423e8df3a33854.IgmpHost): An instance of the IgmpHost class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.igmphost_8940887674c0387469423e8df3a33854 import IgmpHost
    return IgmpHost(self)
@property
def IgmpQuerier(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.igmpquerier_38c883b0cec7ffb5405af90bf1b8cda5.IgmpQuerier): An instance of the IgmpQuerier class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.igmpquerier_38c883b0cec7ffb5405af90bf1b8cda5 import IgmpQuerier
    return IgmpQuerier(self)
@property
def MldHost(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.mldhost_824a1bed927138d4bb32f7d2631197a5.MldHost): An instance of the MldHost class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.mldhost_824a1bed927138d4bb32f7d2631197a5 import MldHost
    return MldHost(self)
@property
def MldQuerier(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.mldquerier_e20671d730d138d65036e88d7cad63ac.MldQuerier): An instance of the MldQuerier class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.mldquerier_e20671d730d138d65036e88d7cad63ac import MldQuerier
    return MldQuerier(self)
@property
def MplsOam(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.mplsoam_e01bb6affe899a4731aa60619f4aeadc.MplsOam): An instance of the MplsOam class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.mplsoam_e01bb6affe899a4731aa60619f4aeadc import MplsOam
    return MplsOam(self)
@property
def NetconfClient(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.netconfclient_1eaa2ab0efacd988796bdc1f5fe4291c.NetconfClient): An instance of the NetconfClient class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.netconfclient_1eaa2ab0efacd988796bdc1f5fe4291c import NetconfClient
    return NetconfClient(self)
@property
def NetconfServer(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.netconfserver_ad256f8ca38068f1eaff839ed40b1e30.NetconfServer): An instance of the NetconfServer class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.netconfserver_ad256f8ca38068f1eaff839ed40b1e30 import NetconfServer
    return NetconfServer(self)
@property
def Ospfv2(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.ospfv2_27b7a27a991a50e01e629b9de482a2f0.Ospfv2): An instance of the Ospfv2 class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.ospfv2_27b7a27a991a50e01e629b9de482a2f0 import Ospfv2
    return Ospfv2(self)
@property
def Ospfv3(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.ospfv3_c029fd7cd4a9e9897b7b4e4547458751.Ospfv3): An instance of the Ospfv3 class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.ospfv3_c029fd7cd4a9e9897b7b4e4547458751 import Ospfv3
    return Ospfv3(self)
@property
def Pcc(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.pcc_9346785b55d17399fecd6fe36c418219.Pcc): An instance of the Pcc class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.pcc_9346785b55d17399fecd6fe36c418219 import Pcc
    return Pcc(self)
@property
def Pce(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.pce_bd5f6a11078a4f0deb5d56bef8e9674f.Pce): An instance of the Pce class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.pce_bd5f6a11078a4f0deb5d56bef8e9674f import Pce
    return Pce(self)
@property
def PimV4Interface(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.pimv4interface_92603cbceaf153039f7575ed9bc4aa67.PimV4Interface): An instance of the PimV4Interface class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.pimv4interface_92603cbceaf153039f7575ed9bc4aa67 import PimV4Interface
    return PimV4Interface(self)
@property
def PimV6Interface(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.pimv6interface_74a3aa08a315ca50732e853e3e8cdc43.PimV6Interface): An instance of the PimV6Interface class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.pimv6interface_74a3aa08a315ca50732e853e3e8cdc43 import PimV6Interface
    return PimV6Interface(self)
@property
def Tag(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.tag_e30f24de79247381d4dfd423b2f6986d.Tag): An instance of the Tag class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.tag_e30f24de79247381d4dfd423b2f6986d import Tag
    return Tag(self)
@property
def TlvProfile(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.tlvprofile_69db000d3ef3b060f5edc387b878736c.TlvProfile): An instance of the TlvProfile class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.tlvprofile_69db000d3ef3b060f5edc387b878736c import TlvProfile
    return TlvProfile(self)
@property
def Vxlan(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.vxlan_ed3df6fe7146492fc5fe0f77f53f9473.Vxlan): An instance of the Vxlan class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.vxlan_ed3df6fe7146492fc5fe0f77f53f9473 import Vxlan
    return Vxlan(self)
@property
def Vxlanv6(self):
    """
    Returns
    -------
    - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.vxlanv6_c18187deccae3db44b9e9de30ad538ec.Vxlanv6): An instance of the Vxlanv6 class
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    from uhd_restpy.testplatform.sessions.ixnetwork.topology.vxlanv6_c18187deccae3db44b9e9de30ad538ec import Vxlanv6
    return Vxlanv6(self)
# -- Multivalue attribute accessors (auto-generated) -------------------------
# Each property fetches the raw attribute value via the base class
# `_get_attribute` (keyed through `_SDM_ATT_MAP`) and wraps it in a
# `Multivalue` pattern object. The attribute-map keys are generator-owned.
@property
def AcMatchMac(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Presumably the Access Concentrator MAC address to match during PPPoE discovery — undocumented by the generator; TODO confirm against the IxNetwork API reference.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AcMatchMac']))
@property
def AcMatchName(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Presumably the Access Concentrator name to match during PPPoE discovery — undocumented by the generator; TODO confirm against the IxNetwork API reference.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AcMatchName']))
@property
def AcOptions(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Indicates PPPoE AC retrieval mode
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AcOptions']))
@property
def ActualRateDownstream(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): This parameter specifies the value to be included in the vendor specific PPPoE tag. It is the actual downstream data rate (sub-option 0x81), in kbps.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ActualRateDownstream']))
@property
def ActualRateUpstream(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): This parameter specifies the value to be included in the vendor specific PPPoE tag. It is the actual upstream data rate (sub-option 0x82), in kbps.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ActualRateUpstream']))
@property
def AgentAccessAggregationCircuitId(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The value to be inserted into the Agent Access-Aggregation-Circuit-ID-ASCII-Value field of the PPPoX tag.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AgentAccessAggregationCircuitId']))
@property
def AgentCircuitId(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The value to be inserted into the Agent Circuit ID field of the PPPoX tag.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AgentCircuitId']))
@property
def AgentRemoteId(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The value to be inserted into the Agent Remote ID field of the PPPoX tag.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AgentRemoteId']))
@property
def AuthRetries(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Number of PPP authentication retries
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AuthRetries']))
@property
def AuthTimeout(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Timeout for PPP authentication, in seconds.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AuthTimeout']))
@property
def AuthType(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The authentication type to use during link setup.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AuthType']))
@property
def ChapName(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): User name when CHAP Authentication is being used
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ChapName']))
@property
def ChapSecret(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Secret when CHAP Authentication is being used
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ChapSecret']))
@property
def ClientDnsOptions(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The client DNS options.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientDnsOptions']))
@property
def ClientLocalIp(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The requested IPv4 address.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientLocalIp']))
@property
def ClientLocalIpv6Iid(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The requested IPv6 Interface Identifier (IID).
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientLocalIpv6Iid']))
@property
def ClientNcpOptions(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The NCP configuration mode for IPv4 addressing.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientNcpOptions']))
@property
def ClientNetmask(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The netmask that the client will use with the assigned IP address.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientNetmask']))
@property
def ClientNetmaskOptions(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The client netmask option.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientNetmaskOptions']))
@property
def ClientPrimaryDnsAddress(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): This is the primary DNS server address that the client requests from the server when the value of the Client DNS Options field is set to 'Request Primary only' or 'Request Primary and Secondary'.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientPrimaryDnsAddress']))
@property
def ClientSecondaryDnsAddress(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): This is the secondary DNS server address that the client requests from the server when the value of the Client DNS Options field is set to 'Request Primary and Secondary'.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientSecondaryDnsAddress']))
@property
def ClientSignalIWF(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): This parameter enables or disables the insertion of sub-option 0xFE (signaling of interworked sessions) into the DSL tag in PADI and PADR packets.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientSignalIWF']))
@property
def ClientSignalLoopChar(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): This parameter enables or disables the insertion of sub-options 0x81 and 0x82 into the DSL tag in PADI and PADR packets.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientSignalLoopChar']))
@property
def ClientSignalLoopEncapsulation(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): This parameter enables or disables the insertion of sub-option 0x90 into the DSL tag in PADI and PADR packets.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientSignalLoopEncapsulation']))
@property
def ClientSignalLoopId(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): This parameter enables or disables the insertion of sub-options 0x01 , 0x02, 0x03 (Remote ID,Circuit ID and Access Aggregation Circuit ID) into the DSL tag in PADI and PADR packets.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientSignalLoopId']))
@property
def ClientV6NcpOptions(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The NCP configuration mode for IPv6 addressing.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientV6NcpOptions']))
@property
def ClientWinsOptions(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Specifies the mode in which WINS host addresses are configured.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientWinsOptions']))
@property
def ClientWinsPrimaryAddress(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Specifies the primary WINS address.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientWinsPrimaryAddress']))
@property
def ClientWinsSecondaryAddress(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Specifies the secondary WINS address.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ClientWinsSecondaryAddress']))
@property
def ConnectSpeedUpdateEnable(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): If checked, LAC will send Connect Speed Update Enable AVP in ICRQ control message
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ConnectSpeedUpdateEnable']))
# -- Plain attribute accessors (auto-generated) ------------------------------
# These return the raw attribute value directly (no Multivalue wrapper).
# Writable attributes additionally expose a setter that pushes the value to
# the server via the base-class `_set_attribute`.
@property
def ConnectedVia(self):
    """DEPRECATED
    Returns
    -------
    - list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of layers this layer is used to connect with to the wire.
    """
    return self._get_attribute(self._SDM_ATT_MAP['ConnectedVia'])
@ConnectedVia.setter
def ConnectedVia(self, value):
    # Writable despite the DEPRECATED getter note; forwards to the server.
    self._set_attribute(self._SDM_ATT_MAP['ConnectedVia'], value)
@property
def Count(self):
    """
    Returns
    -------
    - number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
    """
    return self._get_attribute(self._SDM_ATT_MAP['Count'])
@property
def DataLink(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): A one-byte field included with sub-option 0x90.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['DataLink']))
@property
def DescriptiveName(self):
    """
    Returns
    -------
    - str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])
@property
def DiscoveredIpv4Addresses(self):
    """
    Returns
    -------
    - list(str): The discovered IPv4 addresses.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DiscoveredIpv4Addresses'])
@property
def DiscoveredIpv6Addresses(self):
    """
    Returns
    -------
    - list(str): The discovered IPv6 addresses.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DiscoveredIpv6Addresses'])
@property
def DiscoveredMacs(self):
    """
    Returns
    -------
    - list(str): The discovered remote MAC address.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DiscoveredMacs'])
@property
def DiscoveredRemoteSessionIds(self):
    """
    Returns
    -------
    - list(number): Remote session ID.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DiscoveredRemoteSessionIds'])
@property
def DiscoveredRemoteTunnelIds(self):
    """
    Returns
    -------
    - list(number): Remote tunnel ID.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DiscoveredRemoteTunnelIds'])
@property
def DiscoveredSessionIds(self):
    """
    Returns
    -------
    - list(number): The negotiated session ID.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DiscoveredSessionIds'])
@property
def DiscoveredTunnelIPs(self):
    """
    Returns
    -------
    - list(str): The discovered remote tunnel IP.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DiscoveredTunnelIPs'])
@property
def DiscoveredTunnelIds(self):
    """
    Returns
    -------
    - list(number): The negotiated tunnel ID.
    """
    return self._get_attribute(self._SDM_ATT_MAP['DiscoveredTunnelIds'])
# -- Multivalue attribute accessors, continued (auto-generated) --------------
@property
def DomainList(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Configure domain group settings
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['DomainList']))
@property
def DslTypeTlv(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): DSL Type to be advertised in PPPoE VSA Tag. For undefined DSL type user has to select User-defined DSL Type.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['DslTypeTlv']))
@property
def EchoReqInterval(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Keep alive interval, in seconds
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EchoReqInterval']))
@property
def EnableDomainGroups(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Enable domain groups
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableDomainGroups']))
@property
def EnableEchoReq(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Presumably enables sending of LCP Echo-Request keep-alives (see EchoReqInterval) — undocumented by the generator; TODO confirm against the IxNetwork API reference.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableEchoReq']))
@property
def EnableEchoRsp(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Presumably enables responding to LCP Echo-Requests with Echo-Replies — undocumented by the generator; TODO confirm against the IxNetwork API reference.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableEchoRsp']))
@property
def EnableHostUniq(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Enables PPPoE Host-Uniq tag
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableHostUniq']))
@property
def EnableMaxPayload(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Enables PPPoE Max Payload tag
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableMaxPayload']))
@property
def EnableRedial(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): If checked, PPPoE redial is enabled
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableRedial']))
@property
def Encaps1(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): A one-byte field included with sub-option 0x90.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Encaps1']))
@property
def Encaps2(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): A one-byte field included with sub-option 0x90.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Encaps2']))
@property
def EndpointDiscNegotiation(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Enable Endpoint Discriminator Negotiation
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EndpointDiscNegotiation']))
@property
def EndpointDiscriminatorClass(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Endpoint Discriminator for PPP
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EndpointDiscriminatorClass']))
@property
def Errors(self):
    """
    Returns
    -------
    - list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str])): A list of errors that have occurred
    """
    return self._get_attribute(self._SDM_ATT_MAP['Errors'])
@property
def HostUniq(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Indicates Host-Uniq Tag
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HostUniq']))
@property
def HostUniqLength(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Host-Uniq Length, in bytes
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HostUniqLength']))
@property
def LcpAccm(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Async-Control-Character-Map
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LcpAccm']))
@property
def LcpEnableAccm(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Enable Async-Control-Character-Map
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LcpEnableAccm']))
@property
def LcpMaxFailure(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Number of Configure-Nak packets sent without sending a Configure-Ack before assuming that configuration is not converging. Any further Configure-Nak packets for peer requested options are converted to Configure-Reject packets
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LcpMaxFailure']))
@property
def LcpRetries(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Number of LCP retries
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LcpRetries']))
@property
def LcpStartDelay(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Delay time in milliseconds to wait before sending LCP Config Request packet
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LcpStartDelay']))
@property
def LcpTermRetries(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Number of LCP Termination Retries
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LcpTermRetries']))
@property
def LcpTimeout(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Timeout for LCP phase, in seconds
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LcpTimeout']))
@property
def MaxPayload(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Max Payload
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['MaxPayload']))
@property
def MlpppIPAddress(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The IP address used in the ML-PPP endpoint discriminator option of the LCP configure request sent by PPP clients
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['MlpppIPAddress']))
@property
def MlpppMACAddress(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): The MAC addresses are automatically derived from the local MAC address. An address in this class contains an IEEE 802.1 MAC address is canonical (802.3) format
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['MlpppMACAddress']))
@property
def Mrru(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Max Receive Reconstructed Unit for PPP
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Mrru']))
@property
def MrruNegotiation(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Enable MRRU Negotiation
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['MrruNegotiation']))
@property
def MruNegotiation(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Enable MRU Negotiation
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['MruNegotiation']))
@property
def Mtu(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Max Transmit Unit for PPP
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Mtu']))
# -- Writable scalar attributes (auto-generated): getter + setter pairs ------
@property
def Multiplier(self):
    """
    Returns
    -------
    - number: Number of layer instances per parent instance (multiplier)
    """
    return self._get_attribute(self._SDM_ATT_MAP['Multiplier'])
@Multiplier.setter
def Multiplier(self, value):
    # Pushes the new multiplier to the server via the base class.
    self._set_attribute(self._SDM_ATT_MAP['Multiplier'], value)
@property
def Name(self):
    """
    Returns
    -------
    - str: Name of NGPF element, guaranteed to be unique in Scenario
    """
    return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
    # Pushes the new name to the server via the base class.
    self._set_attribute(self._SDM_ATT_MAP['Name'], value)
# -- Multivalue attribute accessors, continued (auto-generated) --------------
@property
def NcpRetries(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Number of NCP retries
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['NcpRetries']))
@property
def NcpTimeout(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Timeout for NCP phase, in seconds
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['NcpTimeout']))
@property
def NcpType(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): IP address type (IPv4 or IPv6) for Network Control Protocol
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['NcpType']))
@property
def PadiRetries(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Number of PADI Retries
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PadiRetries']))
@property
def PadiTimeout(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Timeout for PADI no response, in seconds
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PadiTimeout']))
@property
def PadrRetries(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Number of PADR Retries
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PadrRetries']))
@property
def PadrTimeout(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Timeout for PADR no response, in seconds
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PadrTimeout']))
@property
def PapPassword(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Password when PAP Authentication is being used
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PapPassword']))
@property
def PapUser(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): User name when PAP Authentication is being used
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PapUser']))
@property
def PonTypeTlv(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): PON Type to be advertised in PPPoE VSA Tag. For undefined PON type user has to select User-defined PON Type.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PonTypeTlv']))
@property
def RedialMax(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Maximum number of PPPoE redials
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RedialMax']))
@property
def RedialTimeout(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): PPPoE redial timeout, in seconds
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RedialTimeout']))
@property
def RxConnectSpeed(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Rx Connection Speed
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RxConnectSpeed']))
@property
def ServiceName(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Access Concentrator Service Name - this option is only available for PPP servers.
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ServiceName']))
@property
def ServiceOptions(self):
    """
    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue): Indicates PPPoE service retrieval mode
    """
    from uhd_restpy.multivalue import Multivalue
    return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ServiceOptions']))
@property
def SessionInfo(self):
"""
Returns
-------
- list(str[cLS_CFG_REJ_AUTH | cLS_CHAP_PEER_DET_FAIL | cLS_CHAP_PEER_RESP_BAD | cLS_CODE_REJ_IPCP | cLS_CODE_REJ_IPV6CP | cLS_CODE_REJ_LCP | cLS_ERR_PPP_NO_BUF | cLS_ERR_PPP_SEND_PKT | cLS_LINK_DISABLE | cLS_LOC_IPADDR_BROADCAST | cLS_LOC_IPADDR_CLASS_E | cLS_LOC_IPADDR_INVAL_ACKS_0 | cLS_LOC_IPADDR_INVAL_ACKS_DIFF | cLS_LOC_IPADDR_LOOPBACK | cLS_LOC_IPADDR_PEER_MATCH_LOC | cLS_LOC_IPADDR_PEER_NO_GIVE | cLS_LOC_IPADDR_PEER_NO_HELP | cLS_LOC_IPADDR_PEER_NO_TAKE | cLS_LOC_IPADDR_PEER_REJ | cLS_LOOPBACK_DETECT | cLS_NO_NCP | cLS_NONE | cLS_PAP_BAD_PASSWD | cLS_PEER_DISCONNECTED | cLS_PEER_DISCONNECTED_NEGO | cLS_PEER_IPADDR_MATCH_LOC | cLS_PEER_IPADDR_PEER_NO_SET | cLS_PPOE_AC_SYSTEM_ERROR | cLS_PPOE_GENERIC_ERROR | cLS_PPP_DISABLE | cLS_PPPOE_NO_HOST_UNIQ | cLS_PPPOE_PADI_TIMEOUT | cLS_PPPOE_PADO_TIMEOUT | cLS_PPPOE_PADR_TIMEOUT | cLS_PROTO_REJ_IPCP | cLS_PROTO_REJ_IPv6CP | cLS_TIMEOUT_CHAP_CHAL | cLS_TIMEOUT_CHAP_RESP | cLS_TIMEOUT_IPCP_CFG_REQ | cLS_TIMEOUT_IPV6CP_CFG_REQ | cLS_TIMEOUT_IPV6CP_RA | cLS_TIMEOUT_LCP_CFG_REQ | cLS_TIMEOUT_LCP_ECHO_REQ | cLS_TIMEOUT_PAP_AUTH_REQ | cLS_TUN_AUTH_FAILED | cLS_TUN_NO_RESOURCES | cLS_TUN_TIMEOUT_ICRQ | cLS_TUN_TIMEOUT_SCCRQ | cLS_TUN_VENDOR_SPECIFIC_ERR]): Logs additional information about the session state
"""
return self._get_attribute(self._SDM_ATT_MAP['SessionInfo'])
@property
def SessionStatus(self):
    """
    Returns
    -------
    - list(str[down | notStarted | up]): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation didn't successfully complete (yet). Up - session came up successfully.
    """
    return self._get_attribute(self._SDM_ATT_MAP['SessionStatus'])
@property
def StackedLayers(self):
    """List of secondary (many to one) child layer protocols.

    Returns
    -------
    - list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])
    """
    stacked_layers_key = self._SDM_ATT_MAP['StackedLayers']
    return self._get_attribute(stacked_layers_key)
@StackedLayers.setter
def StackedLayers(self, value):
    # Push the new layer list to the server-side attribute.
    self._set_attribute(self._SDM_ATT_MAP['StackedLayers'], value)
@property
def StateCounts(self):
    """Tally of protocol session states.

    Returns
    -------
    - dict(total:number,notStarted:number,down:number,up:number): the total
      number of sessions plus the counts of sessions not started, down, and up.
    """
    state_counts_key = self._SDM_ATT_MAP['StateCounts']
    return self._get_attribute(state_counts_key)
@property
def Status(self):
    """Running status of the associated network element.

    Returns
    -------
    - str(configured | error | mixed | notStarted | started | starting | stopping):
      once in Started state, protocol sessions will begin to negotiate.
    """
    status_key = self._SDM_ATT_MAP['Status']
    return self._get_attribute(status_key)
@property
def TxConnectSpeed(self):
    """Tx Connection Speed.

    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue)
    """
    from uhd_restpy.multivalue import Multivalue
    raw_value = self._get_attribute(self._SDM_ATT_MAP['TxConnectSpeed'])
    return Multivalue(self, raw_value)
@property
def UnlimitedRedialAttempts(self):
    """If checked, PPPoE unlimited redial attempts is enabled.

    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue)
    """
    from uhd_restpy.multivalue import Multivalue
    raw_value = self._get_attribute(self._SDM_ATT_MAP['UnlimitedRedialAttempts'])
    return Multivalue(self, raw_value)
@property
def UserDefinedDslType(self):
    """User Defined DSL-Type Value.

    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue)
    """
    from uhd_restpy.multivalue import Multivalue
    raw_value = self._get_attribute(self._SDM_ATT_MAP['UserDefinedDslType'])
    return Multivalue(self, raw_value)
@property
def UserDefinedPonType(self):
    """User Defined PON-Type Value.

    Returns
    -------
    - obj(uhd_restpy.multivalue.Multivalue)
    """
    from uhd_restpy.multivalue import Multivalue
    raw_value = self._get_attribute(self._SDM_ATT_MAP['UserDefinedPonType'])
    return Multivalue(self, raw_value)
def update(self, ConnectedVia=None, Multiplier=None, Name=None, StackedLayers=None):
    """Updates pppoxclient resource on the server.

    Some named parameters have type obj (Multivalue); the Multivalue class
    documentation details the possible values for those parameters.

    Args
    ----
    - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
    - Multiplier (number): Number of layer instances per parent instance (multiplier)
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
    - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() is captured before binding any new name so that only the
    # caller-supplied arguments are mapped onto SDM attribute names.
    attributes = self._map_locals(self._SDM_ATT_MAP, locals())
    return self._update(attributes)
def add(self, ConnectedVia=None, Multiplier=None, Name=None, StackedLayers=None):
    """Adds a new pppoxclient resource on the server and adds it to the container.

    Args
    ----
    - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
    - Multiplier (number): Number of layer instances per parent instance (multiplier)
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
    - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols

    Returns
    -------
    - self: This instance with all currently retrieved pppoxclient resources using find and the newly added pppoxclient resources available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() is captured before binding any new name so that only the
    # caller-supplied arguments are mapped onto SDM attribute names.
    creation_attributes = self._map_locals(self._SDM_ATT_MAP, locals())
    return self._create(creation_attributes)
def remove(self):
    """Deletes all the contained pppoxclient resources in this instance from the server.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    self._delete()
def find(self, ConnectedVia=None, Count=None, DescriptiveName=None, DiscoveredIpv4Addresses=None, DiscoveredIpv6Addresses=None, DiscoveredMacs=None, DiscoveredRemoteSessionIds=None, DiscoveredRemoteTunnelIds=None, DiscoveredSessionIds=None, DiscoveredTunnelIPs=None, DiscoveredTunnelIds=None, Errors=None, Multiplier=None, Name=None, SessionInfo=None, SessionStatus=None, StackedLayers=None, StateCounts=None, Status=None):
    """Finds and retrieves pppoxclient resources from the server.

    All named parameters are evaluated on the server using regex and can be
    used to selectively retrieve pppoxclient resources. To retrieve an exact
    match ensure the parameter value starts with ^ and ends with $. With no
    parameters, all pppoxclient resources are retrieved.

    Args
    ----
    - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
    - Count (number): Number of elements inside associated multiplier-scaled container object.
    - DescriptiveName (str): Longer, more descriptive (non-unique) name for the element.
    - DiscoveredIpv4Addresses (list(str)): The discovered IPv4 addresses.
    - DiscoveredIpv6Addresses (list(str)): The discovered IPv6 addresses.
    - DiscoveredMacs (list(str)): The discovered remote MAC address.
    - DiscoveredRemoteSessionIds (list(number)): Remote session ID.
    - DiscoveredRemoteTunnelIds (list(number)): Remote tunnel ID.
    - DiscoveredSessionIds (list(number)): The negotiated session ID.
    - DiscoveredTunnelIPs (list(str)): The discovered remote tunnel IP.
    - DiscoveredTunnelIds (list(number)): The negotiated tunnel ID.
    - Errors (list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str]))): A list of errors that have occurred.
    - Multiplier (number): Number of layer instances per parent instance (multiplier).
    - Name (str): Name of NGPF element, guaranteed to be unique in Scenario.
    - SessionInfo (list(str)): cLS_* session state codes; see the IxNetwork model for the complete enumeration.
    - SessionStatus (list(str[down | notStarted | up])): Current state of the protocol session.
    - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols.
    - StateCounts (dict(total:number,notStarted:number,down:number,up:number)): Session state tally.
    - Status (str(configured | error | mixed | notStarted | started | starting | stopping)): Running status of associated network element.

    Returns
    -------
    - self: This instance with matching pppoxclient resources retrieved from the server available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() is captured before binding any new name so that only the
    # caller-supplied filters reach the server-side select.
    match_criteria = self._map_locals(self._SDM_ATT_MAP, locals())
    return self._select(match_criteria)
def read(self, href):
    """Retrieves a single instance of pppoxclient data from the server.

    Args
    ----
    - href (str): An href to the instance to be retrieved

    Returns
    -------
    - self: This instance with the pppoxclient resources from the server available through an iterator or index

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    return self._read(href)
def get_device_ids(self, PortNames=None, AcMatchMac=None, AcMatchName=None, AcOptions=None, ActualRateDownstream=None, ActualRateUpstream=None, AgentAccessAggregationCircuitId=None, AgentCircuitId=None, AgentRemoteId=None, AuthRetries=None, AuthTimeout=None, AuthType=None, ChapName=None, ChapSecret=None, ClientDnsOptions=None, ClientLocalIp=None, ClientLocalIpv6Iid=None, ClientNcpOptions=None, ClientNetmask=None, ClientNetmaskOptions=None, ClientPrimaryDnsAddress=None, ClientSecondaryDnsAddress=None, ClientSignalIWF=None, ClientSignalLoopChar=None, ClientSignalLoopEncapsulation=None, ClientSignalLoopId=None, ClientV6NcpOptions=None, ClientWinsOptions=None, ClientWinsPrimaryAddress=None, ClientWinsSecondaryAddress=None, ConnectSpeedUpdateEnable=None, DataLink=None, DomainList=None, DslTypeTlv=None, EchoReqInterval=None, EnableDomainGroups=None, EnableEchoReq=None, EnableEchoRsp=None, EnableHostUniq=None, EnableMaxPayload=None, EnableRedial=None, Encaps1=None, Encaps2=None, EndpointDiscNegotiation=None, EndpointDiscriminatorClass=None, HostUniq=None, HostUniqLength=None, LcpAccm=None, LcpEnableAccm=None, LcpMaxFailure=None, LcpRetries=None, LcpStartDelay=None, LcpTermRetries=None, LcpTimeout=None, MaxPayload=None, MlpppIPAddress=None, MlpppMACAddress=None, Mrru=None, MrruNegotiation=None, MruNegotiation=None, Mtu=None, NcpRetries=None, NcpTimeout=None, NcpType=None, PadiRetries=None, PadiTimeout=None, PadrRetries=None, PadrTimeout=None, PapPassword=None, PapUser=None, PonTypeTlv=None, RedialMax=None, RedialTimeout=None, RxConnectSpeed=None, ServiceName=None, ServiceOptions=None, TxConnectSpeed=None, UnlimitedRedialAttempts=None, UserDefinedDslType=None, UserDefinedPonType=None):
    """Base class infrastructure that gets a list of pppoxclient device ids encapsulated by this object.

    Every parameter besides self is an optional regex (str). Supplying one
    refines the returned device ids to those whose corresponding attribute
    (same name, camelCase on the server, e.g. AcMatchMac -> acMatchMac)
    matches the regex. PortNames filters on port names.

    Returns
    -------
    - list(int): A list of device ids that meets the regex criteria provided in the method parameters

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() passes exactly the caller-supplied regex filters downstream.
    return self._get_ngpf_device_ids(locals())
def Abort(self, *args, **kwargs):
    """Executes the abort operation on the server.

    Abort CPF control plane (equals to demote to kUnconfigured state).

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    abort(SessionIndices=list)
    - SessionIndices (list(number)): an array of session numbers, e.g. 1 2 3
    abort(SessionIndices=string)
    - SessionIndices (str): a string of session numbers, e.g. 1-4;6;7-12

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('abort', payload=payload, response_object=None)
def CloseIpcp(self, *args, **kwargs):
    """Executes the closeIpcp operation on the server.

    Close IPCP for selected PPPoX items.

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    closeIpcp(SessionIndices=list)list
    - SessionIndices (list(number)): an array of session numbers, e.g. 1 2 3
    closeIpcp(SessionIndices=string)list
    - SessionIndices (str): a string of session numbers, e.g. 1-4;6;7-12
    Both forms return list(dict(port:str[None | /api/v1/sessions/1/ixnetwork/vport],isSuccess:bool,data:str)),
    one structure per /vport; this exec is not asynchronous.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('closeIpcp', payload=payload, response_object=None)
def CloseIpv6cp(self, *args, **kwargs):
    """Executes the closeIpv6cp operation on the server.

    Close IPv6CP for selected PPPoX items.

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    closeIpv6cp(SessionIndices=list)list
    - SessionIndices (list(number)): an array of session numbers, e.g. 1 2 3
    closeIpv6cp(SessionIndices=string)list
    - SessionIndices (str): a string of session numbers, e.g. 1-4;6;7-12
    Both forms return list(dict(port:str[None | /api/v1/sessions/1/ixnetwork/vport],isSuccess:bool,data:str)),
    one structure per /vport; this exec is not asynchronous.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('closeIpv6cp', payload=payload, response_object=None)
def OpenIpcp(self, *args, **kwargs):
    """Executes the openIpcp operation on the server.

    Open IPCP for selected PPPoX items.

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    openIpcp(SessionIndices=list)list
    - SessionIndices (list(number)): an array of session numbers, e.g. 1 2 3
    openIpcp(SessionIndices=string)list
    - SessionIndices (str): a string of session numbers, e.g. 1-4;6;7-12
    Both forms return list(dict(port:str[None | /api/v1/sessions/1/ixnetwork/vport],isSuccess:bool,data:str)),
    one structure per /vport; this exec is not asynchronous.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('openIpcp', payload=payload, response_object=None)
def OpenIpv6cp(self, *args, **kwargs):
    """Executes the openIpv6cp operation on the server.

    Open IPv6CP for selected PPPoX items.

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    openIpv6cp(SessionIndices=list)list
    - SessionIndices (list(number)): an array of session numbers, e.g. 1 2 3
    openIpv6cp(SessionIndices=string)list
    - SessionIndices (str): a string of session numbers, e.g. 1-4;6;7-12
    Both forms return list(dict(port:str[None | /api/v1/sessions/1/ixnetwork/vport],isSuccess:bool,data:str)),
    one structure per /vport; this exec is not asynchronous.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('openIpv6cp', payload=payload, response_object=None)
def RestartDown(self, *args, **kwargs):
    """Executes the restartDown operation on the server.

    Stop and start interfaces and sessions that are in Down state.

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    restartDown(SessionIndices=list)
    - SessionIndices (list(number)): an array of session numbers, e.g. 1 2 3
    restartDown(SessionIndices=string)
    - SessionIndices (str): a string of session numbers, e.g. 1-4;6;7-12

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('restartDown', payload=payload, response_object=None)
def SendPing(self, *args, **kwargs):
    """Executes the sendPing operation on the server.

    Send Ping IPv4 for selected PPPoX items.

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    sendPing(DestIp=string)list
    sendPing(DestIp=string, SessionIndices=list)list
    sendPing(SessionIndices=string, DestIp=string)list
    - DestIp (str): the destination IP to ping (kString)
    - SessionIndices (list(number) or str): session numbers either as an
      array (1 2 3) or as a range string (1-4;6;7-12)
    Each form returns list(dict(port:str[None | /api/v1/sessions/1/ixnetwork/vport],isSuccess:bool,data:str)),
    one structure per /vport; this exec is not asynchronous.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Positional args map onto Arg2..ArgN; keyword args are passed by name.
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('sendPing', payload=payload, response_object=None)
def SendPing6(self, *args, **kwargs):
    """Executes the sendPing6 operation on the server.

    Send Ping IPv6 for selected PPPoX items.

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    sendPing6(DestIp=string)list
    sendPing6(DestIp=string, SessionIndices=list)list
    sendPing6(SessionIndices=string, DestIp=string)list
    - DestIp (str): the destination IP to ping (kString)
    - SessionIndices (list(number) or str): session numbers either as an
      array (1 2 3) or as a range string (1-4;6;7-12)
    Each form returns list(dict(port:str[None | /api/v1/sessions/1/ixnetwork/vport],isSuccess:bool,data:str)),
    one structure per /vport; this exec is not asynchronous.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Positional args map onto Arg2..ArgN; keyword args are passed by name.
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('sendPing6', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
    """Executes the start operation on the server.

    Start CPF control plane (equals to promote to negotiated state).

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    start(SessionIndices=list)
    - SessionIndices (list(number)): an array of session numbers, e.g. 1 2 3
    start(SessionIndices=string)
    - SessionIndices (str): a string of session numbers, e.g. 1-4;6;7-12

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
    """Executes the stop operation on the server.

    Stop CPF control plane (equals to demote to PreValidated-DoDDone state).

    The IxNetwork model allows multiple signatures with the same name while
    python does not:
    stop(SessionIndices=list)
    - SessionIndices (list(number)): an array of session numbers, e.g. 1 2 3
    stop(SessionIndices=string)
    - SessionIndices (str): a string of session numbers, e.g. 1-4;6;7-12

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('stop', payload=payload, response_object=None)
| 44.831714 | 1,710 | 0.652557 | 9,158 | 87,646 | 6.116947 | 0.078183 | 0.03406 | 0.053589 | 0.031864 | 0.660883 | 0.645495 | 0.631536 | 0.614184 | 0.558525 | 0.496671 | 0 | 0.0213 | 0.24899 | 87,646 | 1,954 | 1,711 | 44.854657 | 0.829756 | 0.494501 | 0 | 0.354103 | 0 | 0 | 0.127922 | 0.030768 | 0 | 0 | 0 | 0 | 0 | 1 | 0.220365 | false | 0.006079 | 0.162614 | 0 | 0.600304 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
8120d40063abd2e8f6e2a7d3cc01ed85e374088c | 67,820 | py | Python | xanalysis_realsph_paper_figs.py | kseetharam/genPolaron | b4eb05c595f1dc7151aa564f56fcfbdeded570c5 | [
"MIT"
] | null | null | null | xanalysis_realsph_paper_figs.py | kseetharam/genPolaron | b4eb05c595f1dc7151aa564f56fcfbdeded570c5 | [
"MIT"
] | null | null | null | xanalysis_realsph_paper_figs.py | kseetharam/genPolaron | b4eb05c595f1dc7151aa564f56fcfbdeded570c5 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import xarray as xr
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib.lines import Line2D
from matplotlib.patches import Patch, Ellipse, Circle
from matplotlib.legend_handler import HandlerPatch
import matplotlib.colors as colors
from matplotlib.ticker import FormatStrFormatter
import os
import itertools
import pf_dynamic_cart as pfc
import pf_dynamic_sph as pfs
import pf_static_sph as pss
import Grid
import warnings
from scipy import interpolate
from scipy.optimize import curve_fit, OptimizeWarning, fsolve
from scipy.integrate import simps
import scipy.stats as ss
from timeit import default_timer as timer
from copy import copy
from matplotlib.ticker import NullFormatter
import colors as col
if __name__ == "__main__":
    # Paper-figure analysis script for real-time spherical-grid polaron dynamics.
    # This top-level section sets up plotting defaults, reconstructs the grid
    # constants used to name the data folders, loads the quench dataset for one
    # interaction strength, and derives the characteristic BEC scales (speed of
    # sound, healing length) used to normalize all later plots.

    # # Initialization
    # matplotlib.rcParams.update({'font.size': 12, 'text.usetex': True})
    mpegWriter = animation.writers['ffmpeg'](fps=2, bitrate=1800)
    # plt.rcParams['animation.ffmpeg_path'] = '/usr/bin/ffmpeg'
    # Writer = animation.writers['ffmpeg']
    # mpegWriter = Writer(fps=20, metadata=dict(artist='Me'), bitrate=1800)
    matplotlib.rcParams.update({'font.size': 16, 'font.family': 'Times New Roman', 'text.usetex': True, 'mathtext.fontset': 'dejavuserif'})

    # ---- INITIALIZE GRIDS ----
    # Cartesian box half-lengths and spacings; these only enter through
    # NGridPoints_cart and k_max below, which parameterize the data-folder names.
    (Lx, Ly, Lz) = (60, 60, 60)
    (dx, dy, dz) = (0.25, 0.25, 0.25)
    # NOTE(review): an earlier duplicate set of assignments (higherCutoff=False,
    # cutoffRat=1.0, betterResolution=False, resRat=1.0) was immediately
    # shadowed by the values below without ever being read; it has been removed
    # as dead code. The values below are the ones that take effect.
    higherCutoff = False
    cutoffRat = 1.5
    betterResolution = True
    resRat = 0.5
    # (Lx, Ly, Lz) = (40, 40, 40)
    # (dx, dy, dz) = (0.25, 0.25, 0.25)
    # (Lx, Ly, Lz) = (21, 21, 21)
    # (dx, dy, dz) = (0.375, 0.375, 0.375)
    NGridPoints_cart = (1 + 2 * Lx / dx) * (1 + 2 * Ly / dy) * (1 + 2 * Lz / dz)
    # NGridPoints_cart = 1.37e5
    # Spherical momentum cutoff chosen so the k-space ball matches the
    # cartesian grid's k-space volume.
    k_max = ((2 * np.pi / dx)**3 / (4 * np.pi / 3))**(1 / 3)
    linDimMajor = 0.99 * (k_max * np.sqrt(2) / 2)
    linDimMinor = linDimMajor
    massRat = 1.0  # impurity-to-boson mass ratio mI/mB selecting the data folder
    IRrat = 1
    # git test
    # Toggle parameters
    toggleDict = {'Dynamics': 'real', 'Interaction': 'on', 'Grid': 'spherical', 'Coupling': 'twophonon', 'noCSAmp': True}

    # ---- SET OUTPUT DATA FOLDER ----
    # Data-folder naming convention: base path encodes grid size, with optional
    # suffixes for cutoff/resolution variants, mass ratio, and toggles.
    datapath = '/Users/kis/Dropbox/VariationalResearch/HarvardOdyssey/genPol_data/NGridPoints_{:.2E}'.format(NGridPoints_cart)
    animpath = '/Users/kis/Dropbox/VariationalResearch/DataAnalysis/figs'
    if higherCutoff:
        datapath = datapath + '_cutoffRat_{:.2f}'.format(cutoffRat)
    if betterResolution:
        datapath = datapath + '_resRat_{:.2f}'.format(resRat)
    datapath = datapath + '/massRatio={:.1f}'.format(massRat)
    # distdatapath keeps the pre-'_noCSAmp' path: distribution data lives there.
    distdatapath = copy(datapath)
    if toggleDict['noCSAmp']:
        datapath = datapath + '_noCSAmp'
    innerdatapath = datapath + '/redyn_spherical'
    distdatapath = distdatapath + '/redyn_spherical'
    if toggleDict['Coupling'] == 'frohlich':
        innerdatapath = innerdatapath + '_froh_new'
        distdatapath = distdatapath + '_froh'
        animpath = animpath + '/rdyn_frohlich'
    else:
        animpath = animpath + '/rdyn_twophonon'
    # figdatapath = '/Users/kis/Dropbox/Apps/Overleaf/Quantum Cherenkov Transition in Bose Polaron Systems/figures/figdump'
    figdatapath = '/Users/kis/Dropbox/Apps/Overleaf/Cherenkov Polaron Paper pt1/figures/figdump'

    # # Analysis of Total Dataset
    # Load the quench dataset for one interaction strength aIBi (inverse
    # impurity-boson scattering length) and pull out coordinates/attributes.
    aIBi = -10
    qds = xr.open_dataset(innerdatapath + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi))
    qds_aIBi = qds
    PVals = qds['P'].values   # total-momentum grid
    tVals = qds['t'].values   # time grid
    n0 = qds.attrs['n0']      # BEC density
    gBB = qds.attrs['gBB']    # boson-boson coupling
    mI = qds.attrs['mI']      # impurity mass
    mB = qds.attrs['mB']      # boson mass
    # Characteristic BEC scales used to normalize momenta, lengths and times.
    nu = np.sqrt(n0 * gBB / mB)          # speed of sound c
    mc = mI * nu                         # Landau critical momentum mI*c
    aBB = (mB / (4 * np.pi)) * gBB       # boson-boson scattering length
    xi = (8 * np.pi * n0 * aBB)**(-1 / 2)  # healing length
    tscale = xi / nu                     # characteristic time xi/c
    Pnorm = PVals / mc
    kArray = qds.coords['k'].values
    k0 = kArray[0]   # IR (smallest) momentum on the grid
    kf = kArray[-1]  # UV (largest) momentum on the grid
    print(aIBi * xi)
    print(mI / mB, IRrat)
    # IR/UV length scales of the momentum grid in units of the healing length.
    IR_lengthscale = 1 / (k0 / (2 * np.pi)) / xi
    UV_lengthscale = 1 / (kf / (2 * np.pi)) / xi
    print(k0, 1 / IR_lengthscale, IR_lengthscale)
    print(kf, 1 / UV_lengthscale, UV_lengthscale)
    # aIBi_Vals = np.array([-10.0, -5.0, -2.0, -1.0, -0.75, -0.5])
    aIBi_Vals = np.array([-10.0, -5.0, -2.0])
    # Rebuild the 2D spherical (k, theta) grid from the dataset coordinates.
    kgrid = Grid.Grid("SPHERICAL_2D")
    kgrid.initArray_premade('k', qds.coords['k'].values)
    kgrid.initArray_premade('th', qds.coords['th'].values)
    kVals = kgrid.getArray('k')
    wk_Vals = pfs.omegak(kVals, mB, n0, gBB)  # Bogoliubov dispersion on the grid
    # Find the momentum where the dispersion deviates ~1% from the linear
    # (phononic) branch nu*k; tlin is the corresponding oscillation period.
    bdiff = 100 * np.abs(wk_Vals - nu * kVals) / (nu * kVals)
    kind = np.abs(bdiff - 1).argmin().astype(int)  # index of ~1% deviation point
    klin = kVals[kind]
    tlin = 2 * np.pi / (nu * kVals[kind])
    tlin_norm = tlin / tscale
    print(klin, tlin_norm)
    print(90 / tscale, 100 / tscale)
    print(kVals[-1], kVals[1] - kVals[0])
    print(qds.attrs['k_mag_cutoff'] * xi)
    print('Np: {0}'.format(qds.coords['k'].values.size * qds.coords['th'].values.size))
# # # # # # #############################################################################################################################
# # # # # # FIG 3 - S(t) CURVES - PRL
# # # # # #############################################################################################################################
# red = col.red.ashexstring()
# green = col.green.ashexstring()
# blue = col.blue.ashexstring()
# colorList = [red, green, blue]
# matplotlib.rcParams.update({'font.size': 12})
# tailFit = True
# logScale = True
# PimpData_roll = False; PimpData_rollwin = 2
# longTime = True
# # tau = 100; tfCutoff = 90; tfstart = 10
# tau = 300; tfCutoff = 200; tfstart = 10
# aIBi_weak = -10.0
# print(aIBi_weak * xi)
# if longTime:
# innerdatapath_longtime = datapath + '_longtime/redyn_spherical'
# qds_w = xr.open_dataset(innerdatapath_longtime + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi_weak))
# else:
# qds_w = xr.open_dataset(innerdatapath + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi_weak))
# tVals = qds_w['t'].values
# tsVals = tVals[tVals < tau]
# qds_aIBi_ts_w = qds_w.sel(t=tsVals)
# Pnorm_des = np.array([0.5, 2.2])
# Pinds = np.zeros(Pnorm_des.size, dtype=int)
# for Pn_ind, Pn in enumerate(Pnorm_des):
# Pinds[Pn_ind] = np.abs(Pnorm - Pn).argmin().astype(int)
# fig, ax = plt.subplots()
# for ip, indP in enumerate(Pinds):
# P = PVals[indP]
# DynOv_w = np.abs(qds_aIBi_ts_w.isel(P=indP)['Real_DynOv'].values + 1j * qds_aIBi_ts_w.isel(P=indP)['Imag_DynOv'].values).real.astype(float)
# Pph_ds_w = xr.DataArray(qds_aIBi_ts_w.isel(P=indP)['Pph'].values, coords=[tsVals], dims=['t'])
# if PimpData_roll:
# Pph_ds_w = Pph_ds_w.rolling(t=PimpData_rollwin, center=True).mean().dropna('t')
# vImp_Vals_w = (P - Pph_ds_w.values) / mI
# tvImp_Vals_w = Pph_ds_w['t'].values
# if tailFit is True:
# tfmask = tsVals > tfCutoff
# tfVals = tsVals[tfmask]
# tfLin = tsVals[tsVals > tfstart]
# zD = np.polyfit(np.log(tfVals), np.log(DynOv_w[tfmask]), deg=1)
# if longTime:
# tfLin_plot = tVals[tVals > tfstart]
# else:
# tfLin_plot = tfLin
# fLinD_plot = np.exp(zD[1]) * tfLin_plot**(zD[0])
# ax.plot(tfLin_plot / tscale, fLinD_plot, 'k--', label='')
# if longTime:
# DynOv_w_plot = np.abs(qds_w.isel(P=indP)['Real_DynOv'].values + 1j * qds_w.isel(P=indP)['Imag_DynOv'].values).real.astype(float)
# ax.plot(tVals / tscale, DynOv_w_plot, label='{:.2f}'.format(P / mc), lw=3, color=colorList[ip])
# else:
# ax.plot(tsVals / tscale, DynOv_w, label='{:.2f}'.format(P / mc))
# ax.set_ylabel(r'$|S(t)|$', fontsize=18)
# ax.set_xlabel(r'$t/(\xi c^{-1})$', fontsize=18)
# if logScale is True:
# ax.set_xscale('log')
# ax.set_yscale('log')
# ax.tick_params(which='both', direction='in', right=True, top=True)
# ax.tick_params(which='major', length=6, width=1)
# ax.tick_params(which='minor', length=3, width=1)
# ax.tick_params(axis='x', which='major', pad=10)
# ax.tick_params(axis='both', which='major', labelsize=17)
# ax.tick_params(axis='both', which='minor', labelsize=17)
# # ax.legend(title=r'$v_{\rm imp}(t_{0}) / c$')
# handles, labels = ax.get_legend_handles_labels()
# # fig.legend(handles, labels, title=r'$\langle v_{\rm imp}(t_{0})\rangle / c$', ncol=1, loc='center right', bbox_to_anchor=(0.11, 0.38)))
# fig.subplots_adjust(left=0.2, bottom=0.175, top=0.98, right=0.98)
# fig.legend(handles, labels, title=r'$v_{\rm imp}(t_{0}) / c$', loc=3, bbox_to_anchor=(0.25, 0.25), fontsize=18, title_fontsize=18)
# fig.set_size_inches(6, 3.9)
# filename = '/Fig3_PRL.pdf'
# fig.savefig(figdatapath + filename)
# # # # # # # #############################################################################################################################
# # # # # # # FIG SM3 - LETTER
# # # # # # #############################################################################################################################
# axl = matplotlib.rcParams['axes.linewidth']
# matplotlib.rcParams['axes.linewidth'] = 0.5 * axl
# matplotlib.rcParams.update({'font.size': 12})
# labelsize = 13
# legendsize = 12
# red = col.red.ashexstring()
# green = col.green.ashexstring()
# blue = col.blue.ashexstring()
# colorList = [green, red, blue]
# matplotlib.rcParams.update({'font.size': 12})
# # fig, ax = plt.subplots()
# fig = plt.figure(constrained_layout=False)
# gs = fig.add_gridspec(nrows=1, ncols=1, bottom=0.1, top=0.93, left=0.1, right=0.95)
# ax = fig.add_subplot(gs[0])
# qds = xr.open_dataset('/Users/kis/Dropbox/VariationalResearch/HarvardOdyssey/genPol_data/NGridPoints_1.11E+08_resRat_0.50/massRatio=1.0_noCSAmp/redyn_spherical' + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi))
# tVals = qds['t'].values
# DynOvExp_NegMask = False
# DynOvExp_Cut = False
# cut = 1e-4
# consecDetection = True
# consecSamples = 10
# def powerfunc(t, a, b):
# return b * t**(-1 * a)
# tmin = 90
# tmax = 100
# tfVals = tVals[(tVals <= tmax) * (tVals >= tmin)]
# rollwin = 1
# aIBi_des = np.array([-10.0, -5.0, -3.5, -2.5, -2.0, -1.75])
# massRat_des = np.array([1.0])
# datapath = '/Users/kis/Dropbox/VariationalResearch/HarvardOdyssey/genPol_data/NGridPoints_1.11E+08_resRat_0.50/massRatio=1.0_noCSAmp'
# massRat_des = np.array([0.5, 1.0, 2.0])
# mdatapaths = []
# for mR in massRat_des:
# if toggleDict['noCSAmp'] is True:
# mdatapaths.append(datapath[0:-11] + '{:.1f}_noCSAmp'.format(mR))
# else:
# mdatapaths.append(datapath[0:-3] + '{:.1f}_noCSAmp'.format(mR))
# if toggleDict['Dynamics'] != 'real' or toggleDict['Grid'] != 'spherical' or toggleDict['Coupling'] != 'twophonon':
# print('SETTING ERROR')
# Pcrit_da = xr.DataArray(np.full((massRat_des.size, aIBi_des.size), np.nan, dtype=float), coords=[massRat_des, aIBi_des], dims=['mRatio', 'aIBi'])
# for inda, aIBi in enumerate(aIBi_des):
# for indm, mRat in enumerate(massRat_des):
# mds = xr.open_dataset(mdatapaths[indm] + '/redyn_spherical/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi))
# Plen = mds.coords['P'].values.size
# Pstart_ind = 0
# PVals = mds.coords['P'].values[Pstart_ind:Plen]
# n0 = mds.attrs['n0']
# gBB = mds.attrs['gBB']
# mI = mds.attrs['mI']
# mB = mds.attrs['mB']
# nu = np.sqrt(n0 * gBB / mB)
# vI0_Vals = (PVals - mds.isel(t=0, P=np.arange(Pstart_ind, Plen))['Pph'].values) / mI
# mds_ts = mds.sel(t=tfVals)
# DynOv_Exponents = np.zeros(PVals.size)
# DynOv_Constants = np.zeros(PVals.size)
# for indP, P in enumerate(PVals):
# DynOv_raw = np.abs(mds_ts.isel(P=indP)['Real_DynOv'].values + 1j * mds_ts.isel(P=indP)['Imag_DynOv'].values).real.astype(float)
# DynOv_ds = xr.DataArray(DynOv_raw, coords=[tfVals], dims=['t'])
# # DynOv_ds = DynOv_ds.rolling(t=rollwin, center=True).mean().dropna('t')
# DynOv_Vals = DynOv_ds.values
# tDynOvc_Vals = DynOv_ds['t'].values
# S_slope, S_intercept, S_rvalue, S_pvalue, S_stderr = ss.linregress(np.log(tDynOvc_Vals), np.log(DynOv_Vals))
# DynOv_Exponents[indP] = -1 * S_slope
# DynOv_Constants[indP] = np.exp(S_intercept)
# if DynOvExp_NegMask:
# DynOv_Exponents[DynOv_Exponents < 0] = 0
# if DynOvExp_Cut:
# DynOv_Exponents[np.abs(DynOv_Exponents) < cut] = 0
# if consecDetection:
# crit_ind = 0
# for indE, exp in enumerate(DynOv_Exponents):
# if indE > DynOv_Exponents.size - consecDetection:
# break
# expSlice = DynOv_Exponents[indE:(indE + consecSamples)]
# if np.all(expSlice > 0):
# crit_ind = indE
# break
# DynOv_Exponents[0:crit_ind] = 0
# Pcrit_da[indm, inda] = PVals[crit_ind] / (mI * nu)
# DynOvf_Vals = powerfunc(1e1000, DynOv_Exponents, DynOv_Constants)
# for indm, mRat in enumerate(massRat_des):
# ax.plot(aIBi_des * xi, Pcrit_da.sel(mRatio=mRat).values, linestyle='None', marker='D', mec=colorList[indm], mfc=colorList[indm], mew=2, ms=5, label='{0}'.format(mRat))
# xmin = -10.1; xmax = -0.9
# # ymin = -0.1; ymax = 4.1
# ymin = 0.4; ymax = 4.1
# ax.tick_params(direction='in', right=True, top=True)
# ax.set_xlabel(r'$a_{\rm IB}^{-1}/\xi^{-1}$', fontsize=labelsize)
# ax.set_ylabel(r'Total Momentum $P/(m_{I}c)$', fontsize=labelsize)
# ax.set_xlim([xmin, xmax]); ax.set_ylim([ymin, ymax])
# ax.legend(title=r'$m_{I}/m_{B}$', loc=2)
# fig.set_size_inches(6, 4.5)
# # fig.set_size_inches(6, 3.9)
# filename = '/FigSM3_PRL.pdf'
# fig.savefig(figdatapath + filename)
# # # # # # #############################################################################################################################
# # # # # # OLD FIGS
# # # # # #############################################################################################################################
# # # # FIG 4 - S(t) AND v_Imp CURVES (WEAK AND STRONG INTERACTIONS)
# colorList = ['b', 'orange', 'g', 'r']
# matplotlib.rcParams.update({'font.size': 20})
# tailFit = True
# logScale = True
# PimpData_roll = False; PimpData_rollwin = 2
# longTime = True
# # tau = 100; tfCutoff = 90; tfstart = 10
# tau = 300; tfCutoff = 200; tfstart = 10
# aIBi_weak = -10.0
# aIBi_strong = -2
# print(aIBi_weak * xi, aIBi_strong * xi)
# if longTime:
# innerdatapath_longtime = datapath + '_longtime/redyn_spherical'
# qds_w = xr.open_dataset(innerdatapath_longtime + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi_weak))
# qds_s = xr.open_dataset(innerdatapath_longtime + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi_strong))
# else:
# qds_w = xr.open_dataset(innerdatapath + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi_weak))
# qds_s = xr.open_dataset(innerdatapath + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi_strong))
# tVals = qds_w['t'].values
# tsVals = tVals[tVals < tau]
# qds_aIBi_ts_w = qds_w.sel(t=tsVals)
# qds_aIBi_ts_s = qds_s.sel(t=tsVals)
# # Pnorm_des = np.array([0.1, 0.5, 0.9, 1.4, 2.2, 3.0, 5.0])
# Pnorm_des = np.array([0.5, 0.98, 2.2, 3.0])
# Pinds = np.zeros(Pnorm_des.size, dtype=int)
# for Pn_ind, Pn in enumerate(Pnorm_des):
# Pinds[Pn_ind] = np.abs(Pnorm - Pn).argmin().astype(int)
# fig, axes = plt.subplots(nrows=2, ncols=2)
# for ip, indP in enumerate(Pinds):
# P = PVals[indP]
# DynOv_w = np.abs(qds_aIBi_ts_w.isel(P=indP)['Real_DynOv'].values + 1j * qds_aIBi_ts_w.isel(P=indP)['Imag_DynOv'].values).real.astype(float)
# Pph_ds_w = xr.DataArray(qds_aIBi_ts_w.isel(P=indP)['Pph'].values, coords=[tsVals], dims=['t'])
# if PimpData_roll:
# Pph_ds_w = Pph_ds_w.rolling(t=PimpData_rollwin, center=True).mean().dropna('t')
# vImp_Vals_w = (P - Pph_ds_w.values) / mI
# tvImp_Vals_w = Pph_ds_w['t'].values
# if tailFit is True:
# tfmask = tsVals > tfCutoff
# tfVals = tsVals[tfmask]
# tfLin = tsVals[tsVals > tfstart]
# zD = np.polyfit(np.log(tfVals), np.log(DynOv_w[tfmask]), deg=1)
# if longTime:
# tfLin_plot = tVals[tVals > tfstart]
# else:
# tfLin_plot = tfLin
# fLinD_plot = np.exp(zD[1]) * tfLin_plot**(zD[0])
# axes[0, 0].plot(tfLin_plot / tscale, fLinD_plot, 'k--', label='')
# if longTime:
# DynOv_w_plot = np.abs(qds_w.isel(P=indP)['Real_DynOv'].values + 1j * qds_w.isel(P=indP)['Imag_DynOv'].values).real.astype(float)
# axes[0, 0].plot(tVals / tscale, DynOv_w_plot, label='{:.2f}'.format(P / mc), lw=3, color=colorList[ip])
# else:
# axes[0, 0].plot(tsVals / tscale, DynOv_w, label='{:.2f}'.format(P / mc))
# axes[1, 0].plot(tvImp_Vals_w / tscale, vImp_Vals_w / nu, label='{:.2f}'.format(P / mc), lw=3, color=colorList[ip])
# DynOv_s = np.abs(qds_aIBi_ts_s.isel(P=indP)['Real_DynOv'].values + 1j * qds_aIBi_ts_s.isel(P=indP)['Imag_DynOv'].values).real.astype(float)
# Pph_ds_s = xr.DataArray(qds_aIBi_ts_s.isel(P=indP)['Pph'].values, coords=[tsVals], dims=['t'])
# if PimpData_roll:
# Pph_ds_s = Pph_ds_s.rolling(t=PimpData_rollwin, center=True).mean().dropna('t')
# vImp_Vals_s = (P - Pph_ds_s.values) / mI
# tvImp_Vals_s = Pph_ds_s['t'].values
# if tailFit is True:
# tfmask = tsVals > tfCutoff
# tfVals = tsVals[tfmask]
# tfLin = tsVals[tsVals > tfstart]
# zD = np.polyfit(np.log(tfVals), np.log(DynOv_s[tfmask]), deg=1)
# if longTime:
# tfLin_plot = tVals[tVals > tfstart]
# else:
# tfLin_plot = tfLin
# fLinD_plot = np.exp(zD[1]) * tfLin_plot**(zD[0])
# axes[0, 1].plot(tfLin_plot / tscale, fLinD_plot, 'k--', label='')
# if longTime:
# DynOv_s_plot = np.abs(qds_s.isel(P=indP)['Real_DynOv'].values + 1j * qds_s.isel(P=indP)['Imag_DynOv'].values).real.astype(float)
# axes[0, 1].plot(tVals / tscale, DynOv_s_plot, label='{:.2f}'.format(P / mc), lw=3, color=colorList[ip])
# else:
# axes[0, 1].plot(tsVals / tscale, DynOv_s, label='{:.2f}'.format(P / mc))
# axes[1, 1].plot(tvImp_Vals_s / tscale, vImp_Vals_s / nu, label='{:.2f}'.format(P / mc), lw=3, color=colorList[ip])
# axes[0, 0].set_ylabel(r'$|S(t)|$', fontsize=27)
# # axes[0, 0].set_xlabel(r'$t/(\xi c^{-1})$', fontsize=27)
# axes[1, 0].plot(tsVals / tscale, np.ones(tsVals.size), 'k--', label='$c$')
# axes[1, 0].set_ylabel(r'$v_{\rm imp}(t) / c$', fontsize=27)
# axes[1, 0].set_xlabel(r'$t/(\xi c^{-1})$', fontsize=27)
# if logScale is True:
# # axes[0, 0].plot(tlin_norm * np.ones(DynOv_w.size), np.linspace(np.min(DynOv_w), np.max(DynOv_w), DynOv_w.size), 'k-')
# axes[0, 0].set_xscale('log')
# axes[0, 0].set_yscale('log')
# # axes[0, 0].set_ylim([7e-2, 1e0])
# axes[1, 0].set_xscale('log')
# # axes[0, 1].set_ylabel(r'$|S(t)|$', fontsize=27)
# # axes[0, 1].set_xlabel(r'$t/(\xi c^{-1})$', fontsize=27)
# axes[1, 1].plot(tsVals / tscale, np.ones(tsVals.size), 'k--', label='$c$')
# # axes[1, 1].set_ylabel(r'$\langle v_{\rm imp}\rangle / c$', fontsize=27)
# axes[1, 1].set_xlabel(r'$t/(\xi c^{-1})$', fontsize=27)
# if logScale is True:
# # axes[0, 1].plot(tlin_norm * np.ones(DynOv_s.size), np.linspace(np.min(DynOv_s), np.max(DynOv_s), DynOv_s.size), 'k-')
# axes[0, 1].set_xscale('log')
# axes[0, 1].set_yscale('log')
# # axes[0, 1].set_ylim([7e-2, 1e0])
# axes[1, 1].set_xscale('log')
# fig.text(0.06, 0.95, '(a)', fontsize=30)
# fig.text(0.52, 0.95, '(b)', fontsize=30)
# fig.text(0.06, 0.55, '(c)', fontsize=30)
# fig.text(0.52, 0.55, '(d)', fontsize=30)
# axes[0, 0].tick_params(which='both', direction='in', right=True, top=True)
# axes[0, 1].tick_params(which='both', direction='in', right=True, top=True)
# axes[1, 0].tick_params(which='both', direction='in', right=True, top=True)
# axes[1, 1].tick_params(which='both', direction='in', right=True, top=True)
# axes[0, 0].tick_params(which='major', length=6, width=1)
# axes[0, 1].tick_params(which='major', length=6, width=1)
# axes[1, 0].tick_params(which='major', length=6, width=1)
# axes[1, 1].tick_params(which='major', length=6, width=1)
# axes[0, 0].tick_params(which='minor', length=3, width=1)
# axes[0, 1].tick_params(which='minor', length=3, width=1)
# axes[1, 0].tick_params(which='minor', length=3, width=1)
# axes[1, 1].tick_params(which='minor', length=3, width=1)
# axes[0, 0].tick_params(axis='x', which='major', pad=10)
# axes[0, 1].tick_params(axis='x', which='major', pad=10)
# axes[1, 0].tick_params(axis='x', which='major', pad=10)
# axes[1, 1].tick_params(axis='x', which='major', pad=10)
# axes[0, 0].tick_params(axis='both', which='major', labelsize=20)
# axes[0, 1].tick_params(axis='both', which='major', labelsize=20)
# axes[1, 0].tick_params(axis='both', which='major', labelsize=20)
# axes[1, 1].tick_params(axis='both', which='major', labelsize=20)
# axes[0, 1].yaxis.set_major_formatter(NullFormatter())
# axes[0, 1].yaxis.set_minor_formatter(NullFormatter())
# # axes[0, 1].set_yticks([])
# axes[0, 1].yaxis.set_ticklabels([])
# axes[1, 1].yaxis.set_ticklabels([])
# handles, labels = axes[0, 0].get_legend_handles_labels()
# # fig.legend(handles, labels, title=r'$\langle v_{\rm imp}(t_{0})\rangle / c$', ncol=1, loc='center right', bbox_to_anchor=(0.11, 0.38))
# # fig.subplots_adjust(left=0.16, bottom=0.1, top=0.925, right=0.95, wspace=0.25, hspace=0.32)
# fig.legend(handles, labels, title=r'$v_{\rm imp}(t_{0}) / c$', ncol=Pnorm_des.size, loc='lower center', bbox_to_anchor=(0.55, 0.01), fontsize=25, title_fontsize=25)
# fig.subplots_adjust(left=0.1, bottom=0.22, top=0.925, right=0.95, wspace=0.1, hspace=0.32)
# fig.set_size_inches(16.9, 12)
# filename = '/Fig4.pdf'
# fig.savefig(figdatapath + filename)
# # # # FIG 5 - LOSCHMIDT ECHO EXPONENTS + FINAL LOSCHMIDT ECHO + FINAL IMPURITY VELOCITY
# DynOvData_roll = False
# DynOvData_rollwin = 2
# PimpData_roll = False
# PimpData_rollwin = 2
# DynOvExp_roll = False
# DynOvExp_rollwin = 2
# DynOvExp_NegMask = False
# DynOvExp_Cut = False
# cut = 1e-4
# consecDetection = True
# consecSamples = 10
# flattenAboveC = True
# aIBi_des = np.array([-10.0, -5.0, -3.5, -2.5, -2.0, -1.75])
# Pnorm = PVals / mc
# tmin = 90; tmax = 100
# tfVals = tVals[(tVals <= tmax) * (tVals >= tmin)]
# colorList = ['red', '#7e1e9c', 'green', 'orange', '#60460f', 'blue', 'magenta']
# lineList = ['solid', 'dashed', 'dotted', '-.']
# def powerfunc(t, a, b):
# return b * t**(-1 * a)
# Pcrit_da = xr.DataArray(np.full(aIBi_des.size, np.nan, dtype=float), coords=[aIBi_des], dims=['aIBi'])
# fig, axes = plt.subplots(nrows=3, ncols=1)
# for inda, aIBi in enumerate(aIBi_des):
# qds_aIBi = xr.open_dataset(innerdatapath + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi))
# # print(qds_aIBi['t'].values)
# qds_aIBi_ts = qds_aIBi.sel(t=tfVals)
# PVals = qds_aIBi['P'].values
# Pnorm = PVals / mc
# DynOv_Exponents = np.zeros(PVals.size)
# DynOv_Cov = np.full(PVals.size, np.nan)
# vImp_Exponents = np.zeros(PVals.size)
# vImp_Cov = np.full(PVals.size, np.nan)
# Plen = PVals.size
# Pstart_ind = 0
# vI0_Vals = (PVals - qds_aIBi.isel(t=0, P=np.arange(Pstart_ind, Plen))['Pph'].values) / mI
# DynOv_Exponents = np.zeros(PVals.size)
# DynOv_Constants = np.zeros(PVals.size)
# vImp_Exponents = np.zeros(PVals.size)
# vImp_Constants = np.zeros(PVals.size)
# DynOv_Rvalues = np.zeros(PVals.size)
# DynOv_Pvalues = np.zeros(PVals.size)
# DynOv_stderr = np.zeros(PVals.size)
# DynOv_tstat = np.zeros(PVals.size)
# DynOv_logAve = np.zeros(PVals.size)
# for indP, P in enumerate(PVals):
# DynOv_raw = np.abs(qds_aIBi_ts.isel(P=indP)['Real_DynOv'].values + 1j * qds_aIBi_ts.isel(P=indP)['Imag_DynOv'].values).real.astype(float)
# DynOv_ds = xr.DataArray(DynOv_raw, coords=[tfVals], dims=['t'])
# Pph_ds = xr.DataArray(qds_aIBi_ts.isel(P=indP)['Pph'].values, coords=[tfVals], dims=['t'])
# if DynOvData_roll:
# DynOv_ds = DynOv_ds.rolling(t=DynOvData_rollwin, center=True).mean().dropna('t')
# if PimpData_roll:
# Pph_ds = Pph_ds.rolling(t=PimpData_rollwin, center=True).mean().dropna('t')
# DynOv_Vals = DynOv_ds.values
# tDynOv_Vals = DynOv_ds['t'].values
# vImpc_Vals = (P - Pph_ds.values) / mI - nu
# tvImpc_Vals = Pph_ds['t'].values
# S_slope, S_intercept, S_rvalue, S_pvalue, S_stderr = ss.linregress(np.log(tDynOv_Vals), np.log(DynOv_Vals))
# DynOv_Exponents[indP] = -1 * S_slope
# DynOv_Constants[indP] = np.exp(S_intercept)
# DynOv_Rvalues[indP] = S_rvalue
# DynOv_Pvalues[indP] = S_pvalue
# DynOv_stderr[indP] = S_stderr
# DynOv_tstat[indP] = S_slope / S_stderr
# DynOv_logAve[indP] = np.average(np.log(DynOv_Vals))
# # if (-1 * S_slope) < 0:
# # DynOv_Exponents[indP] = 0
# if vImpc_Vals[-1] < 0:
# vImp_Exponents[indP] = 0
# vImp_Constants[indP] = vImpc_Vals[-1]
# else:
# vI_slope, vI_intercept, vI_rvalue, vI_pvalue, vI_stderr = ss.linregress(np.log(tvImpc_Vals), np.log(vImpc_Vals))
# vImp_Exponents[indP] = -1 * vI_slope
# vImp_Constants[indP] = np.exp(vI_intercept)
# if (-1 * vI_slope) < 0:
# vImp_Exponents[indP] = 0
# DynOvExponents_da = xr.DataArray(DynOv_Exponents, coords=[PVals], dims=['P'])
# if DynOvExp_roll:
# DynOvExponents_da = DynOvExponents_da.rolling(P=DynOvExp_rollwin, center=True).mean().dropna('P')
# if DynOvExp_NegMask:
# ExpMask = DynOvExponents_da.values < 0
# DynOvExponents_da[ExpMask] = 0
# if DynOvExp_Cut:
# ExpMask = np.abs(DynOvExponents_da.values) < cut
# DynOvExponents_da[ExpMask] = 0
# DynOv_Exponents = DynOvExponents_da.values
# if consecDetection:
# crit_ind = 0
# for indE, exp in enumerate(DynOv_Exponents):
# if indE > DynOv_Exponents.size - consecDetection:
# break
# expSlice = DynOv_Exponents[indE:(indE + consecSamples)]
# if np.all(expSlice > 0):
# crit_ind = indE
# break
# DynOvExponents_da[0:crit_ind] = 0
# DynOv_Exponents = DynOvExponents_da.values
# Pnorm_dynov = DynOvExponents_da['P'].values / mc
# DynOvf_Vals = powerfunc(1e1000, DynOv_Exponents, DynOv_Constants)
# Pcrit_da[inda] = PVals[crit_ind] / (mI * nu)
# vIf_Vals = nu + powerfunc(1e1000, vImp_Exponents, vImp_Constants)
# if flattenAboveC:
# vIf_Vals[vIf_Vals > nu] = nu
# axes[0].plot(Pnorm_dynov, DynOv_Exponents, color=colorList[inda], linestyle='solid', label='{:.2f}'.format(aIBi * xi), marker='D')
# # ax1.plot(Pnorm, vImp_Exponents, color=colorList[inda], linestyle='dotted', marker='+', markerfacecolor='none', label='{:.2f}'.format(aIBi))
# axes[1].plot(vI0_Vals / nu, DynOvf_Vals, color=colorList[inda], linestyle='solid', marker='D')
# axes[2].plot(vI0_Vals / nu, vIf_Vals / nu, color=colorList[inda], linestyle='solid', marker='D')
# # axes[0].set_xlabel(r'$\langle v_{I}(t_{0})\rangle/c$', fontsize=20))
# axes[0].set_ylabel(r'$\gamma$' + ' for ' + r'$|S(t)|\propto t^{-\gamma}$', fontsize=20)
# axes[0].set_xlim([0, 4])
# axes[0].set_ylim([-.02, 0.25])
# # axes[1].set_xlabel(r'$\langle v_{I}(t_{0})\rangle/c$', fontsize=20))
# axes[1].set_ylabel(r'$S(t_{\infty})$', fontsize=20)
# axes[1].set_xlim([0, 4])
# axes[1].set_ylim([-.05, 1.1])
# axes[2].plot(vI0_Vals / nu, np.ones(vI0_Vals.size), 'k:')
# axes[2].set_xlabel(r'$v_{\rm imp}(t_{0})/c$', fontsize=20)
# axes[2].set_ylabel(r'$v_{\rm imp}(t_{\infty})/c$', fontsize=20)
# axes[2].set_xlim([0, 4])
# axes[2].set_ylim([-.03, 1.1])
# fig.text(0.03, 0.97, '(a)', fontsize=20)
# fig.text(0.03, 0.7, '(b)', fontsize=20)
# fig.text(0.03, 0.42, '(c)', fontsize=20)
# axes[0].xaxis.set_ticklabels([])
# axes[1].xaxis.set_ticklabels([])
# axes[0].tick_params(which='both', direction='in', right=True, top=True)
# axes[1].tick_params(which='both', direction='in', right=True, top=True)
# axes[2].tick_params(which='both', direction='in', right=True, top=True)
# handles, labels = axes[0].get_legend_handles_labels()
# fig.legend(handles, labels, title=r'$a_{\rm IB}^{-1}/\xi^{-1}$', ncol=aIBi_des.size // 2, loc='lower center', bbox_to_anchor=(0.55, 0.01))
# fig.subplots_adjust(left=0.2, bottom=0.17, top=0.97, right=0.97, hspace=0.15)
# fig.set_size_inches(6, 12)
# fig.savefig(figdatapath + '/Fig5.pdf')
# # # # FIG DPT - NESS + GS PHASE DIAGRAM
# DynOvExp_NegMask = False
# DynOvExp_Cut = False
# cut = 1e-4
# consecDetection = True
# consecSamples = 10
# def powerfunc(t, a, b):
# return b * t**(-1 * a)
# tmin = 90
# tmax = 100
# tfVals = tVals[(tVals <= tmax) * (tVals >= tmin)]
# rollwin = 1
# colorList = ['red', '#7e1e9c', 'green', 'orange', '#60460f', 'blue', 'magenta']
# lineList = ['solid', 'dashed', 'dotted', '-.']
# aIBi_des = np.array([-10.0, -5.0, -3.5, -2.5, -2.0, -1.75])
# massRat_des = np.array([0.5, 1.0, 2.0])
# mdatapaths = []
# for mR in massRat_des:
# if toggleDict['noCSAmp'] is True:
# mdatapaths.append(datapath[0:-11] + '{:.1f}_noCSAmp'.format(mR))
# else:
# mdatapaths.append(datapath[0:-3] + '{:.1f}_noCSAmp'.format(mR))
# if toggleDict['Dynamics'] != 'real' or toggleDict['Grid'] != 'spherical' or toggleDict['Coupling'] != 'twophonon':
# print('SETTING ERROR')
# Pcrit_da = xr.DataArray(np.full((massRat_des.size, aIBi_des.size), np.nan, dtype=float), coords=[massRat_des, aIBi_des], dims=['mRatio', 'aIBi'])
# for inda, aIBi in enumerate(aIBi_des):
# for indm, mRat in enumerate(massRat_des):
# mds = xr.open_dataset(mdatapaths[indm] + '/redyn_spherical/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi))
# Plen = mds.coords['P'].values.size
# Pstart_ind = 0
# PVals = mds.coords['P'].values[Pstart_ind:Plen]
# n0 = mds.attrs['n0']
# gBB = mds.attrs['gBB']
# mI = mds.attrs['mI']
# mB = mds.attrs['mB']
# nu = np.sqrt(n0 * gBB / mB)
# vI0_Vals = (PVals - mds.isel(t=0, P=np.arange(Pstart_ind, Plen))['Pph'].values) / mI
# mds_ts = mds.sel(t=tfVals)
# DynOv_Exponents = np.zeros(PVals.size)
# DynOv_Constants = np.zeros(PVals.size)
# for indP, P in enumerate(PVals):
# DynOv_raw = np.abs(mds_ts.isel(P=indP)['Real_DynOv'].values + 1j * mds_ts.isel(P=indP)['Imag_DynOv'].values).real.astype(float)
# DynOv_ds = xr.DataArray(DynOv_raw, coords=[tfVals], dims=['t'])
# # DynOv_ds = DynOv_ds.rolling(t=rollwin, center=True).mean().dropna('t')
# DynOv_Vals = DynOv_ds.values
# tDynOvc_Vals = DynOv_ds['t'].values
# S_slope, S_intercept, S_rvalue, S_pvalue, S_stderr = ss.linregress(np.log(tDynOvc_Vals), np.log(DynOv_Vals))
# DynOv_Exponents[indP] = -1 * S_slope
# DynOv_Constants[indP] = np.exp(S_intercept)
# if DynOvExp_NegMask:
# DynOv_Exponents[DynOv_Exponents < 0] = 0
# if DynOvExp_Cut:
# DynOv_Exponents[np.abs(DynOv_Exponents) < cut] = 0
# if consecDetection:
# crit_ind = 0
# for indE, exp in enumerate(DynOv_Exponents):
# if indE > DynOv_Exponents.size - consecDetection:
# break
# expSlice = DynOv_Exponents[indE:(indE + consecSamples)]
# if np.all(expSlice > 0):
# crit_ind = indE
# break
# DynOv_Exponents[0:crit_ind] = 0
# Pcrit_da[indm, inda] = PVals[crit_ind] / (mI * nu)
# DynOvf_Vals = powerfunc(1e1000, DynOv_Exponents, DynOv_Constants)
# PcritInterp = False
# plotGS = True
# Pcrit_interpVals_mRat1 = 0
# fig2, ax2 = plt.subplots()
# for indm, massRat in enumerate(massRat_des):
# if PcritInterp is True:
# Pcrit_norm = Pcrit_da.sel(mRatio=massRat).values
# Pcrit_tck = interpolate.splrep(aIBi_des, Pcrit_norm, s=0, k=1)
# aIBi_interpVals = np.linspace(np.min(aIBi_des), np.max(aIBi_des), 2 * aIBi_des.size)
# Pcrit_interpVals = 1 * interpolate.splev(aIBi_interpVals, Pcrit_tck, der=0)
# else:
# aIBi_interpVals = aIBi_des
# Pcrit_interpVals = Pcrit_da.sel(mRatio=massRat).values
# if massRat == 1.0:
# Pcrit_interpVals_mRat1 = Pcrit_interpVals
# # ax2.plot(aIBi_interpVals /xi, Pcrit_interpVals, color='k', linestyle=lineList[indm], label='{0}'.format(massRat))
# # ax2.plot(aIBi_interpVals / xi, Pcrit_interpVals, color='k', linestyle=lineList[indm], label='NESS')
# # ax2.plot(aIBi_des / xi, Pcrit_da.sel(mRatio=massRat).values, 'kx', mew=2, ms=12, label='NESS')
# ax2.plot(aIBi_des / xi, Pcrit_da.sel(mRatio=massRat).values, color=colorList[indm], linestyle='', marker='d', mew=2, ms=12, label='{0}'.format(massRat))
# xmin = np.min(aIBi_interpVals / xi)
# xmax = 1.01 * np.max(aIBi_interpVals / xi)
# ymin = 0
# ymax = 1.01 * np.max(Pcrit_da.values)
# font = {'family': 'serif', 'color': 'black', 'size': 16}
# sfont = {'family': 'serif', 'color': 'black', 'size': 15}
# if massRat_des.size > 1:
    # ax2.legend(title=r'$m_{I}/m_{B}$', loc=2)
# ax2.set_xlabel(r'$a_{IB}^{-1}$ [$\xi$]', fontsize=20)
# ax2.set_ylabel(r'$\langle v_{I}(t_{0})\rangle/c$', fontsize=20)
# ax2.text(-4.5, ymin + 0.175 * (ymax - ymin), 'Polaron', fontdict=font)
# ax2.text(-4.4, ymin + 0.1 * (ymax - ymin), '(' + r'$S(t_{\infty})>0$' + ')', fontdict=sfont)
# ax2.text(-7.0, ymin + 0.63 * (ymax - ymin), 'Cherenkov', fontdict=font)
# ax2.text(-6.85, ymin + 0.555 * (ymax - ymin), '(' + r'$S(t_{\infty})=0$' + ')', fontdict=sfont)
# # ax2.fill_between(aIBi_interpVals / xi, Pcrit_interpVals_mRat1, ymax, facecolor='b', alpha=0.25)
# # ax2.fill_between(aIBi_interpVals / xi, ymin, Pcrit_interpVals_mRat1, facecolor='g', alpha=0.25)
# ax2.set_xlim([xmin, xmax])
# ax2.set_ylim([ymin, ymax])
# if plotGS is True:
# gs_datapath = '/Users/kis/Dropbox/VariationalResearch/HarvardOdyssey/genPol_data/NGridPoints_1.44E+06/massRatio=1.0/imdyn_spherical'
# aIBi_Vals = np.array([-10.0, -9.0, -8.0, -7.0, -5.0, -3.5, -2.0, -1.0]) # used by many plots (spherical)
# Pcrit = np.zeros(aIBi_Vals.size)
# for aind, aIBi in enumerate(aIBi_Vals):
# qds_aIBi = xr.open_dataset(gs_datapath + '/quench_Dataset_aIBi_{:.2f}.nc'.format(aIBi))
# PVals = qds_aIBi['P'].values
# CSAmp_ds = qds_aIBi['Real_CSAmp'] + 1j * qds_aIBi['Imag_CSAmp']
# kgrid = Grid.Grid("SPHERICAL_2D"); kgrid.initArray_premade('k', CSAmp_ds.coords['k'].values); kgrid.initArray_premade('th', CSAmp_ds.coords['th'].values)
# Energy_Vals_inf = np.zeros(PVals.size)
# for Pind, P in enumerate(PVals):
# CSAmp = CSAmp_ds.sel(P=P).isel(t=-1).values
# Energy_Vals_inf[Pind] = pfs.Energy(CSAmp, kgrid, P, aIBi, mI, mB, n0, gBB)
# Einf_tck = interpolate.splrep(PVals, Energy_Vals_inf, s=0)
# Pinf_Vals = np.linspace(np.min(PVals), np.max(PVals), 2 * PVals.size)
# Einf_Vals = 1 * interpolate.splev(Pinf_Vals, Einf_tck, der=0)
# Einf_2ndderiv_Vals = 1 * interpolate.splev(Pinf_Vals, Einf_tck, der=2)
# Pcrit[aind] = Pinf_Vals[np.argmin(np.gradient(Einf_2ndderiv_Vals)) - 0]
# Pcrit_norm = Pcrit / (mI * nu)
# Pcrit_tck = interpolate.splrep(aIBi_Vals, Pcrit_norm, s=0, k=3)
# aIBi_interpVals = np.linspace(np.min(aIBi_Vals), np.max(aIBi_Vals), 5 * aIBi_Vals.size)
# Pcrit_interpVals = 1 * interpolate.splev(aIBi_interpVals, Pcrit_tck, der=0)
# ax2.plot(aIBi_interpVals / xi, Pcrit_interpVals, color='k', linestyle='solid', label='Ground State')
# ax2.fill_between(aIBi_interpVals / xi, Pcrit_interpVals, ymax, facecolor='b', alpha=0.25)
# ax2.fill_between(aIBi_interpVals / xi, ymin, Pcrit_interpVals, facecolor='g', alpha=0.25)
# ax2.legend(loc=2)
# fig2.set_size_inches(6, 4.5)
# fig2.subplots_adjust(bottom=0.17, top=0.97, left=0.15, right=0.97)
# fig2.savefig(figdatapath + '/FigDPT_mRat.pdf')
# # # # FIG 6 - PARTICIPATION RATIO CURVES VS INITIAL VELOCITY (SPHERICAL APPROXIMATION TO CARTESIAN INTERPOLATION)
# # NOTE: We need the massRatio_1.0_old folder (or technically any of the _old folders) and the constants determined at the beginning of the script for this to run
# inversePlot = True
# # PRtype = 'continuous'
# PRtype = 'discrete'; discPR_norm = True
# Vol_fac = False
# tau = 2.3
# # tau = 5
# # NOTE: The following constants are grid dependent (both on original spherical grid and interpolated cartesian grid)
# dVk_cart = 0.0001241449577749997 # = dkx*dky*dkz from cartesian interpolation
# Npoints_xyz = 85184000
# Vxyz = 1984476.915083265
# contToDisc_factor = dVk_cart / ((2 * np.pi)**3)
# colorList = ['red', '#7e1e9c', '#60460f', '#658b38']
# # colorList = ['red', '#7e1e9c', 'green', 'orange', '#60460f', 'blue', 'magenta']
# lineList = ['solid', 'dotted', 'dashed', 'dashdot']
# aIBi_des = np.array([-10.0, -5.0, -2.0, -1.5])
# # aIBi_des = np.array([-10.0, -5.0, -2.0, -1.5, -1.25, -1.0])
# massRat_des = np.array([1.0])
# # massRat_des = np.array([0.5, 1.0, 2])
# mdatapaths = []
# for mR in massRat_des:
# mdatapaths.append('/Users/kis/Dropbox/VariationalResearch/HarvardOdyssey/genPol_data/NGridPoints_{:.2E}_resRat_{:.2f}/massRatio={:.1f}'.format(NGridPoints_cart, resRat, mR))
# if toggleDict['Dynamics'] != 'real' or toggleDict['Grid'] != 'spherical' or toggleDict['Coupling'] != 'twophonon':
# print('SETTING ERROR')
# kgrid = Grid.Grid("SPHERICAL_2D"); kgrid.initArray_premade('k', qds_aIBi.coords['k'].values); kgrid.initArray_premade('th', qds_aIBi.coords['th'].values)
# kVec = kgrid.getArray('k')
# thVec = kgrid.getArray('th')
# kg, thg = np.meshgrid(kVec, thVec, indexing='ij')
# dVk = kgrid.dV()
# print(kVec[-1], kVec[1] - kVec[0])
# PRcont_Averages = np.zeros(PVals.size)
# PRdisc_Averages = np.zeros(PVals.size)
# P_Vals_norm = np.concatenate((np.linspace(0.1, 0.8, 5, endpoint=False), np.linspace(0.8, 1.4, 10, endpoint=False), np.linspace(1.4, 3.0, 12, endpoint=False), np.linspace(3.0, 5.0, 10, endpoint=False), np.linspace(5.0, 9.0, 20)))
# fig1, ax1 = plt.subplots()
# for inda, aIBi in enumerate(aIBi_des):
# for indm, mRat in enumerate(massRat_des):
# vI0_Vals = np.zeros(PVals.size)
# PR_Averages = np.zeros(PVals.size)
# PVals = mRat * mB * nu * P_Vals_norm
# for indP, P in enumerate(PVals):
# qds_PaIBi = xr.open_dataset(mdatapaths[indm] + '/redyn_spherical/P_{:.3f}_aIBi_{:.2f}.nc'.format(P, aIBi))
# CSAmp_ds = (qds_PaIBi['Real_CSAmp'] + 1j * qds_PaIBi['Imag_CSAmp'])
# Nph_ds = qds_PaIBi['Nph']
# mI = qds_PaIBi.attrs['mI']
# if Lx == 60:
# CSAmp_ds = CSAmp_ds.rename({'tc': 't'})
# tsVals = CSAmp_ds.coords['t'].values
# tsVals = tsVals[tsVals <= tau]
# CSAmp_ds = CSAmp_ds.sel(t=tsVals)
# Nph_ds = Nph_ds.sel(t=tsVals)
# PR_Vals = np.zeros(tsVals.size)
# dt = tsVals[1] - tsVals[0]
# for indt, t in enumerate(tsVals):
# CSAmp_Vals = CSAmp_ds.sel(t=t).values
# Bk_2D_vals = CSAmp_Vals.reshape((len(kVec), len(thVec)))
# PhDen_Vals = ((2 * np.pi)**(-3)) * ((1 / Nph_ds.sel(t=t).values) * np.abs(Bk_2D_vals)**2).real.astype(float)
# dVk_n = ((2 * np.pi)**(3)) * dVk
# PR_Vals[indt] = (2 * np.pi)**3 * np.dot((PhDen_Vals**2).flatten(), dVk_n)
# vI0_Vals[indP] = (P - qds_PaIBi.isel(t=0)['Pph'].values) / mI
# PR_Vals_del = np.delete(PR_Vals, 0); PR_Averages[indP] = (1 / (tsVals[-1] - tsVals[1])) * simps(y=PR_Vals_del, dx=dt)
# if Vol_fac is True:
# PR_Averages = Vxyz * PR_Averages
# else:
# PR_Averages = PR_Averages
# if PRtype == 'continuous':
# PR_Averages = PR_Averages
# elif PRtype == 'discrete':
# PR_Averages = PR_Averages * contToDisc_factor
# if discPR_norm is True:
# PR_Averages = PR_Averages * Npoints_xyz
# if inversePlot is True:
# ax1.plot(vI0_Vals / nu, 1 / PR_Averages, linestyle=lineList[indm], color=colorList[inda])
# else:
# ax1.plot(vI0_Vals / nu, PR_Averages, linestyle=lineList[indm], color=colorList[inda])
# alegend_elements = []
# mlegend_elements = []
# for inda, aIBi in enumerate(aIBi_des):
# alegend_elements.append(Line2D([0], [0], color=colorList[inda], linestyle='solid', label='{:.2f}'.format(aIBi / xi)))
# for indm, mR in enumerate(massRat_des):
# mlegend_elements.append(Line2D([0], [0], color='magenta', linestyle=lineList[indm], label='{0}'.format(mR)))
# ax1.set_xlabel(r'$\langle v_{I}(t_{0})\rangle /c$', fontsize=20)
# if inversePlot is True:
# # ax1.set_title('Short-Time-Averaged Inverse Participation Ratio (' + r'$t\in[0, $' + '{:.2f}'.format(tau / tscale) + r'$\frac{\xi}{c}]$)')
# if PRtype == 'continuous':
# ax1.set_ylabel(r'Average $IPR$ with $IPR = ((2\pi)^{3} \int d^3\vec{k} (\frac{1}{(2\pi)^3}\frac{1}{N_{ph}}|\beta_{\vec{k}}|^{2})^{2})^{-1}$')
# elif PRtype == 'discrete':
# if discPR_norm is True:
# # ax1.set_ylabel(r'Average $IPR$ (Normalized by $N_{tot}$ modes in system)')
# ax1.set_ylabel(r'$\overline{IPR}/N_{tot}$', fontsize=20)
# else:
# ax1.set_ylabel(r'Average $IPR$')
# else:
# # ax1.set_title('Time-Averaged Participation Ratio (' + r'$t\in[0, $' + '{:.2f}'.format(tau / tscale) + r'$\frac{\xi}{c}]$)')
# ax1.set_ylabel(r'Average $PR$ with $PR = (2\pi)^{3} \int d^3\vec{k} (\frac{1}{(2\pi)^3}\frac{1}{N_{ph}}|\beta_{\vec{k}}|^{2})^{2}$')
# alegend = ax1.legend(handles=alegend_elements, loc=2, title=r'$a_{IB}^{-1}$ [$\xi$]', ncol=2)
# plt.gca().add_artist(alegend)
# # mlegend = ax1.legend(handles=mlegend_elements, loc=(0.22, 0.70), ncol=2, title=r'$m_{I}/m_{B}$')
# # plt.gca().add_artist(mlegend)
# # ax1.set_xlim([0, np.max(vI0_Vals / nu)])
# ax1.set_xlim([0, 4])
# ax1.set_ylim([0.004, 0.009])
# # ax1.yaxis.set_major_formatter(FormatStrFormatter('%.3f'))
# ax1.ticklabel_format(axis='y', style='sci', scilimits=(0, 0))
# ax1.yaxis.set_major_locator(plt.MaxNLocator(5))
# ax1.xaxis.set_major_locator(plt.MaxNLocator(4))
# fig1.set_size_inches(6, 3.9)
# fig1.subplots_adjust(bottom=0.17, top=0.94, right=0.97)
# # fig1.savefig(figdatapath + '/Fig6.pdf')
# # # FIG 7 - INDIVIDUAL PHONON MOMENTUM DISTRIBUTION PLOT SLICES
# matplotlib.rcParams.update({'font.size': 18})
# class HandlerEllipse(HandlerPatch):
# def create_artists(self, legend, orig_handle,
# xdescent, ydescent, width, height, fontsize, trans):
# center = 0.5 * width - 0.5 * xdescent, 0.5 * height - 0.5 * ydescent
# p = Ellipse(xy=center, width=width + xdescent,
# height=height + ydescent)
# self.update_prop(p, orig_handle, legend)
# p.set_transform(trans)
# return [p]
# Pnorm_des = np.array([0.1, 0.5, 0.8, 1.3, 1.5, 1.8, 3.0, 3.5, 4.0, 5.0, 8.0])
# Pinds = np.zeros(Pnorm_des.size, dtype=int)
# for Pn_ind, Pn in enumerate(Pnorm_des):
# Pinds[Pn_ind] = np.abs(Pnorm - Pn).argmin().astype(int)
# print(PVals[Pinds])
# indP = Pinds[5]
# P = PVals[indP]
# print(aIBi, P)
# vmaxAuto = False
# FGRBool = True; FGRlim = 1e-2
# IRpatch = False
# shortTime = False; tau = 5
# # tau = 100
# # tsVals = tVals[tVals < tau]
# if Lx == 60:
# qds_PaIBi = xr.open_dataset(distdatapath + '/P_{:.3f}_aIBi_{:.2f}.nc'.format(P, aIBi))
# tsVals = qds_PaIBi.coords['tc'].values
# else:
# # qds_PaIBi = qds_aIBi.sel(t=tsVals, P=P)
# qds_PaIBi = qds_aIBi.sel(P=P)
# tsVals = qds_PaIBi.coords['t'].values
# if shortTime is True:
# tsVals = tsVals[tsVals <= tau]
# kgrid = Grid.Grid("SPHERICAL_2D"); kgrid.initArray_premade('k', qds_PaIBi.coords['k'].values); kgrid.initArray_premade('th', qds_PaIBi.coords['th'].values)
# kVec = kgrid.getArray('k')
# thVec = kgrid.getArray('th')
# kg, thg = np.meshgrid(kVec, thVec, indexing='ij')
# dVk = kgrid.dV()
# axislim = 1.2
# if shortTime is True:
# axislim = 1.01 * P
# # kIRcut = 0.13
# # axislim = 3
# kIRcut = 0.1
# if Lx == 60:
# kIRcut = 0.01
# if vmaxAuto is True:
# kIRcut = -1
# kIRmask = kg < kIRcut
# dVk_IR = dVk.reshape((len(kVec), len(thVec)))[kIRmask]
# axmask = (kg >= kIRcut) * (kg <= axislim)
# dVk_ax = dVk.reshape((len(kVec), len(thVec)))[axmask]
# Omegak_da = xr.DataArray(np.full((tsVals.size, len(kVec), len(thVec)), np.nan, dtype=float), coords=[tsVals, kVec, thVec], dims=['t', 'k', 'th'])
# PhDen_da = xr.DataArray(np.full((tsVals.size, len(kVec), len(thVec)), np.nan, dtype=float), coords=[tsVals, kVec, thVec], dims=['t', 'k', 'th'])
# Nph_Vals = np.zeros(tsVals.size)
# Pph_Vals = np.zeros(tsVals.size)
# Pimp_Vals = np.zeros(tsVals.size)
# norm_IRpercent = np.zeros(tsVals.size)
# norm_axpercent = np.zeros(tsVals.size)
# vmax = 0
# for tind, t in enumerate(tsVals):
# if Lx == 60:
# CSAmp_ds = (qds_PaIBi['Real_CSAmp'] + 1j * qds_PaIBi['Imag_CSAmp']).sel(tc=t)
# else:
# CSAmp_ds = (qds_PaIBi['Real_CSAmp'] + 1j * qds_PaIBi['Imag_CSAmp']).sel(t=t)
# CSAmp_Vals = CSAmp_ds.values
# Nph_Vals[tind] = qds_PaIBi['Nph'].sel(t=t).values
# Pph_Vals[tind] = qds_PaIBi['Pph'].sel(t=t).values
# Pimp_Vals[tind] = P - Pph_Vals[tind]
# Bk_2D_vals = CSAmp_Vals.reshape((len(kVec), len(thVec)))
# PhDen_da.sel(t=t)[:] = ((1 / Nph_Vals[tind]) * np.abs(Bk_2D_vals)**2).real.astype(float)
# norm_tot = np.dot(PhDen_da.sel(t=t).values.flatten(), dVk)
# PhDen_IR = PhDen_da.sel(t=t).values[kIRmask]
# norm_IR = np.dot(PhDen_IR.flatten(), dVk_IR.flatten())
# norm_IRpercent[tind] = 100 * np.abs(norm_IR / norm_tot)
# # print(norm_IRpercent[tind])
# PhDen_ax = PhDen_da.sel(t=t).values[axmask]
# norm_ax = np.dot(PhDen_ax.flatten(), dVk_ax.flatten())
# norm_axpercent[tind] = 100 * np.abs(norm_ax / norm_tot)
# Omegak_da.sel(t=t)[:] = pfs.Omega(kgrid, Pimp_Vals[tind], mI, mB, n0, gBB).reshape((len(kVec), len(thVec))).real.astype(float)
# # print(Omegak_da.sel(t=t))
# maxval = np.max(PhDen_da.sel(t=t).values[np.logical_not(kIRmask)])
# if maxval > vmax:
# vmax = maxval
# # Plot slices
# tnorm = tsVals / tscale
# tnVals_des = np.array([0.5, 8.0, 15.0, 25.0, 40.0, 75.0])
# tninds = np.zeros(tnVals_des.size, dtype=int)
# for tn_ind, tn in enumerate(tnVals_des):
# tninds[tn_ind] = np.abs(tnorm - tn).argmin().astype(int)
# tslices = tsVals[tninds]
# print(vmax)
# vmin = 0
# if (vmaxAuto is False) and (Lx != 60):
# vmax = 800
# if shortTime is True:
# vmax = 200
# interpmul = 5
# if Lx == 60:
# PhDen0_interp_vals = PhDen_da.isel(t=0).values
# kxg_interp = kg * np.sin(thg)
# kzg_interp = kg * np.cos(thg)
# else:
# PhDen0_interp_vals, kg_interp, thg_interp = pfc.xinterp2D(PhDen_da.isel(t=0), 'k', 'th', interpmul)
# kxg_interp = kg_interp * np.sin(thg_interp)
# kzg_interp = kg_interp * np.cos(thg_interp)
# vmax = 3000
# fig, axes = plt.subplots(nrows=3, ncols=2)
# for tind, t in enumerate(tslices):
# if tind == 0:
# ax = axes[0, 0]
# elif tind == 1:
# ax = axes[0, 1]
# if tind == 2:
# ax = axes[1, 0]
# if tind == 3:
# ax = axes[1, 1]
# if tind == 4:
# ax = axes[2, 0]
# if tind == 5:
# ax = axes[2, 1]
# PhDen_interp_vals = PhDen_da.sel(t=t).values
# if vmaxAuto is True:
# quad1 = ax.pcolormesh(kzg_interp, kxg_interp, PhDen_interp_vals[:-1, :-1], norm=colors.LogNorm(vmin=1e-3, vmax=vmax), cmap='inferno')
# quad1m = ax.pcolormesh(kzg_interp, -1 * kxg_interp, PhDen_interp_vals[:-1, :-1], norm=colors.LogNorm(vmin=1e-3, vmax=vmax), cmap='inferno')
# else:
# quad1 = ax.pcolormesh(kzg_interp, kxg_interp, PhDen_interp_vals[:-1, :-1], vmin=vmin, vmax=vmax, cmap='inferno')
# quad1m = ax.pcolormesh(kzg_interp, -1 * kxg_interp, PhDen_interp_vals[:-1, :-1], vmin=vmin, vmax=vmax, cmap='inferno')
# curve1 = ax.plot(Pph_Vals[tninds[tind]], 0, marker='x', markersize=10, zorder=11, color="xkcd:steel grey")[0]
# curve1m = ax.plot(Pimp_Vals[tninds[tind]], 0, marker='o', markersize=10, zorder=11, color="xkcd:apple green")[0]
# curve2 = ax.plot(mc, 0, marker='*', markersize=10, zorder=11, color="cyan")[0]
# def rfunc(k): return (pfs.omegak(k, mB, n0, gBB) - 2 * np.pi / tsVals[tninds[tind]])
# kroot = fsolve(rfunc, 1e8); kroot = kroot[kroot >= 0]
# patch_Excitation = plt.Circle((0, 0), kroot[0], edgecolor='red', facecolor='None', linewidth=2)
# ax.add_patch(patch_Excitation)
# # patch_klin = plt.Circle((0, 0), klin, edgecolor='tab:cyan', facecolor='None')
# # ax.add_patch(patch_klin)
# if IRpatch is True:
# patch_IR = plt.Circle((0, 0), kIRcut, edgecolor='#8c564b', facecolor='#8c564b')
# ax.add_patch(patch_IR)
# IR_text = ax.text(0.61, 0.75, r'Weight (IR patch): ' + '{:.2f}%'.format(norm_IRpercent[tninds[tind]]), transform=ax.transAxes, fontsize='small', color='#8c564b')
# rem_text = ax.text(0.61, 0.675, r'Weight (Rem vis): ' + '{:.2f}%'.format(norm_axpercent[tninds[tind]]), transform=ax.transAxes, fontsize='small', color='yellow')
# if FGRBool is True:
# if Lx == 60:
# Omegak_interp_vals = Omegak_da.sel(t=t).values
# else:
# Omegak_interp_vals, kg_interp, thg_interp = pfc.xinterp2D(Omegak_da.sel(t=t), 'k', 'th', interpmul)
# FGRmask0 = np.abs(Omegak_interp_vals) < FGRlim
# Omegak_interp_vals[FGRmask0] = 1
# Omegak_interp_vals[np.logical_not(FGRmask0)] = 0
# p = []
# p.append(ax.contour(kzg_interp, kxg_interp, Omegak_interp_vals, zorder=10, colors='tab:gray'))
# p.append(ax.contour(kzg_interp, -1 * kxg_interp, Omegak_interp_vals, zorder=10, colors='tab:gray'))
# p.append(ax.contour(Pimp_Vals[tind] - kzg_interp, -1 * kxg_interp, Omegak_interp_vals, zorder=10, colors='xkcd:military green'))
# p.append(ax.contour(Pimp_Vals[tind] - kzg_interp, -1 * (-1) * kxg_interp, Omegak_interp_vals, zorder=10, colors='xkcd:military green'))
# ax.set_xlim([-1 * axislim, axislim])
# ax.set_ylim([-1 * axislim, axislim])
# ax.grid(True, linewidth=0.5)
# ax.set_title(r'$t$ [$\xi/c$]: ' + '{:1.2f}'.format(tsVals[tninds[tind]] / tscale))
# ax.set_xlabel(r'$k_{z}$')
# ax.set_ylabel(r'$k_{x}$')
# curve1_LE = Line2D([0], [0], color='none', lw=0, marker='x', markerfacecolor='xkcd:steel grey', markeredgecolor='xkcd:steel grey', markersize=10)
# curve1m_LE = Line2D([0], [0], color='none', lw=0, marker='o', markerfacecolor='xkcd:apple green', markeredgecolor='xkcd:apple green', markersize=10)
# curve2_LE = Line2D([0], [0], color='none', lw=0, marker='*', markerfacecolor='cyan', markeredgecolor='cyan', markersize=10)
# patch_Excitation_LE = Line2D([0], [0], marker='o', color='none', markerfacecolor='none', markeredgecolor='red', markersize=20, mew=2)
# # patch_klin_LE = Line2D([0], [0], marker='o', color='none', markerfacecolor='none', markeredgecolor='tab:cyan', markersize=20, mew=2)
# patch_FGR_ph_LE = Ellipse(xy=(0, 0), width=0.2, height=0.1, angle=0, edgecolor='tab:gray', facecolor='none', lw=3)
# patch_FGR_imp_LE = Ellipse(xy=(0, 0), width=0.2, height=0.1, angle=0, edgecolor='xkcd:military green', facecolor='none', lw=3)
# if IRpatch is True:
# handles = (curve1_LE, curve1m_LE, curve2_LE, patch_Excitation_LE, patch_IR, patch_FGR_ph_LE, patch_FGR_imp_LE)
# labels = (r'$\langle P_{ph} \rangle$', r'$\langle P_{I} \rangle$', r'$(m_{I}c)\vec{e}_{k_{z}}$', r'$\omega_{|k|}^{-1}(\frac{2\pi}{t})$', r'Singular Region', 'FGR Phase Space (ph)', 'FGR Phase Space (imp)')
# else:
# handles = (curve1_LE, curve1m_LE, curve2_LE, patch_Excitation_LE, patch_FGR_ph_LE, patch_FGR_imp_LE)
# labels = (r'$\langle \mathbf{P}_{\rm ph} \rangle$', r'$\langle \mathbf{P}_{\rm imp} \rangle$', r'$(m_{I}c)\mathbf{e}_{k_{z}}$', r'$\omega_{\mathbf{k}}^{-1}(\frac{2\pi}{t})$', 'FGR Phase Space (ph)', 'FGR Phase Space (imp)')
# cbar_ax = fig.add_axes([0.9, 0.2, 0.02, 0.7])
# fig.colorbar(quad1, cax=cbar_ax, extend='both')
# fig.legend(handles, labels, ncol=3, loc='lower center', handler_map={Ellipse: HandlerEllipse()})
# fig.text(0.05, 0.97, '(a)', fontsize=20)
# fig.text(0.05, 0.68, '(c)', fontsize=20)
# fig.text(0.05, 0.38, '(e)', fontsize=20)
# fig.text(0.47, 0.97, '(b)', fontsize=20)
# fig.text(0.47, 0.68, '(d)', fontsize=20)
# fig.text(0.47, 0.38, '(f)', fontsize=20)
# fig.set_size_inches(12, 12)
# fig.subplots_adjust(bottom=0.17, top=0.95, right=0.85, hspace=0.6, wspace=0.4)
# # fig.savefig(figdatapath + '/Fig7.pdf', dpi=20)
# fig.savefig(figdatapath + '/Fig7.jpg', quality=100)
# # # FIG 7 - INDIVIDUAL PHONON MOMENTUM DISTRIBUTION PLOT SLICES (OLD)
# matplotlib.rcParams.update({'font.size': 18})
# class HandlerEllipse(HandlerPatch):
# def create_artists(self, legend, orig_handle,
# xdescent, ydescent, width, height, fontsize, trans):
# center = 0.5 * width - 0.5 * xdescent, 0.5 * height - 0.5 * ydescent
# p = Ellipse(xy=center, width=width + xdescent,
# height=height + ydescent)
# self.update_prop(p, orig_handle, legend)
# p.set_transform(trans)
# return [p]
# Pnorm_des = np.array([0.1, 0.5, 0.8, 1.3, 1.5, 1.8, 3.0, 3.5, 4.0, 5.0, 8.0])
# Pinds = np.zeros(Pnorm_des.size, dtype=int)
# for Pn_ind, Pn in enumerate(Pnorm_des):
# Pinds[Pn_ind] = np.abs(Pnorm - Pn).argmin().astype(int)
# print(PVals[Pinds])
# indP = Pinds[5]
# P = PVals[indP]
# print(aIBi, P)
# vmaxAuto = False
# FGRBool = True; FGRlim = 1e-2
# IRpatch = False
# shortTime = False; tau = 5
# # tau = 100
# # tsVals = tVals[tVals < tau]
# if Lx == 60:
# qds_PaIBi = xr.open_dataset(distdatapath + '/P_{:.3f}_aIBi_{:.2f}.nc'.format(P, aIBi))
# tsVals = qds_PaIBi.coords['tc'].values
# else:
# # qds_PaIBi = qds_aIBi.sel(t=tsVals, P=P)
# qds_PaIBi = qds_aIBi.sel(P=P)
# tsVals = qds_PaIBi.coords['t'].values
# if shortTime is True:
# tsVals = tsVals[tsVals <= tau]
# kgrid = Grid.Grid("SPHERICAL_2D"); kgrid.initArray_premade('k', qds_PaIBi.coords['k'].values); kgrid.initArray_premade('th', qds_PaIBi.coords['th'].values)
# kVec = kgrid.getArray('k')
# thVec = kgrid.getArray('th')
# kg, thg = np.meshgrid(kVec, thVec, indexing='ij')
# dVk = kgrid.dV()
# axislim = 1.2
# if shortTime is True:
# axislim = 1.01 * P
# # kIRcut = 0.13
# # axislim = 3
# kIRcut = 0.1
# if Lx == 60:
# kIRcut = 0.01
# if vmaxAuto is True:
# kIRcut = -1
# kIRmask = kg < kIRcut
# dVk_IR = dVk.reshape((len(kVec), len(thVec)))[kIRmask]
# axmask = (kg >= kIRcut) * (kg <= axislim)
# dVk_ax = dVk.reshape((len(kVec), len(thVec)))[axmask]
# Omegak_da = xr.DataArray(np.full((tsVals.size, len(kVec), len(thVec)), np.nan, dtype=float), coords=[tsVals, kVec, thVec], dims=['t', 'k', 'th'])
# PhDen_da = xr.DataArray(np.full((tsVals.size, len(kVec), len(thVec)), np.nan, dtype=float), coords=[tsVals, kVec, thVec], dims=['t', 'k', 'th'])
# Nph_Vals = np.zeros(tsVals.size)
# Pph_Vals = np.zeros(tsVals.size)
# Pimp_Vals = np.zeros(tsVals.size)
# norm_IRpercent = np.zeros(tsVals.size)
# norm_axpercent = np.zeros(tsVals.size)
# vmax = 0
# for tind, t in enumerate(tsVals):
# if Lx == 60:
# CSAmp_ds = (qds_PaIBi['Real_CSAmp'] + 1j * qds_PaIBi['Imag_CSAmp']).sel(tc=t)
# else:
# CSAmp_ds = (qds_PaIBi['Real_CSAmp'] + 1j * qds_PaIBi['Imag_CSAmp']).sel(t=t)
# CSAmp_Vals = CSAmp_ds.values
# Nph_Vals[tind] = qds_PaIBi['Nph'].sel(t=t).values
# Pph_Vals[tind] = qds_PaIBi['Pph'].sel(t=t).values
# Pimp_Vals[tind] = P - Pph_Vals[tind]
# Bk_2D_vals = CSAmp_Vals.reshape((len(kVec), len(thVec)))
# PhDen_da.sel(t=t)[:] = ((1 / Nph_Vals[tind]) * np.abs(Bk_2D_vals)**2).real.astype(float)
# norm_tot = np.dot(PhDen_da.sel(t=t).values.flatten(), dVk)
# PhDen_IR = PhDen_da.sel(t=t).values[kIRmask]
# norm_IR = np.dot(PhDen_IR.flatten(), dVk_IR.flatten())
# norm_IRpercent[tind] = 100 * np.abs(norm_IR / norm_tot)
# # print(norm_IRpercent[tind])
# PhDen_ax = PhDen_da.sel(t=t).values[axmask]
# norm_ax = np.dot(PhDen_ax.flatten(), dVk_ax.flatten())
# norm_axpercent[tind] = 100 * np.abs(norm_ax / norm_tot)
# Omegak_da.sel(t=t)[:] = pfs.Omega(kgrid, Pimp_Vals[tind], mI, mB, n0, gBB).reshape((len(kVec), len(thVec))).real.astype(float)
# # print(Omegak_da.sel(t=t))
# maxval = np.max(PhDen_da.sel(t=t).values[np.logical_not(kIRmask)])
# if maxval > vmax:
# vmax = maxval
# # Plot slices
# tnorm = tsVals / tscale
# tnVals_des = np.array([0.5, 8.0, 15.0, 25.0, 40.0, 75.0])
# tninds = np.zeros(tnVals_des.size, dtype=int)
# for tn_ind, tn in enumerate(tnVals_des):
# tninds[tn_ind] = np.abs(tnorm - tn).argmin().astype(int)
# tslices = tsVals[tninds]
# print(vmax)
# vmin = 0
# if (vmaxAuto is False) and (Lx != 60):
# vmax = 800
# if shortTime is True:
# vmax = 200
# interpmul = 5
# if Lx == 60:
# PhDen0_interp_vals = PhDen_da.isel(t=0).values
# kxg_interp = kg * np.sin(thg)
# kzg_interp = kg * np.cos(thg)
# else:
# PhDen0_interp_vals, kg_interp, thg_interp = pfc.xinterp2D(PhDen_da.isel(t=0), 'k', 'th', interpmul)
# kxg_interp = kg_interp * np.sin(thg_interp)
# kzg_interp = kg_interp * np.cos(thg_interp)
# vmax = 3000
# fig, axes = plt.subplots(nrows=3, ncols=2)
# for tind, t in enumerate(tslices):
# if tind == 0:
# ax = axes[0, 0]
# elif tind == 1:
# ax = axes[0, 1]
# if tind == 2:
# ax = axes[1, 0]
# if tind == 3:
# ax = axes[1, 1]
# if tind == 4:
# ax = axes[2, 0]
# if tind == 5:
# ax = axes[2, 1]
# PhDen_interp_vals = PhDen_da.sel(t=t).values
# if vmaxAuto is True:
# quad1 = ax.pcolormesh(kzg_interp, kxg_interp, PhDen_interp_vals[:-1, :-1], norm=colors.LogNorm(vmin=1e-3, vmax=vmax), cmap='inferno')
# quad1m = ax.pcolormesh(kzg_interp, -1 * kxg_interp, PhDen_interp_vals[:-1, :-1], norm=colors.LogNorm(vmin=1e-3, vmax=vmax), cmap='inferno')
# else:
# quad1 = ax.pcolormesh(kzg_interp, kxg_interp, PhDen_interp_vals[:-1, :-1], vmin=vmin, vmax=vmax, cmap='inferno')
# quad1m = ax.pcolormesh(kzg_interp, -1 * kxg_interp, PhDen_interp_vals[:-1, :-1], vmin=vmin, vmax=vmax, cmap='inferno')
# curve1 = ax.plot(Pph_Vals[tninds[tind]], 0, marker='x', markersize=10, zorder=11, color="xkcd:steel grey")[0]
# curve1m = ax.plot(Pimp_Vals[tninds[tind]], 0, marker='o', markersize=10, zorder=11, color="xkcd:apple green")[0]
# curve2 = ax.plot(mc, 0, marker='*', markersize=10, zorder=11, color="cyan")[0]
# def rfunc(k): return (pfs.omegak(k, mB, n0, gBB) - 2 * np.pi / tsVals[tninds[tind]])
# kroot = fsolve(rfunc, 1e8); kroot = kroot[kroot >= 0]
# patch_Excitation = plt.Circle((0, 0), kroot[0], edgecolor='red', facecolor='None', linewidth=2)
# ax.add_patch(patch_Excitation)
# patch_klin = plt.Circle((0, 0), klin, edgecolor='tab:cyan', facecolor='None')
# ax.add_patch(patch_klin)
# if IRpatch is True:
# patch_IR = plt.Circle((0, 0), kIRcut, edgecolor='#8c564b', facecolor='#8c564b')
# ax.add_patch(patch_IR)
# IR_text = ax.text(0.61, 0.75, r'Weight (IR patch): ' + '{:.2f}%'.format(norm_IRpercent[tninds[tind]]), transform=ax.transAxes, fontsize='small', color='#8c564b')
# rem_text = ax.text(0.61, 0.675, r'Weight (Rem vis): ' + '{:.2f}%'.format(norm_axpercent[tninds[tind]]), transform=ax.transAxes, fontsize='small', color='yellow')
# if FGRBool is True:
# if Lx == 60:
# Omegak_interp_vals = Omegak_da.sel(t=t).values
# else:
# Omegak_interp_vals, kg_interp, thg_interp = pfc.xinterp2D(Omegak_da.sel(t=t), 'k', 'th', interpmul)
# FGRmask0 = np.abs(Omegak_interp_vals) < FGRlim
# Omegak_interp_vals[FGRmask0] = 1
# Omegak_interp_vals[np.logical_not(FGRmask0)] = 0
# p = []
# p.append(ax.contour(kzg_interp, kxg_interp, Omegak_interp_vals, zorder=10, colors='tab:gray'))
# p.append(ax.contour(kzg_interp, -1 * kxg_interp, Omegak_interp_vals, zorder=10, colors='tab:gray'))
# p.append(ax.contour(Pimp_Vals[tind] - kzg_interp, -1 * kxg_interp, Omegak_interp_vals, zorder=10, colors='xkcd:military green'))
# p.append(ax.contour(Pimp_Vals[tind] - kzg_interp, -1 * (-1) * kxg_interp, Omegak_interp_vals, zorder=10, colors='xkcd:military green'))
# ax.set_xlim([-1 * axislim, axislim])
# ax.set_ylim([-1 * axislim, axislim])
# ax.grid(True, linewidth=0.5)
# ax.set_title(r'$t$ [$\xi/c$]: ' + '{:1.2f}'.format(tsVals[tninds[tind]] / tscale))
# ax.set_xlabel(r'$k_{z}$')
# ax.set_ylabel(r'$k_{x}$')
# curve1_LE = Line2D([0], [0], color='none', lw=0, marker='x', markerfacecolor='xkcd:steel grey', markeredgecolor='xkcd:steel grey', markersize=10)
# curve1m_LE = Line2D([0], [0], color='none', lw=0, marker='o', markerfacecolor='xkcd:apple green', markeredgecolor='xkcd:apple green', markersize=10)
# curve2_LE = Line2D([0], [0], color='none', lw=0, marker='*', markerfacecolor='cyan', markeredgecolor='cyan', markersize=10)
# patch_Excitation_LE = Line2D([0], [0], marker='o', color='none', markerfacecolor='none', markeredgecolor='red', markersize=20, mew=2)
# patch_klin_LE = Line2D([0], [0], marker='o', color='none', markerfacecolor='none', markeredgecolor='tab:cyan', markersize=20, mew=2)
# patch_FGR_ph_LE = Ellipse(xy=(0, 0), width=0.2, height=0.1, angle=0, edgecolor='tab:gray', facecolor='none', lw=3)
# patch_FGR_imp_LE = Ellipse(xy=(0, 0), width=0.2, height=0.1, angle=0, edgecolor='xkcd:military green', facecolor='none', lw=3)
# if IRpatch is True:
# handles = (curve1_LE, curve1m_LE, curve2_LE, patch_Excitation_LE, patch_IR, patch_klin_LE, patch_FGR_ph_LE, patch_FGR_imp_LE)
# labels = (r'$\langle P_{ph} \rangle$', r'$\langle P_{I} \rangle$', r'$(m_{I}c)\vec{e}_{k_{z}}$', r'$\omega_{|k|}^{-1}(\frac{2\pi}{t})$', r'Singular Region', r'Linear Excitations', 'FGR Phase Space (ph)', 'FGR Phase Space (imp)')
# else:
# handles = (curve1_LE, curve1m_LE, curve2_LE, patch_Excitation_LE, patch_klin_LE, patch_FGR_ph_LE, patch_FGR_imp_LE)
# labels = (r'$\langle P_{ph} \rangle$', r'$\langle P_{I} \rangle$', r'$(m_{I}c)\vec{e}_{k_{z}}$', r'$\omega_{|k|}^{-1}(\frac{2\pi}{t})$', r'Linear Excitations', 'FGR Phase Space (ph)', 'FGR Phase Space (imp)')
# cbar_ax = fig.add_axes([0.9, 0.2, 0.02, 0.7])
# fig.colorbar(quad1, cax=cbar_ax, extend='both')
# fig.legend(handles, labels, ncol=4, loc='lower center', handler_map={Ellipse: HandlerEllipse()})
# fig.text(0.05, 0.97, '(a)', fontsize=20)
# fig.text(0.05, 0.68, '(c)', fontsize=20)
# fig.text(0.05, 0.38, '(e)', fontsize=20)
# fig.text(0.47, 0.97, '(b)', fontsize=20)
# fig.text(0.47, 0.68, '(d)', fontsize=20)
# fig.text(0.47, 0.38, '(f)', fontsize=20)
# fig.set_size_inches(12, 12)
# fig.subplots_adjust(bottom=0.17, top=0.95, right=0.85, hspace=0.6, wspace=0.4)
# # fig.savefig(figdatapath + '/Fig7.pdf', dpi=20)
# fig.savefig(figdatapath + '/Fig7.jpg', quality=20)
# plt.show()
| 46.869385 | 238 | 0.580168 | 9,507 | 67,820 | 3.984012 | 0.082045 | 0.006204 | 0.003696 | 0.008449 | 0.767108 | 0.728694 | 0.696774 | 0.673355 | 0.648564 | 0.633726 | 0 | 0.041338 | 0.232395 | 67,820 | 1,446 | 239 | 46.901798 | 0.686221 | 0.826305 | 0 | 0.019048 | 0 | 0 | 0.061397 | 0.021622 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.247619 | 0 | 0.247619 | 0.085714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
81221dad363411cd26893541cce65b95eee33b46 | 167 | py | Python | boundaries/tests/fixtures/foo_definition.py | paultag/represent-boundaries | f04c34e2329a5d464b7ad1ebae10e52c761a4e05 | [
"MIT"
] | null | null | null | boundaries/tests/fixtures/foo_definition.py | paultag/represent-boundaries | f04c34e2329a5d464b7ad1ebae10e52c761a4e05 | [
"MIT"
] | null | null | null | boundaries/tests/fixtures/foo_definition.py | paultag/represent-boundaries | f04c34e2329a5d464b7ad1ebae10e52c761a4e05 | [
"MIT"
] | null | null | null | from datetime import date
import boundaries
# Test fixture: register a minimal boundary-set definition named "Districts".
boundaries.register(
    'Districts',
    file='foo.shp',
    name_func=boundaries.attr('id'),
    last_updated=date(2000, 1, 1),
)
| 16.7 | 34 | 0.730539 | 23 | 167 | 5.217391 | 0.782609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.041096 | 0.125749 | 167 | 9 | 35 | 18.555556 | 0.780822 | 0 | 0 | 0 | 0 | 0 | 0.107784 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.285714 | 0 | 0.285714 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
812dfbb103ad263f6325afcf70e7ea639b86ed0c | 1,802 | py | Python | sdk/cogscale/client/results.py | CognitiveScale/industry-models | 9afc26f0b209fbec7bfd41e37d6ff7d86ea7d1c3 | [
"Apache-2.0"
] | null | null | null | sdk/cogscale/client/results.py | CognitiveScale/industry-models | 9afc26f0b209fbec7bfd41e37d6ff7d86ea7d1c3 | [
"Apache-2.0"
] | null | null | null | sdk/cogscale/client/results.py | CognitiveScale/industry-models | 9afc26f0b209fbec7bfd41e37d6ff7d86ea7d1c3 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2016 CognitiveScale, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from cogscale.util.attribute_getter import AttributeGetter
class Success(AttributeGetter):
    """
    Result wrapper returned by most operations when the request succeeds.

    The resource itself is available under its name (e.g. ``dataset``,
    ``project``)::

        result = Dataset.read({..})
        if result.is_success:
            dataset = result.dataset
        else:
            print result.error
    """

    @property
    def is_success(self):
        """Always True: this result represents a successful server response."""
        return True
class Error(AttributeGetter):
    """
    Result wrapper returned by most operations when the server reports a
    validation error. Details are available via the ``error`` attribute.
    """

    @property
    def is_success(self):
        """Always False: this result represents a failed request."""
        return False
def create_error(r):
    """Build an :class:`Error` result from a failed HTTP response.

    Tries to pull a server-supplied message out of the JSON body's
    ``"error"`` field; if the body is not valid JSON, or is JSON without
    that field, the raw response text is used instead.

    :param r: HTTP response object exposing ``json()``, ``status_code``
        and ``text`` (e.g. a ``requests.Response``).
    :returns: an :class:`Error` describing the failure.
    """
    msg = None
    try:
        # .get() (rather than ["error"]) keeps a KeyError from escaping
        # when the body is JSON but has no "error" field.
        msg = r.json().get("error")
    except ValueError:
        # Body was not JSON; fall back to the raw text below.
        pass
    return Error({"error": "Server returned status code %d: %s" % (r.status_code, msg or r.text)})
def create_error_from_json(j):
return Error({"error": "Server returned error %s" % j["error"]}) | 30.033333 | 109 | 0.680355 | 247 | 1,802 | 4.919028 | 0.489879 | 0.049383 | 0.021399 | 0.026337 | 0.312757 | 0.263374 | 0.263374 | 0.263374 | 0.263374 | 0.263374 | 0 | 0.005785 | 0.232519 | 1,802 | 60 | 110 | 30.033333 | 0.87274 | 0.602664 | 0 | 0.210526 | 0 | 0 | 0.122642 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.210526 | false | 0.052632 | 0.052632 | 0.052632 | 0.578947 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 3 |
813b49aa92c93c20eeef76ab8cec1fc1a66b6bf0 | 312 | py | Python | brian2/tests/features/__init__.py | rgerkin/brian2 | 34761a58b0d4c2275194e648449419b3dd73286b | [
"BSD-2-Clause"
] | 1 | 2019-08-17T21:19:03.000Z | 2019-08-17T21:19:03.000Z | brian2/tests/features/__init__.py | rgerkin/brian2 | 34761a58b0d4c2275194e648449419b3dd73286b | [
"BSD-2-Clause"
] | null | null | null | brian2/tests/features/__init__.py | rgerkin/brian2 | 34761a58b0d4c2275194e648449419b3dd73286b | [
"BSD-2-Clause"
] | null | null | null | from __future__ import absolute_import
# Names exported by ``from brian2.tests.features import *``.
__all__ = [
    'FeatureTest',
    'SpeedTest',
    'InaccuracyError',
    'Configuration',
    'run_feature_tests',
]
from .base import *
from . import neurongroup
from . import synapses
from . import monitors
from . import input
from . import speed
| 22.285714 | 38 | 0.666667 | 31 | 312 | 6.354839 | 0.580645 | 0.253807 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25641 | 312 | 13 | 39 | 24 | 0.849138 | 0 | 0 | 0 | 0 | 0 | 0.208333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.583333 | 0 | 0.583333 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
d4901b60f5e53409fa7c847184375d49a9639f6c | 109 | py | Python | flatto/__init__.py | delta114514/flatto | 3e0fed5df42e963dd1383c77bbc8be75d4342e68 | [
"MIT"
] | 1 | 2020-01-03T05:28:27.000Z | 2020-01-03T05:28:27.000Z | flatto/__init__.py | delta114514/flatto | 3e0fed5df42e963dd1383c77bbc8be75d4342e68 | [
"MIT"
] | null | null | null | flatto/__init__.py | delta114514/flatto | 3e0fed5df42e963dd1383c77bbc8be75d4342e68 | [
"MIT"
] | null | null | null | from .__about__ import __version__
from .flatto import flatten
__all__ = [
__version__,
"flatten"
] | 13.625 | 34 | 0.715596 | 11 | 109 | 5.636364 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.211009 | 109 | 8 | 35 | 13.625 | 0.72093 | 0 | 0 | 0 | 0 | 0 | 0.063636 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
d49bca40fffeb76e78a4f55ce9c6477e2415a92c | 6,081 | py | Python | src/domainClient/models/domain_search_service_v2_model_domain_search_contracts_v2_price_details.py | diabolical-ninja/smart-property-search | 0931c7c8195ec21cbd56768c9c84cea2927a9e1d | [
"MIT"
] | 5 | 2021-04-12T04:10:42.000Z | 2021-04-28T05:54:22.000Z | src/domainClient/models/domain_search_service_v2_model_domain_search_contracts_v2_price_details.py | diabolical-ninja/smart-property-search | 0931c7c8195ec21cbd56768c9c84cea2927a9e1d | [
"MIT"
] | 35 | 2020-05-26T14:21:37.000Z | 2022-03-29T16:14:42.000Z | src/domainClient/models/domain_search_service_v2_model_domain_search_contracts_v2_price_details.py | diabolical-ninja/smart-property-search | 0931c7c8195ec21cbd56768c9c84cea2927a9e1d | [
"MIT"
] | 2 | 2020-05-26T14:02:12.000Z | 2022-01-10T08:19:49.000Z | # coding: utf-8
"""
Domain Group API V1
Provides public access to Domain's microservices # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class DomainSearchServiceV2ModelDomainSearchContractsV2PriceDetails(object):
    """Price details model from the Domain search API (v2 contracts).

    NOTE: originally auto generated by the swagger code generator program.

    Attributes:
        swagger_types (dict): maps each attribute name to its declared type.
        attribute_map (dict): maps each attribute name to its JSON key.
    """

    swagger_types = {
        'price': 'int',
        'price_from': 'int',
        'price_to': 'int',
        'display_price': 'str'
    }

    attribute_map = {
        'price': 'price',
        'price_from': 'priceFrom',
        'price_to': 'priceTo',
        'display_price': 'displayPrice'
    }

    def __init__(self, price=None, price_from=None, price_to=None, display_price=None):  # noqa: E501
        """Create a price-details model; every field is optional."""
        self._price = None
        self._price_from = None
        self._price_to = None
        self._display_price = None
        self.discriminator = None
        # Route supplied values through the property setters; omitted
        # fields keep their backing attribute as None.
        if price is not None:
            self.price = price
        if price_from is not None:
            self.price_from = price_from
        if price_to is not None:
            self.price_to = price_to
        if display_price is not None:
            self.display_price = display_price

    @property
    def price(self):
        """The price field of this model.

        :rtype: int
        """
        return self._price

    @price.setter
    def price(self, price):
        """Set the price field of this model.

        :type price: int
        """
        self._price = price

    @property
    def price_from(self):
        """The price_from field of this model.

        :rtype: int
        """
        return self._price_from

    @price_from.setter
    def price_from(self, price_from):
        """Set the price_from field of this model.

        :type price_from: int
        """
        self._price_from = price_from

    @property
    def price_to(self):
        """The price_to field of this model.

        :rtype: int
        """
        return self._price_to

    @price_to.setter
    def price_to(self, price_to):
        """Set the price_to field of this model.

        :type price_to: int
        """
        self._price_to = price_to

    @property
    def display_price(self):
        """The display_price field of this model.

        :rtype: str
        """
        return self._display_price

    @display_price.setter
    def display_price(self, display_price):
        """Set the display_price field of this model.

        :type display_price: str
        """
        self._display_price = display_price

    def to_dict(self):
        """Return the model properties as a plain dict."""
        result = {}
        for name in self.swagger_types:
            value = getattr(self, name)
            if isinstance(value, list):
                result[name] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[name] = value.to_dict()
            elif isinstance(value, dict):
                result[name] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[name] = value
        # Kept for parity with generated subclasses that also derive from dict.
        if issubclass(DomainSearchServiceV2ModelDomainSearchContractsV2PriceDetails, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two models are equal when all of their attributes are equal."""
        return (isinstance(other, DomainSearchServiceV2ModelDomainSearchContractsV2PriceDetails)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return not self == other
| 31.345361 | 132 | 0.63756 | 606 | 6,081 | 6.217822 | 0.194719 | 0.045382 | 0.284501 | 0.226115 | 0.505308 | 0.416932 | 0.353769 | 0.348461 | 0.170648 | 0.098195 | 0 | 0.02155 | 0.282684 | 6,081 | 193 | 133 | 31.507772 | 0.842274 | 0.394343 | 0 | 0.068966 | 1 | 0 | 0.044473 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.16092 | false | 0 | 0.034483 | 0 | 0.344828 | 0.022989 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
d4c065fcf8aa39ba592c14e7c2e084fc2d4be65c | 1,997 | py | Python | kik_unofficial/protobuf/bridgetest/v1/authenticated_service_pb2.py | 3dik/kik-bot-api-unofficial | 78073fc8af50bf27b8c261533f9cde30f745412e | [
"MIT"
] | 120 | 2017-07-07T14:07:14.000Z | 2022-02-22T03:14:23.000Z | kik_unofficial/protobuf/bridgetest/v1/authenticated_service_pb2.py | TheGreatCodeholio/kik-bot-api-unofficial | c1f02b735dcb3e1d1b5e29a1ca04dd2f66557248 | [
"MIT"
] | 173 | 2017-07-07T04:31:19.000Z | 2021-11-20T02:59:20.000Z | kik_unofficial/protobuf/bridgetest/v1/authenticated_service_pb2.py | TheGreatCodeholio/kik-bot-api-unofficial | c1f02b735dcb3e1d1b5e29a1ca04dd2f66557248 | [
"MIT"
] | 101 | 2017-07-11T19:43:17.000Z | 2022-03-03T02:04:05.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bridgetest/v1/authenticated_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import kik_unofficial.protobuf.common_rpc_pb2 as common__rpc__pb2
from kik_unofficial.protobuf.bridgetest.v1 import bridgetest_common_pb2 as bridgetest_dot_v1_dot_bridgetest__common__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='bridgetest/v1/authenticated_service.proto',
package='mobile.bridgetest.v1',
syntax='proto3',
serialized_pb=_b('\n)bridgetest/v1/authenticated_service.proto\x12\x14mobile.bridgetest.v1\x1a\x10\x63ommon_rpc.proto\x1a%bridgetest/v1/bridgetest_common.proto2\xcd\x02\n\rAuthenticated\x12M\n\x04\x45\x63ho\x12!.common.bridgetest.v1.EchoRequest\x1a\".common.bridgetest.v1.EchoResponse\x12\x45\n\x07Workout\x12$.common.bridgetest.v1.WorkoutRequest\x1a\x14.common.VoidResponse\x12\x36\n\tException\x12\x13.common.VoidRequest\x1a\x14.common.VoidResponse\x12n\n\x0fValidateHeaders\x12,.common.bridgetest.v1.ValidateHeadersRequest\x1a-.common.bridgetest.v1.ValidateHeadersResponseBh\n\x12\x63om.kik.bridgetestZRgithub.com/kikinteractive/xiphias-api-mobile/generated/go/bridgetest/v1;bridgetestb\x06proto3')
,
dependencies=[common__rpc__pb2.DESCRIPTOR,bridgetest_dot_v1_dot_bridgetest__common__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\022com.kik.bridgetestZRgithub.com/kikinteractive/xiphias-api-mobile/generated/go/bridgetest/v1;bridgetest'))
# @@protoc_insertion_point(module_scope)
| 55.472222 | 703 | 0.84026 | 267 | 1,997 | 6.044944 | 0.393258 | 0.104089 | 0.055762 | 0.074349 | 0.26456 | 0.153656 | 0.153656 | 0.153656 | 0.095415 | 0.095415 | 0 | 0.047644 | 0.054081 | 1,997 | 35 | 704 | 57.057143 | 0.806776 | 0.089634 | 0 | 0 | 1 | 0.1 | 0.475455 | 0.457805 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
d4cc1f3b3dd7ccbbaf31286955b08b336c5c3430 | 15,792 | py | Python | MetaScreener/external_sw/mgltools/MGLToolsPckgs/AppFramework/ColorMaps/rwb256_map.py | bio-hpc/metascreener | 6900497629f601c4b6c0c37da26de58ffa221988 | [
"Apache-2.0"
] | 8 | 2021-12-14T21:30:01.000Z | 2022-02-14T11:30:03.000Z | MetaScreener/external_sw/mgltools/MGLToolsPckgs/AppFramework/ColorMaps/rwb256_map.py | bio-hpc/metascreener | 6900497629f601c4b6c0c37da26de58ffa221988 | [
"Apache-2.0"
] | null | null | null | MetaScreener/external_sw/mgltools/MGLToolsPckgs/AppFramework/ColorMaps/rwb256_map.py | bio-hpc/metascreener | 6900497629f601c4b6c0c37da26de58ffa221988 | [
"Apache-2.0"
] | null | null | null | from DejaVu.colorMap import ColorMap
from numpy import array
# Red-white-blue colormap with 256 entries; the ramp data and value range
# are supplied below via cm.configure().
cm = ColorMap('rwb256')
cfg = {'name': 'rwb256', 'ramp': array([[ 1. , 0. , 0. , 1. ],
[ 0.00798478, 0.006 , 1. , 1. ],
[ 0.01297748, 0.011 , 1. , 1. ],
[ 0.02495463, 0.023 , 1. , 1. ],
[ 0.03094184, 0.029 , 1. , 1. ],
[ 0.03593225, 0.034 , 1. , 1. ],
[ 0.04790861, 0.046 , 1. , 1. ],
[ 0.0528977 , 0.051 , 1. , 1. ],
[ 0.06487406, 0.063 , 1. , 1. ],
[ 0.07086179, 0.069 , 1. , 1. ],
[ 0.07585198, 0.074 , 1. , 1. ],
[ 0.0878282 , 0.086 , 1. , 1. ],
[ 0.09281833, 0.091 , 1. , 1. ],
[ 0.09880612, 0.097 , 1. , 1. ],
[ 0.11078227, 0.109 , 1. , 1. ],
[ 0.11577237, 0.114 , 1. , 1. ],
[ 0.12774806, 0.126 , 1. , 1. ],
[ 0.13273776, 0.131 , 1. , 1. ],
[ 0.13872601, 0.13699999, 1. , 1. ],
[ 0.15070179, 0.149 , 1. , 1. ],
[ 0.15569188, 0.154 , 1. , 1. ],
[ 0.16168009, 0.16 , 1. , 1. ],
[ 0.17265806, 0.171 , 1. , 1. ],
[ 0.17864597, 0.177 , 1. , 1. ],
[ 0.19062206, 0.189 , 1. , 1. ],
[ 0.1956121 , 0.19400001, 1. , 1. ],
[ 0.20160003, 0.2 , 1. , 1. ],
[ 0.212578 , 0.211 , 1. , 1. ],
[ 0.21856593, 0.21699999, 1. , 1. ],
[ 0.22455387, 0.223 , 1. , 1. ],
[ 0.23553205, 0.234 , 1. , 1. ],
[ 0.24151999, 0.23999999, 1. , 1. ],
[ 0.25249797, 0.25099999, 1. , 1. ],
[ 0.25848609, 0.257 , 1. , 1. ],
[ 0.26447403, 0.26300001, 1. , 1. ],
[ 0.27545202, 0.27399999, 1. , 1. ],
[ 0.28143996, 0.28 , 1. , 1. ],
[ 0.28742805, 0.28600001, 1. , 1. ],
[ 0.29840603, 0.29699999, 1. , 1. ],
[ 0.30439401, 0.303 , 1. , 1. ],
[ 0.31537199, 0.31400001, 1. , 1. ],
[ 0.32135993, 0.31999999, 1. , 1. ],
[ 0.3273479 , 0.32600001, 1. , 1. ],
[ 0.33832601, 0.33700001, 1. , 1. ],
[ 0.34431398, 0.34299999, 1. , 1. ],
[ 0.35529196, 0.354 , 1. , 1. ],
[ 0.36128005, 0.36000001, 1. , 1. ],
[ 0.367268 , 0.366 , 1. , 1. ],
[ 0.37824601, 0.377 , 1. , 1. ],
[ 0.38423407, 0.38299999, 1. , 1. ],
[ 0.39022204, 0.389 , 1. , 1. ],
[ 0.40119994, 0.40000001, 1. , 1. ],
[ 0.407188 , 0.40599999, 1. , 1. ],
[ 0.41816598, 0.417 , 1. , 1. ],
[ 0.42415395, 0.42300001, 1. , 1. ],
[ 0.43014202, 0.42899999, 1. , 1. ],
[ 0.44112 , 0.44 , 1. , 1. ],
[ 0.44710797, 0.44600001, 1. , 1. ],
[ 0.45209798, 0.45100001, 1. , 1. ],
[ 0.46407402, 0.463 , 1. , 1. ],
[ 0.47006199, 0.46900001, 1. , 1. ],
[ 0.48104 , 0.47999999, 1. , 1. ],
[ 0.48702797, 0.486 , 1. , 1. ],
[ 0.49201798, 0.491 , 1. , 1. ],
[ 0.50399399, 0.50300002, 1. , 1. ],
[ 0.50998199, 0.509 , 1. , 1. ],
[ 0.51497197, 0.514 , 1. , 1. ],
[ 0.52694798, 0.52600002, 1. , 1. ],
[ 0.53193796, 0.53100002, 1. , 1. ],
[ 0.54391402, 0.54299998, 1. , 1. ],
[ 0.54990202, 0.54900002, 1. , 1. ],
[ 0.554892 , 0.55400002, 1. , 1. ],
[ 0.56686801, 0.56599998, 1. , 1. ],
[ 0.57185799, 0.57099998, 1. , 1. ],
[ 0.57784599, 0.57700002, 1. , 1. ],
[ 0.58982199, 0.58899999, 1. , 1. ],
[ 0.59481204, 0.59399998, 1. , 1. ],
[ 0.60678798, 0.60600001, 1. , 1. ],
[ 0.61177802, 0.611 , 1. , 1. ],
[ 0.61776602, 0.61699998, 1. , 1. ],
[ 0.62974203, 0.62900001, 1. , 1. ],
[ 0.63473201, 0.634 , 1. , 1. ],
[ 0.64072001, 0.63999999, 1. , 1. ],
[ 0.65169799, 0.65100002, 1. , 1. ],
[ 0.657686 , 0.65700001, 1. , 1. ],
[ 0.669662 , 0.66900003, 1. , 1. ],
[ 0.67465198, 0.67400002, 1. , 1. ],
[ 0.68063998, 0.68000001, 1. , 1. ],
[ 0.69161803, 0.69099998, 1. , 1. ],
[ 0.69760603, 0.69700003, 1. , 1. ],
[ 0.70958197, 0.70899999, 1. , 1. ],
[ 0.71457201, 0.71399999, 1. , 1. ],
[ 0.72056001, 0.72000003, 1. , 1. ],
[ 0.731538 , 0.73100001, 1. , 1. ],
[ 0.737526 , 0.73699999, 1. , 1. ],
[ 0.743514 , 0.74299997, 1. , 1. ],
[ 0.75449198, 0.75400001, 1. , 1. ],
[ 0.76047999, 0.75999999, 1. , 1. ],
[ 0.77145803, 0.77100003, 1. , 1. ],
[ 0.77744597, 0.77700001, 1. , 1. ],
[ 0.78343397, 0.78299999, 1. , 1. ],
[ 0.79441202, 0.79400003, 1. , 1. ],
[ 0.80040002, 0.80000001, 1. , 1. ],
[ 0.80638802, 0.80599999, 1. , 1. ],
[ 0.817366 , 0.81699997, 1. , 1. ],
[ 0.82335401, 0.82300001, 1. , 1. ],
[ 0.83433199, 0.83399999, 1. , 1. ],
[ 0.84031999, 0.83999997, 1. , 1. ],
[ 0.84630799, 0.84600002, 1. , 1. ],
[ 0.85728598, 0.85699999, 1. , 1. ],
[ 0.86327398, 0.86299998, 1. , 1. ],
[ 0.86926198, 0.86900002, 1. , 1. ],
[ 0.88024002, 0.88 , 1. , 1. ],
[ 0.88622802, 0.88599998, 1. , 1. ],
[ 0.89720601, 0.89700001, 1. , 1. ],
[ 0.90319401, 0.903 , 1. , 1. ],
[ 0.90918201, 0.90899998, 1. , 1. ],
[ 0.92016 , 0.92000002, 1. , 1. ],
[ 0.926148 , 0.926 , 1. , 1. ],
[ 0.93113798, 0.93099999, 1. , 1. ],
[ 0.94311398, 0.94300002, 1. , 1. ],
[ 0.94910198, 0.949 , 1. , 1. ],
[ 0.96008003, 0.95999998, 1. , 1. ],
[ 0.96606803, 0.96600002, 1. , 1. ],
[ 0.97105801, 0.97100002, 1. , 1. ],
[ 0.98303401, 0.98299998, 1. , 1. ],
[ 0.98902202, 0.98900002, 1. , 1. ],
[ 1. , 1. , 1. , 1. ],
[ 1. , 1. , 1. , 1. ],
[ 1. , 0.99400002, 0.99400002, 1. ],
[ 1. , 0.98900002, 0.98900002, 1. ],
[ 1. , 0.977 , 0.977 , 1. ],
[ 1. , 0.97100002, 0.97100002, 1. ],
[ 1. , 0.96600002, 0.96600002, 1. ],
[ 1. , 0.954 , 0.954 , 1. ],
[ 1. , 0.949 , 0.949 , 1. ],
[ 1. , 0.93699998, 0.93699998, 1. ],
[ 1. , 0.93099999, 0.93099999, 1. ],
[ 1. , 0.926 , 0.926 , 1. ],
[ 1. , 0.91399997, 0.91399997, 1. ],
[ 1. , 0.90899998, 0.90899998, 1. ],
[ 1. , 0.903 , 0.903 , 1. ],
[ 1. , 0.89099997, 0.89099997, 1. ],
[ 1. , 0.88599998, 0.88599998, 1. ],
[ 1. , 0.87400001, 0.87400001, 1. ],
[ 1. , 0.86900002, 0.86900002, 1. ],
[ 1. , 0.86299998, 0.86299998, 1. ],
[ 1. , 0.85100001, 0.85100001, 1. ],
[ 1. , 0.84600002, 0.84600002, 1. ],
[ 1. , 0.83999997, 0.83999997, 1. ],
[ 1. , 0.829 , 0.829 , 1. ],
[ 1. , 0.82300001, 0.82300001, 1. ],
[ 1. , 0.81099999, 0.81099999, 1. ],
[ 1. , 0.80599999, 0.80599999, 1. ],
[ 1. , 0.80000001, 0.80000001, 1. ],
[ 1. , 0.78899997, 0.78899997, 1. ],
[ 1. , 0.78299999, 0.78299999, 1. ],
[ 1. , 0.77700001, 0.77700001, 1. ],
[ 1. , 0.76599997, 0.76599997, 1. ],
[ 1. , 0.75999999, 0.75999999, 1. ],
[ 1. , 0.74900001, 0.74900001, 1. ],
[ 1. , 0.74299997, 0.74299997, 1. ],
[ 1. , 0.73699999, 0.73699999, 1. ],
[ 1. , 0.72600001, 0.72600001, 1. ],
[ 1. , 0.72000003, 0.72000003, 1. ],
[ 1. , 0.71399999, 0.71399999, 1. ],
[ 1. , 0.70300001, 0.70300001, 1. ],
[ 1. , 0.69700003, 0.69700003, 1. ],
[ 1. , 0.68599999, 0.68599999, 1. ],
[ 1. , 0.68000001, 0.68000001, 1. ],
[ 1. , 0.67400002, 0.67400002, 1. ],
[ 1. , 0.66299999, 0.66299999, 1. ],
[ 1. , 0.65700001, 0.65700001, 1. ],
[ 1. , 0.64600003, 0.64600003, 1. ],
[ 1. , 0.63999999, 0.63999999, 1. ],
[ 1. , 0.634 , 0.634 , 1. ],
[ 1. , 0.62300003, 0.62300003, 1. ],
[ 1. , 0.61699998, 0.61699998, 1. ],
[ 1. , 0.611 , 0.611 , 1. ],
[ 1. , 0.60000002, 0.60000002, 1. ],
[ 1. , 0.59399998, 0.59399998, 1. ],
[ 1. , 0.583 , 0.583 , 1. ],
[ 1. , 0.57700002, 0.57700002, 1. ],
[ 1. , 0.57099998, 0.57099998, 1. ],
[ 1. , 0.56 , 0.56 , 1. ],
[ 1. , 0.55400002, 0.55400002, 1. ],
[ 1. , 0.54900002, 0.54900002, 1. ],
[ 1. , 0.537 , 0.537 , 1. ],
[ 1. , 0.53100002, 0.53100002, 1. ],
[ 1. , 0.51999998, 0.51999998, 1. ],
[ 1. , 0.514 , 0.514 , 1. ],
[ 1. , 0.509 , 0.509 , 1. ],
[ 1. , 0.49700001, 0.49700001, 1. ],
[ 1. , 0.491 , 0.491 , 1. ],
[ 1. , 0.486 , 0.486 , 1. ],
[ 1. , 0.47400001, 0.47400001, 1. ],
[ 1. , 0.46900001, 0.46900001, 1. ],
[ 1. , 0.45699999, 0.45699999, 1. ],
[ 1. , 0.45100001, 0.45100001, 1. ],
[ 1. , 0.44600001, 0.44600001, 1. ],
[ 1. , 0.43399999, 0.43399999, 1. ],
[ 1. , 0.42899999, 0.42899999, 1. ],
[ 1. , 0.42300001, 0.42300001, 1. ],
[ 1. , 0.41100001, 0.41100001, 1. ],
[ 1. , 0.40599999, 0.40599999, 1. ],
[ 1. , 0.39399999, 0.39399999, 1. ],
[ 1. , 0.389 , 0.389 , 1. ],
[ 1. , 0.38299999, 0.38299999, 1. ],
[ 1. , 0.37099999, 0.37099999, 1. ],
[ 1. , 0.366 , 0.366 , 1. ],
[ 1. , 0.36000001, 0.36000001, 1. ],
[ 1. , 0.34900001, 0.34900001, 1. ],
[ 1. , 0.34299999, 0.34299999, 1. ],
[ 1. , 0.331 , 0.331 , 1. ],
[ 1. , 0.32600001, 0.32600001, 1. ],
[ 1. , 0.31999999, 0.31999999, 1. ],
[ 1. , 0.30899999, 0.30899999, 1. ],
[ 1. , 0.303 , 0.303 , 1. ],
[ 1. , 0.29100001, 0.29100001, 1. ],
[ 1. , 0.28600001, 0.28600001, 1. ],
[ 1. , 0.28 , 0.28 , 1. ],
[ 1. , 0.26899999, 0.26899999, 1. ],
[ 1. , 0.26300001, 0.26300001, 1. ],
[ 1. , 0.257 , 0.257 , 1. ],
[ 1. , 0.24600001, 0.24600001, 1. ],
[ 1. , 0.23999999, 0.23999999, 1. ],
[ 1. , 0.229 , 0.229 , 1. ],
[ 1. , 0.223 , 0.223 , 1. ],
[ 1. , 0.21699999, 0.21699999, 1. ],
[ 1. , 0.206 , 0.206 , 1. ],
[ 1. , 0.2 , 0.2 , 1. ],
[ 1. , 0.19400001, 0.19400001, 1. ],
[ 1. , 0.183 , 0.183 , 1. ],
[ 1. , 0.177 , 0.177 , 1. ],
[ 1. , 0.16599999, 0.16599999, 1. ],
[ 1. , 0.16 , 0.16 , 1. ],
[ 1. , 0.154 , 0.154 , 1. ],
[ 1. , 0.14300001, 0.14300001, 1. ],
[ 1. , 0.13699999, 0.13699999, 1. ],
[ 1. , 0.131 , 0.131 , 1. ],
[ 1. , 0.12 , 0.12 , 1. ],
[ 1. , 0.114 , 0.114 , 1. ],
[ 1. , 0.103 , 0.103 , 1. ],
[ 1. , 0.097 , 0.097 , 1. ],
[ 1. , 0.091 , 0.091 , 1. ],
[ 1. , 0.08 , 0.08 , 1. ],
[ 1. , 0.074 , 0.074 , 1. ],
[ 1. , 0.069 , 0.069 , 1. ],
[ 1. , 0.057 , 0.057 , 1. ],
[ 1. , 0.051 , 0.051 , 1. ],
[ 1. , 0.04 , 0.04 , 1. ],
[ 1. , 0.034 , 0.034 , 1. ],
[ 1. , 0.029 , 0.029 , 1. ],
[ 1. , 0.017 , 0.017 , 1. ],
[ 1. , 0.011 , 0.011 , 1. ],
[ 1. , 0. , 0. , 1. ]],'f'), 'maxi': 10.0, 'mini': 0.0}
# Apply the name/ramp/range settings; *() passes no positional arguments.
cm.configure(*(), **cfg)
| 60.505747 | 93 | 0.280648 | 1,555 | 15,792 | 2.850161 | 0.204502 | 0.11778 | 0.170578 | 0.00722 | 0.352211 | 0.002482 | 0.002482 | 0.002482 | 0.002482 | 0.002482 | 0 | 0.614783 | 0.554521 | 15,792 | 260 | 94 | 60.738462 | 0.01521 | 0 | 0 | 0.007692 | 0 | 0 | 0.001836 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.007692 | 0 | 0.007692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
d4da2d225ec548a20e2c3ebe334a6e53fa2ab267 | 466 | py | Python | zeugs/test_compat.py | gradgrind/Zeugs | 56361a63f245ac15a8cd21c7316879dc944609db | [
"Apache-2.0"
] | null | null | null | zeugs/test_compat.py | gradgrind/Zeugs | 56361a63f245ac15a8cd21c7316879dc944609db | [
"Apache-2.0"
] | null | null | null | zeugs/test_compat.py | gradgrind/Zeugs | 56361a63f245ac15a8cd21c7316879dc944609db | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
test_compat.py
Last updated: 2019-09-27
"""
from wz_core.reporting import Report
from test_core import testinit, runTests
if __name__ == '__main__':
    testinit()
    # Alternative test targets, enabled by uncommenting as needed:
    # from wz_compat import migrate
    # runTests(migrate)
    # from wz_compat import import_pupils
    # runTests(import_pupils)
    from wz_compat import config
    runTests(config)
    from wz_compat import grades
    runTests(grades)
| 16.642857 | 40 | 0.693133 | 61 | 466 | 5.016393 | 0.491803 | 0.098039 | 0.156863 | 0.235294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027174 | 0.2103 | 466 | 27 | 41 | 17.259259 | 0.804348 | 0.444206 | 0 | 0 | 0 | 0 | 0.03252 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
d4da4728baf44d511f8530c9557ed33df125b7b8 | 98 | py | Python | custom_auth/custom_login/apps.py | farhadmpr/DjangoMobileLogin | 8d1067dea428668a45bd48b6f7a32c7f7a3639d9 | [
"MIT"
] | null | null | null | custom_auth/custom_login/apps.py | farhadmpr/DjangoMobileLogin | 8d1067dea428668a45bd48b6f7a32c7f7a3639d9 | [
"MIT"
] | null | null | null | custom_auth/custom_login/apps.py | farhadmpr/DjangoMobileLogin | 8d1067dea428668a45bd48b6f7a32c7f7a3639d9 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class CustomLoginConfig(AppConfig):
    """Django application configuration for the ``custom_login`` app."""
    name = 'custom_login'
| 16.333333 | 35 | 0.77551 | 11 | 98 | 6.818182 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153061 | 98 | 5 | 36 | 19.6 | 0.903614 | 0 | 0 | 0 | 0 | 0 | 0.122449 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
d4e41964cc9f0b5f77f5d2b8239b061f17f6a34f | 258 | py | Python | selenium_browsermob/settings.py | sshevlyagin/selenium-aws-fargate-demo | 95764e2cc874a31cf326cacc8be4b80146da15d0 | [
"Apache-2.0"
] | 2 | 2020-10-11T04:54:39.000Z | 2021-03-02T18:43:07.000Z | selenium_exceptions/settings.py | sshevlyagin/selenium-aws-fargate-demo | 95764e2cc874a31cf326cacc8be4b80146da15d0 | [
"Apache-2.0"
] | null | null | null | selenium_exceptions/settings.py | sshevlyagin/selenium-aws-fargate-demo | 95764e2cc874a31cf326cacc8be4b80146da15d0 | [
"Apache-2.0"
] | null | null | null | class Config(object):
CHROME_PATH = '/Library/Application Support/Google/chromedriver76.0.3809.68'
BROWSERMOB_PATH = '/usr/local/bin/browsermob-proxy-2.1.4/bin/browsermob-proxy'
class Docker(Config):
    """Configuration used when running inside a container.

    Only the chromedriver location differs from Config;
    BROWSERMOB_PATH is inherited unchanged.
    """
    CHROME_PATH = '/usr/local/bin/chromedriver'
| 32.25 | 82 | 0.748062 | 35 | 258 | 5.428571 | 0.657143 | 0.105263 | 0.126316 | 0.157895 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.052174 | 0.108527 | 258 | 7 | 83 | 36.857143 | 0.773913 | 0 | 0 | 0 | 0 | 0.2 | 0.562016 | 0.48062 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
d4fb320ea473ec902177ab0875b121f2e54a3436 | 96 | py | Python | public/__init__.py | dscdtc/ApiTester | 7db88e5b1b8509d34ec5b95c761db194596abdab | [
"Apache-2.0"
] | null | null | null | public/__init__.py | dscdtc/ApiTester | 7db88e5b1b8509d34ec5b95c761db194596abdab | [
"Apache-2.0"
] | null | null | null | public/__init__.py | dscdtc/ApiTester | 7db88e5b1b8509d34ec5b95c761db194596abdab | [
"Apache-2.0"
] | null | null | null | __author__ = 'tsbc'
import send_email
import httprequest
import bar
import get_token
import log
| 13.714286 | 19 | 0.833333 | 14 | 96 | 5.285714 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.135417 | 96 | 6 | 20 | 16 | 0.891566 | 0 | 0 | 0 | 0 | 0 | 0.041667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.833333 | 0 | 0.833333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
d4fbb244496b3271b3067d2aaf79d3705b9577b7 | 133 | py | Python | helper/reverse_example.py | jgraber/PythonFriday | 879f10934dc6949785e5a799bfc3ca9a2a4434d4 | [
"MIT"
] | 5 | 2021-02-22T08:39:55.000Z | 2022-03-14T03:54:36.000Z | helper/reverse_example.py | jgraber/PythonFriday | 879f10934dc6949785e5a799bfc3ca9a2a4434d4 | [
"MIT"
] | null | null | null | helper/reverse_example.py | jgraber/PythonFriday | 879f10934dc6949785e5a799bfc3ca9a2a4434d4 | [
"MIT"
] | null | null | null | numbers = [1, 2, 3, 4, 5]
# Iterate the list from front to back.
for x in numbers:
    print(x)
print("--------")
# reversed() yields the items back to front without copying the list.
for x in reversed(numbers):
    print(x)
# The original list is unchanged by reversed().
print(numbers)
be0820484dca250166e5caa16b10d2e638719b4c | 58 | py | Python | python/testData/codeInsight/controlflow/tryraisefinally.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/codeInsight/controlflow/tryraisefinally.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/codeInsight/controlflow/tryraisefinally.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | try:
raise KeyboardInterrupt
finally:
print 'test' | 14.5 | 27 | 0.706897 | 6 | 58 | 6.833333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.224138 | 58 | 4 | 28 | 14.5 | 0.911111 | 0 | 0 | 0 | 0 | 0 | 0.067797 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
be1f9d9c745d2d1e2d294518281c54331025095c | 759 | py | Python | django_extended/fields/icon.py | dalou/django-extended | a7ba952ea7089cfb319b4615ae098579c9ab14f9 | [
"BSD-3-Clause"
] | 1 | 2015-12-14T17:16:04.000Z | 2015-12-14T17:16:04.000Z | django_extended/fields/icon.py | dalou/django-extended | a7ba952ea7089cfb319b4615ae098579c9ab14f9 | [
"BSD-3-Clause"
] | null | null | null | django_extended/fields/icon.py | dalou/django-extended | a7ba952ea7089cfb319b4615ae098579c9ab14f9 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from django.db import models
from django import forms
from django.utils.text import capfirst
from ..forms import IconField as IconFormField, IconInput
class IconField(models.CharField):
    """Model field storing an icon value, edited through the IconInput widget.

    NOTE(review): the forced max_length of 7 matches a hex colour value
    (#FFFFFF) and the original docstring described a colour picker -- this
    looks copied from a colour field; confirm 7 characters is wide enough
    for icon values.
    """
    def __init__(self, *args, **kwargs):
        # The stored length is fixed by the field itself; any caller-supplied
        # max_length is overridden.
        kwargs.update(max_length=7)
        super(IconField, self).__init__(*args, **kwargs)
    def formfield(self, **kwargs):
        # Force the icon-picker widget on the generated form field.
        kwargs.update(widget=IconInput)
        return super(IconField, self).formfield(**kwargs)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^django_extended\.fields\.IconField"])
except ImportError:
pass | 28.111111 | 72 | 0.693017 | 90 | 759 | 5.688889 | 0.577778 | 0.058594 | 0.070313 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003257 | 0.191041 | 759 | 27 | 73 | 28.111111 | 0.830619 | 0.147563 | 0 | 0 | 0 | 0 | 0.08121 | 0.055732 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0.0625 | 0.375 | 0 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 3 |
be2a105430933cdcb5f75242c3ed64e915d45126 | 2,963 | py | Python | enos/message/upstream/topo/SubDeviceInfo.py | charleshuangcai/enos-device-sdk-python | bbbb31f4da7c4c4e6f457565faf0fb5cf15bb308 | [
"MIT"
] | 5 | 2020-05-09T09:21:22.000Z | 2021-05-31T02:42:56.000Z | enos/message/upstream/topo/SubDeviceInfo.py | charleshuangcai/enos-device-sdk-python | bbbb31f4da7c4c4e6f457565faf0fb5cf15bb308 | [
"MIT"
] | 4 | 2020-04-18T03:28:09.000Z | 2021-05-28T09:41:05.000Z | enos/message/upstream/topo/SubDeviceInfo.py | charleshuangcai/enos-device-sdk-python | bbbb31f4da7c4c4e6f457565faf0fb5cf15bb308 | [
"MIT"
] | 7 | 2020-01-19T07:58:28.000Z | 2021-12-22T06:53:43.000Z | import time
from enos.core.util.Deprecation import deprecated
from enos.core.util.SignUtil import SignUtil
class SubDeviceInfo(object):
    """Credentials and signature material for a sub-device.

    The signature is computed once, at construction time, over the product
    key, device key, generated client id and millisecond timestamp.
    """

    def __init__(self, product_key, device_key, device_secret, sign_method=SignUtil.DEFAULT_SIGN_METHOD):
        self.__product_key = product_key
        self.__device_key = device_key
        self.__device_secret = device_secret
        # Millisecond timestamp; also embedded in the default client id.
        self.__timestamp = int(time.time() * 1000)
        self.__client_id = self.get_default_client_id(product_key, device_key)
        self.__sign_method = sign_method
        self.sign = SignUtil.sign(
            device_secret,
            {
                'productKey': product_key,
                'deviceKey': device_key,
                'clientId': self.__client_id,
                'timestamp': str(self.__timestamp),
            },
            self.__sign_method)

    def get_default_client_id(self, product_key, device_key):
        # Format: <productKey>.<deviceKey>.<timestamp-in-ms>
        return "{}.{}.{}".format(product_key, device_key, str(self.__timestamp))

    def get_client_id(self):
        return self.__client_id

    def get_timestamp(self):
        return self.__timestamp

    def get_sign_method(self):
        return self.__sign_method

    def set_sign_method(self, sign_method):
        # NOTE(review): changing the sign method does not recompute
        # self.sign, which was produced in __init__ -- confirm intended.
        self.__sign_method = sign_method

    def get_sign(self):
        return self.sign

    def create_sign_map(self):
        """Return all sign-relevant fields as a plain dict."""
        return {
            'productKey': self.__product_key,
            'deviceKey': self.__device_key,
            'clientId': self.__client_id,
            'timestamp': str(self.__timestamp),
            'signMethod': self.__sign_method,
            'sign': self.sign,
        }

    def get_product_key(self):
        return self.__product_key

    def get_device_key(self):
        return self.__device_key

    def set_product_key(self, product_key):
        self.__product_key = product_key
        return self

    def set_device_key(self, device_key):
        self.__device_key = device_key
        return self

    # --- Deprecated camelCase aliases kept for backward compatibility ---

    @deprecated
    def getDefaultClientId(self, product_key, device_key):
        return self.get_default_client_id(product_key, device_key)

    @deprecated
    def getClientId(self):
        return self.get_client_id()

    @deprecated
    def getTimestamp(self):
        return self.get_timestamp()

    @deprecated
    def getSignMethod(self):
        return self.get_sign_method()

    @deprecated
    def setSignMethod(self, sign_method):
        self.set_sign_method(sign_method)

    @deprecated
    def getSign(self):
        return self.get_sign()

    @deprecated
    def createSignMap(self):
        return self.create_sign_map()

    @deprecated
    def getProductKey(self):
        return self.get_product_key()

    @deprecated
    def setProductKey(self, product_key):
        return self.set_product_key(product_key)

    @deprecated
    def setDeviceKey(self, device_key):
        return self.set_device_key(device_key)
| 28.219048 | 105 | 0.681741 | 363 | 2,963 | 5.146006 | 0.137741 | 0.107066 | 0.089936 | 0.061028 | 0.288544 | 0.101713 | 0.043897 | 0.043897 | 0.043897 | 0 | 0 | 0.001754 | 0.23051 | 2,963 | 104 | 106 | 28.490385 | 0.817544 | 0 | 0 | 0.230769 | 0 | 0 | 0.031725 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.282051 | false | 0 | 0.038462 | 0.205128 | 0.576923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
be2d25b687b4156fe75be371d8c1b622c87ef65d | 336 | py | Python | Python/263ugly_number.py | Apocrypse/LeetCode | 3ada2605ce8c8f6dadebf37a30c9c00a0d1ede39 | [
"MIT"
] | 4 | 2020-03-17T03:08:51.000Z | 2022-03-14T17:33:28.000Z | Python/263ugly_number.py | Apocrypse/LeetCode | 3ada2605ce8c8f6dadebf37a30c9c00a0d1ede39 | [
"MIT"
] | null | null | null | Python/263ugly_number.py | Apocrypse/LeetCode | 3ada2605ce8c8f6dadebf37a30c9c00a0d1ede39 | [
"MIT"
] | 3 | 2021-04-29T16:51:02.000Z | 2022-03-19T17:37:56.000Z | class Solution:
def isUgly(self, num):
"""
:type num: int
:rtype: bool
"""
while not num:
return False
while not (num % 2):
num //= 2
while not (num % 3):
num //= 3
while not (num % 5):
num //= 5
return num == 1
| 21 | 28 | 0.380952 | 37 | 336 | 3.459459 | 0.486486 | 0.25 | 0.34375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.041916 | 0.502976 | 336 | 15 | 29 | 22.4 | 0.724551 | 0.080357 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
077567e14fef2df079a12737a88a46013ee2903e | 514 | py | Python | tests/anyorderlist.py | alexweav/nisystemlink-clients-python | f19a30907a7fef536043ecbddc5a755e5fedf846 | [
"MIT"
] | 8 | 2020-07-23T16:34:57.000Z | 2022-02-25T02:43:15.000Z | tests/anyorderlist.py | alexweav/nisystemlink-clients-python | f19a30907a7fef536043ecbddc5a755e5fedf846 | [
"MIT"
] | 6 | 2020-07-23T16:43:09.000Z | 2020-12-16T14:29:37.000Z | tests/anyorderlist.py | alexweav/nisystemlink-clients-python | f19a30907a7fef536043ecbddc5a755e5fedf846 | [
"MIT"
] | 6 | 2020-07-14T22:17:00.000Z | 2022-03-07T13:05:59.000Z | class AnyOrderList:
"""Sequence that compares to a list, but allows any order.
This is only intended for comparison purposes, not as an actual list replacement.
"""
def __init__(self, list_):
self._list = list_
self._list.sort()
def __eq__(self, other):
assert isinstance(other, list)
return self._list == sorted(other)
def __str__(self):
return str(self._list)
def __repr__(self):
return "AnyOrderList({})".format(repr(self._list))
| 25.7 | 85 | 0.638132 | 64 | 514 | 4.765625 | 0.578125 | 0.157377 | 0.078689 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.256809 | 514 | 19 | 86 | 27.052632 | 0.798429 | 0.268482 | 0 | 0 | 0 | 0 | 0.044321 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 1 | 0.363636 | false | 0 | 0 | 0.181818 | 0.727273 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
0783cf9c9bb1acdc46011084bd57292ce1d51efd | 2,272 | py | Python | tests/test_transport.py | mjmunger/PolyPyTools | 116014a47479f360ee73006d6ba3ddc7f362c7a1 | [
"MIT"
] | 7 | 2017-11-15T19:25:37.000Z | 2022-01-20T01:30:56.000Z | tests/test_transport.py | DrDamnit/PolyPyTools | 116014a47479f360ee73006d6ba3ddc7f362c7a1 | [
"MIT"
] | 40 | 2020-05-19T19:46:20.000Z | 2020-11-12T16:13:55.000Z | tests/test_transport.py | mjmunger/PolyPyTools | 116014a47479f360ee73006d6ba3ddc7f362c7a1 | [
"MIT"
] | null | null | null | import unittest
from unittest_data_provider import data_provider
from poly_py_tools.pjsip.transport import Transport
class TestTransport(unittest.TestCase):
    """Checks that Transport parses a pjsip.conf-style section into attributes."""
    # (section lines, expected attribute dict) pairs. Values are random
    # placeholder strings: only faithful key->attribute mapping is tested.
    provider_test_init = lambda :(
        #section                    expected_attributes
        (["[sampletransport]", "async_operations=BwBCL4", "bind=5UIfQtXOX", "ca_list_file=mUMG8", "cert_file=4pSIS", "cipher=Q6w8SVxMMaNmyXMXI4iK", "domain=MPB", "external_media_address=7P9cAxJUBJJz66fHAXsA", "external_signaling_address=IcGKw", "external_signaling_port=egggXpOjpdGfqc1", "method=zAOasIGWr2GepUy4", "local_net=y9tJYJtVK", "password=L9mJD7RzIX", "priv_key_file=QFy6Rcnx1x5AvkIVom", "protocol=jA0JTSjqYFb0iLDs", "require_client_cert=DUdQlqZu5hUdpSXZ", "type=YIQjFDSXWfVku", "verify_client=4Ey2EjhN3XYbFD8ufl", "verify_server=XONxHpKl", "tos=Td2", "cos=kqBcmurbLzz", "websocket_write_timeout=U6PKOX7fAFCrrie2dF"], {"async_operations":"BwBCL4", "bind":"5UIfQtXOX", "ca_list_file":"mUMG8", "cert_file":"4pSIS", "cipher":"Q6w8SVxMMaNmyXMXI4iK", "domain":"MPB", "external_media_address":"7P9cAxJUBJJz66fHAXsA", "external_signaling_address":"IcGKw", "external_signaling_port":"egggXpOjpdGfqc1", "method":"zAOasIGWr2GepUy4", "local_net":"y9tJYJtVK", "password":"L9mJD7RzIX", "priv_key_file":"QFy6Rcnx1x5AvkIVom", "protocol":"jA0JTSjqYFb0iLDs", "require_client_cert":"DUdQlqZu5hUdpSXZ", "type":"YIQjFDSXWfVku", "verify_client":"4Ey2EjhN3XYbFD8ufl", "verify_server":"XONxHpKl", "tos":"Td2", "cos":"kqBcmurbLzz", "websocket_write_timeout":"U6PKOX7fAFCrrie2dF"}),
    )
    @data_provider(provider_test_init)
    def test_init(self, section, expected_attributes):
        """Transport keeps the raw section and exposes every key as an attribute."""
        transport = Transport(section)
        self.assertEqual(section, transport.section)
        transport.set_attributes()
        for attribute in expected_attributes:
            expected_value = expected_attributes[attribute]
            actual_value = getattr(transport, attribute)
            self.assertEqual(expected_value, actual_value, "endpoint.{} should be {}. Got {} instead.".format(attribute, expected_value, actual_value))
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
| 78.344828 | 1,266 | 0.701144 | 211 | 2,272 | 7.227488 | 0.43128 | 0.047213 | 0.020984 | 0.032787 | 0.617705 | 0.617705 | 0.617705 | 0.617705 | 0.617705 | 0.617705 | 0 | 0.032961 | 0.172095 | 2,272 | 28 | 1,267 | 81.142857 | 0.777778 | 0.090229 | 0 | 0 | 0 | 0 | 0.516457 | 0.227493 | 0 | 0 | 0 | 0 | 0.111111 | 1 | 0.055556 | false | 0.055556 | 0.166667 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
0789e603f0ce40516024958644210541e9c18969 | 670 | py | Python | silica/transformations/replace_symbols.py | leonardt/fsm_dsl | 94bc277c7286c9587a8ad9d3a48fa021ba72686e | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | silica/transformations/replace_symbols.py | leonardt/fsm_dsl | 94bc277c7286c9587a8ad9d3a48fa021ba72686e | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | silica/transformations/replace_symbols.py | leonardt/fsm_dsl | 94bc277c7286c9587a8ad9d3a48fa021ba72686e | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | import ast
from copy import deepcopy
class SymbolReplacer(ast.NodeTransformer):
    """AST transformer that substitutes Name nodes with replacement subtrees.

    ``symbol_table`` maps identifier strings to replacement AST nodes. When
    ``ctx`` is None, every matching Name is replaced and the original
    load/store context is carried over; otherwise only Names whose context
    is an instance of ``ctx`` are replaced (context taken from the table).
    """

    def __init__(self, symbol_table, ctx):
        self.symbol_table = symbol_table
        self.ctx = ctx

    def visit_Name(self, node):
        """Swap *node* for a deep copy of its table entry when applicable."""
        if node.id not in self.symbol_table:
            return node
        replacement = self.symbol_table[node.id]
        if self.ctx is None:
            substitute = deepcopy(replacement)
            # Preserve the original node's load/store context.
            substitute.ctx = node.ctx
            return substitute
        if isinstance(node.ctx, self.ctx):
            return deepcopy(replacement)
        return node
def replace_symbols(tree, symbol_table, ctx=None):
    """Return *tree* with Name nodes substituted according to *symbol_table*."""
    replacer = SymbolReplacer(symbol_table, ctx)
    return replacer.visit(tree)
| 30.454545 | 63 | 0.637313 | 87 | 670 | 4.712644 | 0.333333 | 0.214634 | 0.182927 | 0.112195 | 0.141463 | 0.141463 | 0 | 0 | 0 | 0 | 0 | 0 | 0.279104 | 670 | 21 | 64 | 31.904762 | 0.848861 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.176471 | false | 0 | 0.117647 | 0.058824 | 0.588235 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
079a512d48ec8f497c074a16bdd633bb33958246 | 190 | py | Python | rentomatic/repository/memrepo.py | danlgz/rentomatic | f5be1d19abb6a1b73ab9e80bf01217ebea645c3b | [
"MIT"
] | null | null | null | rentomatic/repository/memrepo.py | danlgz/rentomatic | f5be1d19abb6a1b73ab9e80bf01217ebea645c3b | [
"MIT"
] | null | null | null | rentomatic/repository/memrepo.py | danlgz/rentomatic | f5be1d19abb6a1b73ab9e80bf01217ebea645c3b | [
"MIT"
] | null | null | null | import dataclasses
from rentomatic.domain.room import Room
@dataclasses.dataclass
class MemRepo:
    """In-memory room repository backed by a list of plain dicts."""

    # Raw room records as dicts, as produced by Room.to_dict().
    data: list

    def list(self):
        """Hydrate every stored dict into a Room domain entity."""
        return [Room.from_dict(record) for record in self.data]
| 15.833333 | 53 | 0.715789 | 27 | 190 | 5 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.205263 | 190 | 11 | 54 | 17.272727 | 0.89404 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.285714 | 0.142857 | 0.857143 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
07a1373c044d73837f4c5b57a7f55a0a689bd1be | 9,672 | py | Python | 15x15/play_alphazero_guerzhoy.py | jonah-chen/alphazero-guerzhoy | 9e45994d8fc687001bb98caefa2f89a7b4707ea1 | [
"MIT"
] | null | null | null | 15x15/play_alphazero_guerzhoy.py | jonah-chen/alphazero-guerzhoy | 9e45994d8fc687001bb98caefa2f89a7b4707ea1 | [
"MIT"
] | 2 | 2020-12-15T00:31:24.000Z | 2021-11-08T09:11:20.000Z | 15x15/play_alphazero_guerzhoy.py | jonah-chen/alphazero-guerzhoy | 9e45994d8fc687001bb98caefa2f89a7b4707ea1 | [
"MIT"
] | 1 | 2020-11-29T21:45:44.000Z | 2020-11-29T21:45:44.000Z | from tensorflow.keras.models import load_model
import numpy as np
from game import move_on_board, print_board
from mcts import optimized_search
from tkinter import Tk, Label, Button, DISABLED
from PIL import ImageTk, Image
from nptrain import is_win
# Main application window.
root = Tk()
root.title("Play AlphaZero Guerzhoy")
# Indices of checkpoint files (models/<n>.h5) offered as opponents.
GOOD_MODELS = [0,2,8,9,13,19,24,35,39,58,60,91,101]
# Pre-scaled 100x100 stone sprites used on the board buttons.
black_stone = ImageTk.PhotoImage(Image.open("images/blackstone.png").resize((100,100)))
white_stone = ImageTk.PhotoImage(Image.open("images/whitestone.png").resize((100,100)))
score = [0, 0] # Your score, Computer Score
def computer_move(model, board):
    """Makes a move by using the A.I. on a board board

    Args:
        model (tf.keras.models.Model): The trained A.I. Model
        board (np.array (shape=(1,8,8,2,))): The board

    Returns:
        (row, column) of the stone the computer just played.
    """
    # The computer's stone colour is the opposite of the human's side.
    stone = 1 if player else 2
    chosen = optimized_search(model, board, [stone], it=diff)[0].play(0)[0]
    move_on_board(board[0], chosen, stone)
    # Flat index -> (row, col) on the 8-wide board.
    return divmod(chosen, 8)
def load(i):
    """Load the i-th curated checkpoint as the active opponent model."""
    global model
    model = load_model("models/{}.h5".format(GOOD_MODELS[i]))
def play_as_white():
    """Choose the white side for the human; the computer opens the game."""
    global player
    player = 0
    try:
        x, y = computer_move(model, board)
    except NameError:
        # ``model`` is undefined until one of the A.I. buttons was clicked.
        Label(root, text="Please choose an A.I. to play against.").grid(row=4, column=9, columnspan=3)
        return
    # player == 0 here, so the computer's opening stone is drawn black.
    board_buttons[x][y].grid_forget()
    board_buttons[x][y] = Button(root, image=white_stone if player else black_stone, state=DISABLED, padx=50, pady=50)
    board_buttons[x][y].grid(row=x, column=y)
def play_as_black():
    """Choose the black side for the human; the human moves first."""
    global player
    player = 1
def _draw_stone(x, y, image):
    """Replace the clickable button at (x, y) with a disabled stone image."""
    board_buttons[x][y].grid_forget()
    board_buttons[x][y] = Button(root, image=image, state=DISABLED, padx=50, pady=50)
    board_buttons[x][y].grid(row=x, column=y)


def _announce_result(result):
    """Score and report a finished game; return True when the game is over.

    ``result`` is the is_win() code: 1 and 2 identify the winning stone,
    3 is a draw, anything else means the game continues.
    """
    if result == 2:
        if player:
            score[0] += 1
            Label(root, text="Player wins! Reset to play again.").grid(row=4, column=9, columnspan=3)
        else:
            score[1] += 1
            Label(root, text="Computer wins! Reset to play again.").grid(row=4, column=9, columnspan=3)
        return True
    if result == 1:
        if player:
            score[1] += 1
            Label(root, text="Computer wins! Reset to play again.").grid(row=4, column=9, columnspan=3)
        else:
            score[0] += 1
            Label(root, text="Player wins! Reset to play again.").grid(row=4, column=9, columnspan=3)
        return True
    if result == 3:
        score[0] += 0.5
        score[1] += 0.5
        Label(root, text="Draw! Reset to play again.").grid(row=4, column=9, columnspan=3)
        return True
    return False


def move(x, y):
    """Handle a human click on cell (x, y), then let the computer respond.

    Validates that a side and a model have been chosen, places the human
    stone, checks for game end, and if play continues places the
    computer's reply and checks again. The win/draw bookkeeping that was
    duplicated twice in the original is factored into _announce_result().
    """
    global board_buttons
    try:
        move_on_board(board[0], x*8+y, 2 if player else 1)
    except NameError:
        # ``player`` is undefined until Black/White was chosen.
        Label(root, text="Please choose to be either white or black.").grid(row=4, column=9, columnspan=3)
        return
    _draw_stone(x, y, black_stone if player else white_stone)
    if _announce_result(is_win(board[0])):
        return

    # The computer makes a move in response.
    try:
        x, y = computer_move(model, board)
    except NameError:
        # ``model`` is undefined until one of the A.I. buttons was clicked.
        Label(root, text="Please choose an A.I. to play against.").grid(row=4, column=9, columnspan=3)
        return
    _draw_stone(x, y, white_stone if player else black_stone)
    if _announce_result(is_win(board[0])):
        return
def select_diff(difficulty):
    """Set the MCTS iteration budget used by the computer's search."""
    global diff
    diff = difficulty
def resign():
    """Concede the current game: award the computer a point and restart."""
    global score
    score[1] += 1
    init_game()
def init_game():
    """(Re)build an empty 8x8 board UI and reset per-game state.

    Fix: the original reset the buttons but never re-zeroed the logical
    ``board`` array, so stones from the previous game persisted after a
    RESET or resign; the board is now cleared too. The hand-written 8x8
    button literal is replaced by an equivalent comprehension.
    """
    global board_buttons, board, diff, player
    try:
        # Force the player to re-pick a side for the new game.
        del player
    except NameError:
        pass
    diff = 512
    # Clear the logical game state along with the UI.
    board = np.zeros((1, 8, 8, 2,), dtype="float32")
    # Default arguments pin each cell's coordinates at creation time,
    # avoiding the late-binding closure pitfall.
    board_buttons = [[Button(root, command=lambda r=i, c=j: move(r, c),
                             padx=50, pady=50)
                      for j in range(8)] for i in range(8)]
    for i in range(8):
        for j in range(8):
            board_buttons[i][j].grid(row=i, column=j)
Button(root, text="RESET", command=init_game).grid(row=8, column=0)
Button(root, text="Resign", command=resign).grid(row=8, column=1)
Button(root, text="Black", command=play_as_black).grid(row=8, column=3)
Button(root, text="White", command=play_as_white).grid(row=8, column=4)
# Fix: bind ``i`` as a default argument. The original ``lambda: load(i)``
# late-bound ``i``, so every model button loaded the last model.
model_select = [Button(root, text=f"{i}", command=lambda i=i: load(i))
                for i in range(len(GOOD_MODELS)-1)]
model_select.append(Button(root, text="12", command=lambda: load(-1)))
for i in range(len(GOOD_MODELS)):
    model_select[i].grid(row=9, column=i)
Button(root, text="Easy", command=lambda: select_diff(128)).grid(row=8, column=6)
Button(root, text="Default", command=lambda: select_diff(512)).grid(row=8, column=7)
Button(root, text="Hard", command=lambda: select_diff(2048)).grid(row=8, column=8)
Label(root, text=f"Your score: {score[0]}").grid(row=0, column=9, columnspan=3)
Label(root, text=f"Computer score: {score[1]}").grid(row=1, column=9, columnspan=3)
# Logical game state: (batch, row, col, stone-plane) float array.
board = np.zeros((1, 8, 8, 2,), dtype="float32")

if __name__ == "__main__":
    init_game()
    root.mainloop()
| 55.586207 | 3,876 | 0.645885 | 1,609 | 9,672 | 3.837787 | 0.093226 | 0.123077 | 0.108502 | 0.130202 | 0.724858 | 0.70915 | 0.692632 | 0.678381 | 0.678381 | 0.678381 | 0 | 0.073005 | 0.180004 | 9,672 | 173 | 3,877 | 55.907514 | 0.705586 | 0.030294 | 0 | 0.549618 | 0 | 0 | 0.068165 | 0.007277 | 0 | 0 | 0 | 0 | 0 | 1 | 0.061069 | false | 0.007634 | 0.053435 | 0 | 0.19084 | 0.007634 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
07de0e19e9d45a263adba742067b5e397607f7e4 | 338 | py | Python | test_track_qt_common.py | sosiax/track | 4988d2f1d7701f8b8cd6ca8f17d9d829a4dd712e | [
"Apache-2.0"
] | 50 | 2015-05-10T13:59:02.000Z | 2021-07-12T08:06:51.000Z | test_track_qt_common.py | sosiax/track | 4988d2f1d7701f8b8cd6ca8f17d9d829a4dd712e | [
"Apache-2.0"
] | 17 | 2015-04-29T10:49:51.000Z | 2019-07-31T12:50:56.000Z | test_track_qt_common.py | sosiax/track | 4988d2f1d7701f8b8cd6ca8f17d9d829a4dd712e | [
"Apache-2.0"
] | 11 | 2015-08-20T09:43:07.000Z | 2020-03-03T14:41:02.000Z | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import track_qt
def test_change_emitter():
    """Smoke test: change_emitter must accept an arbitrary object."""
    class _Dummy:
        pass
    target = _Dummy()
    emitter = track_qt.change_emitter(target)
def test_matrix_table_model():
    """Smoke test: matrix_table_model must construct with no parent."""
    model = track_qt.matrix_table_model(None)
# Allow running these smoke tests directly without a test runner.
if __name__ == '__main__':
    test_change_emitter()
    test_matrix_table_model()
| 16.9 | 43 | 0.662722 | 47 | 338 | 4.255319 | 0.574468 | 0.105 | 0.24 | 0.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007547 | 0.215976 | 338 | 19 | 44 | 17.789474 | 0.74717 | 0.127219 | 0 | 0 | 0 | 0 | 0.027397 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0.090909 | 0.090909 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
07fdf4dbee9de73598b7b91945c4f065259d4fd7 | 121 | py | Python | .kodi/userdata/addon_data/plugin.video.p2p-streams/acestream/ace/ACEStream/version.py | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | null | null | null | .kodi/userdata/addon_data/plugin.video.p2p-streams/acestream/ace/ACEStream/version.py | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | null | null | null | .kodi/userdata/addon_data/plugin.video.p2p-streams/acestream/ace/ACEStream/version.py | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | 2 | 2018-04-17T17:34:39.000Z | 2020-07-26T03:43:33.000Z | #Embedded file name: ACEStream\version.pyo
VERSION = '2.0.8.7'
VERSION_REV = '2191'
VERSION_DATE = '2013/03/28 18:36:41'
| 24.2 | 42 | 0.719008 | 22 | 121 | 3.863636 | 0.863636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.205607 | 0.115702 | 121 | 4 | 43 | 30.25 | 0.588785 | 0.338843 | 0 | 0 | 0 | 0 | 0.379747 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
07fe9de8d80eaf5b8dc7a19f4a9e3e737bcb2b2a | 138 | py | Python | credentials-sample.py | samuelcalegari/rocket-ldap-synchro | f0db5e0266764521dae09706864128f43b94ae5b | [
"MIT"
] | null | null | null | credentials-sample.py | samuelcalegari/rocket-ldap-synchro | f0db5e0266764521dae09706864128f43b94ae5b | [
"MIT"
] | null | null | null | credentials-sample.py | samuelcalegari/rocket-ldap-synchro | f0db5e0266764521dae09706864128f43b94ae5b | [
"MIT"
] | null | null | null | credentials = {
'ldap': {
'user': '',
'pass': ''
},
'rocket': {
'user': '',
'pass': ''
}
} | 13.8 | 19 | 0.268116 | 7 | 138 | 5.285714 | 0.714286 | 0.432432 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.471014 | 138 | 10 | 20 | 13.8 | 0.506849 | 0 | 0 | 0.4 | 0 | 0 | 0.18705 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.2 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
6afab604e26816bd392cad4149058b638ad2b5cd | 46,730 | py | Python | cinder/tests/unit/test_solidfire.py | overcastcloud/cinder | ad977d456c5d50e992eee95ea40f4e3dd21981dc | [
"Apache-2.0"
] | null | null | null | cinder/tests/unit/test_solidfire.py | overcastcloud/cinder | ad977d456c5d50e992eee95ea40f4e3dd21981dc | [
"Apache-2.0"
] | null | null | null | cinder/tests/unit/test_solidfire.py | overcastcloud/cinder | ad977d456c5d50e992eee95ea40f4e3dd21981dc | [
"Apache-2.0"
] | null | null | null |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from mox3 import mox
from oslo_utils import timeutils
from oslo_utils import units
from cinder import context
from cinder import exception
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers import solidfire
from cinder.volume import qos_specs
from cinder.volume import volume_types
def create_configuration():
    """Build a mocked cinder Configuration object for a non-local SAN."""
    mock_conf = mox.MockObject(conf.Configuration)
    mock_conf.san_is_local = False
    mock_conf.append_config_values(mox.IgnoreArg())
    return mock_conf
class SolidFireVolumeTestCase(test.TestCase):
    def setUp(self):
        """Prepare a mocked driver configuration plus canned fixture data.

        Stubs the driver's API request and endpoint-building entry points
        with the fake_* methods below so no real cluster is contacted.
        """
        self.ctxt = context.get_admin_context()
        self.configuration = mox.MockObject(conf.Configuration)
        self.configuration.sf_allow_tenant_qos = True
        self.configuration.san_is_local = True
        self.configuration.sf_emulate_512 = True
        self.configuration.sf_account_prefix = 'cinder'
        self.configuration.reserved_percentage = 25
        self.configuration.iscsi_helper = None
        self.configuration.sf_template_account_name = 'openstack-vtemplate'
        self.configuration.sf_allow_template_caching = False

        super(SolidFireVolumeTestCase, self).setUp()
        self.stubs.Set(solidfire.SolidFireDriver,
                       '_issue_api_request',
                       self.fake_issue_api_request)
        self.stubs.Set(solidfire.SolidFireDriver,
                       '_build_endpoint_info',
                       self.fake_build_endpoint_info)

        self.expected_qos_results = {'minIOPS': 1000,
                                     'maxIOPS': 10000,
                                     'burstIOPS': 20000}
        # Canned GetClusterCapacity payload shared by several tests.
        self.mock_stats_data =\
            {'result':
                {'clusterCapacity': {'maxProvisionedSpace': 107374182400,
                                     'usedSpace': 1073741824,
                                     'compressionPercent': 100,
                                     'deDuplicationPercent': 100,
                                     'thinProvisioningPercent': 100}}}
        self.mock_volume = {'project_id': 'testprjid',
                            'name': 'testvol',
                            'size': 1,
                            'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
                            'volume_type_id': 'fast',
                            'created_at': timeutils.utcnow()}
        self.fake_image_meta = {'id': '17c550bb-a411-44c0-9aaf-0d96dd47f501',
                                'updated_at': datetime.datetime(2013, 9,
                                                                28, 15,
                                                                27, 36,
                                                                325355),
                                'is_public': True,
                                'owner': 'testprjid'}
        self.fake_image_service = 'null'
def fake_build_endpoint_info(obj, **kwargs):
endpoint = {}
endpoint['mvip'] = '1.1.1.1'
endpoint['login'] = 'admin'
endpoint['passwd'] = 'admin'
endpoint['port'] = '443'
endpoint['url'] = '{scheme}://{mvip}'.format(mvip='%s:%s' %
(endpoint['mvip'],
endpoint['port']),
scheme='https')
return endpoint
def fake_issue_api_request(obj, method, params, version='1.0'):
if method is 'GetClusterCapacity' and version == '1.0':
data = {'result':
{'clusterCapacity': {'maxProvisionedSpace': 107374182400,
'usedSpace': 1073741824,
'compressionPercent': 100,
'deDuplicationPercent': 100,
'thinProvisioningPercent': 100}}}
return data
elif method is 'GetClusterInfo' and version == '1.0':
results = {'result': {'clusterInfo':
{'name': 'fake-cluster',
'mvip': '1.1.1.1',
'svip': '1.1.1.1',
'uniqueID': 'unqid',
'repCount': 2,
'attributes': {}}}}
return results
elif method is 'AddAccount' and version == '1.0':
return {'result': {'accountID': 25}, 'id': 1}
elif method is 'GetAccountByName' and version == '1.0':
results = {'result': {'account':
{'accountID': 25,
'username': params['username'],
'status': 'active',
'initiatorSecret': '123456789012',
'targetSecret': '123456789012',
'attributes': {},
'volumes': [6, 7, 20]}},
"id": 1}
return results
elif method is 'CreateVolume' and version == '1.0':
return {'result': {'volumeID': 5}, 'id': 1}
elif method is 'CreateSnapshot' and version == '6.0':
return {'result': {'snapshotID': 5}, 'id': 1}
elif method is 'DeleteVolume' and version == '1.0':
return {'result': {}, 'id': 1}
elif method is 'ModifyVolume' and version == '5.0':
return {'result': {}, 'id': 1}
elif method is 'CloneVolume':
return {'result': {'volumeID': 6}, 'id': 2}
elif method is 'ModifyVolume':
return
elif method is 'ListVolumesForAccount' and version == '1.0':
test_name = 'OS-VOLID-a720b3c0-d1f0-11e1-9b23-0800200c9a66'
result = {'result': {
'volumes': [{'volumeID': 5,
'name': test_name,
'accountID': 25,
'sliceCount': 1,
'totalSize': 1 * units.Gi,
'enable512e': True,
'access': "readWrite",
'status': "active",
'attributes': {},
'qos': None,
'iqn': test_name}]}}
return result
elif method is 'ListActiveVolumes':
test_name = "existing_volume"
result = {'result': {
'volumes': [{'volumeID': 5,
'name': test_name,
'accountID': 8,
'sliceCount': 1,
'totalSize': 1 * units.Gi,
'enable512e': True,
'access': "readWrite",
'status': "active",
'attributes': {},
'qos': None,
'iqn': test_name}]}}
return result
elif method is 'DeleteSnapshot':
return {'result': {}}
else:
# Crap, unimplemented API call in Fake
return None
def fake_issue_api_request_fails(obj, method,
params, version='1.0',
endpoint=None):
return {'error': {'code': 000,
'name': 'DummyError',
'message': 'This is a fake error response'},
'id': 1}
def fake_set_qos_by_volume_type(self, type_id, ctxt):
return {'minIOPS': 500,
'maxIOPS': 1000,
'burstIOPS': 1000}
def fake_volume_get(obj, key, default=None):
return {'qos': 'fast'}
    def fake_update_cluster_status(self):
        """No-op stand-in for the driver's cluster status refresh."""
        return
def fake_get_model_info(self, account, vid):
return {'fake': 'fake-model'}
    @mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
    @mock.patch.object(solidfire.SolidFireDriver, '_create_template_account')
    def test_create_volume_with_qos_type(self,
                                         _mock_create_template_account,
                                         _mock_issue_api_request):
        """create_volume must resolve the volume type's qos_specs into
        integer IOPS values on the created volume's 'qos' entry."""
        _mock_issue_api_request.return_value = self.mock_stats_data
        _mock_create_template_account.return_value = 1
        testvol = {'project_id': 'testprjid',
                   'name': 'testvol',
                   'size': 1,
                   'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
                   'volume_type_id': 'fast',
                   'created_at': timeutils.utcnow()}
        fake_sfaccounts = [{'accountID': 5,
                            'name': 'testprjid',
                            'targetSecret': 'shhhh',
                            'username': 'john-wayne'}]
        test_type = {'name': 'sf-1',
                     'qos_specs_id': 'fb0576d7-b4b5-4cad-85dc-ca92e6a497d1',
                     'deleted': False,
                     'created_at': '2014-02-06 04:58:11',
                     'updated_at': None,
                     'extra_specs': {},
                     'deleted_at': None,
                     'id': 'e730e97b-bc7d-4af3-934a-32e59b218e81'}
        # Spec values are strings on purpose; the driver must coerce them.
        test_qos_spec = {'id': 'asdfafdasdf',
                         'specs': {'minIOPS': '1000',
                                   'maxIOPS': '2000',
                                   'burstIOPS': '3000'}}

        def _fake_get_volume_type(ctxt, type_id):
            return test_type

        def _fake_get_qos_spec(ctxt, spec_id):
            return test_qos_spec

        def _fake_do_volume_create(account, params):
            # Echo the create params so the assertion can inspect 'qos'.
            return params

        sfv = solidfire.SolidFireDriver(configuration=self.configuration)
        with mock.patch.object(sfv,
                               '_get_sfaccounts_for_tenant',
                               return_value=fake_sfaccounts), \
                mock.patch.object(sfv,
                                  '_issue_api_request',
                                  side_effect=self.fake_issue_api_request), \
                mock.patch.object(sfv,
                                  '_get_account_create_availability',
                                  return_value=fake_sfaccounts[0]), \
                mock.patch.object(sfv,
                                  '_do_volume_create',
                                  side_effect=_fake_do_volume_create), \
                mock.patch.object(volume_types,
                                  'get_volume_type',
                                  side_effect=_fake_get_volume_type), \
                mock.patch.object(qos_specs,
                                  'get_qos_specs',
                                  side_effect=_fake_get_qos_spec):
            self.assertEqual({'burstIOPS': 3000,
                              'minIOPS': 1000,
                              'maxIOPS': 2000},
                             sfv.create_volume(testvol)['qos'])
    @mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
    @mock.patch.object(solidfire.SolidFireDriver, '_create_template_account')
    def test_create_volume(self,
                           _mock_create_template_account,
                           _mock_issue_api_request):
        """Plain create_volume (no volume type) returns a model update
        without provider_geometry."""
        _mock_issue_api_request.return_value = self.mock_stats_data
        _mock_create_template_account.return_value = 1
        testvol = {'project_id': 'testprjid',
                   'name': 'testvol',
                   'size': 1,
                   'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
                   'volume_type_id': None,
                   'created_at': timeutils.utcnow()}
        fake_sfaccounts = [{'accountID': 5,
                            'name': 'testprjid',
                            'targetSecret': 'shhhh',
                            'username': 'john-wayne'}]

        sfv = solidfire.SolidFireDriver(configuration=self.configuration)
        with mock.patch.object(sfv,
                               '_get_sfaccounts_for_tenant',
                               return_value=fake_sfaccounts), \
                mock.patch.object(sfv,
                                  '_issue_api_request',
                                  side_effect=self.fake_issue_api_request), \
                mock.patch.object(sfv,
                                  '_get_account_create_availability',
                                  return_value=fake_sfaccounts[0]):

            model_update = sfv.create_volume(testvol)
            self.assertIsNotNone(model_update)
            # With 512e emulation on (default in setUp) no geometry is set.
            self.assertIsNone(model_update.get('provider_geometry', None))
@mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
@mock.patch.object(solidfire.SolidFireDriver, '_create_template_account')
def test_create_volume_non_512e(self,
_mock_create_template_account,
_mock_issue_api_request):
_mock_issue_api_request.return_value = self.mock_stats_data
_mock_create_template_account.return_value = 1
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'volume_type_id': None,
'created_at': timeutils.utcnow()}
fake_sfaccounts = [{'accountID': 5,
'name': 'testprjid',
'targetSecret': 'shhhh',
'username': 'john-wayne'}]
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
with mock.patch.object(sfv,
'_get_sfaccounts_for_tenant',
return_value=fake_sfaccounts), \
mock.patch.object(sfv,
'_issue_api_request',
side_effect=self.fake_issue_api_request), \
mock.patch.object(sfv,
'_get_account_create_availability',
return_value=fake_sfaccounts[0]):
self.configuration.sf_emulate_512 = False
model_update = sfv.create_volume(testvol)
self.configuration.sf_emulate_512 = True
self.assertEqual(model_update.get('provider_geometry', None),
'4096 4096')
def test_create_delete_snapshot(self):
testsnap = {'project_id': 'testprjid',
'name': 'testvol',
'volume_size': 1,
'id': 'b831c4d1-d1f0-11e1-9b23-0800200c9a66',
'volume_id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'volume_type_id': None,
'created_at': timeutils.utcnow()}
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
sfv.create_snapshot(testsnap)
with mock.patch.object(solidfire.SolidFireDriver,
'_get_sf_snapshots',
return_value=[{'snapshotID': '1',
'name': 'UUID-b831c4d1-d1f0-11e1-9b23-0800200c9a66'}]), \
mock.patch.object(sfv,
'_get_sfaccounts_for_tenant',
return_value=[{'accountID': 5,
'name': 'testprjid'}]):
sfv.delete_snapshot(testsnap)
@mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
@mock.patch.object(solidfire.SolidFireDriver, '_create_template_account')
def test_create_clone(self,
_mock_create_template_account,
_mock_issue_api_request):
_mock_issue_api_request.return_value = self.mock_stats_data
_mock_create_template_account.return_value = 1
_fake_get_snaps = [{'snapshotID': 5, 'name': 'testvol'}]
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'volume_type_id': None,
'created_at': timeutils.utcnow()}
testvol_b = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'b831c4d1-d1f0-11e1-9b23-0800200c9a66',
'volume_type_id': None,
'created_at': timeutils.utcnow()}
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
with mock.patch.object(sfv,
'_get_sf_snapshots',
return_value=_fake_get_snaps), \
mock.patch.object(sfv,
'_issue_api_request',
side_effect=self.fake_issue_api_request), \
mock.patch.object(sfv,
'_get_sfaccounts_for_tenant',
return_value=[]), \
mock.patch.object(sfv,
'_get_model_info',
return_value={}):
sfv.create_cloned_volume(testvol_b, testvol)
def test_initialize_connector_with_blocksizes(self):
connector = {'initiator': 'iqn.2012-07.org.fake:01'}
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'volume_type_id': None,
'provider_location': '10.10.7.1:3260 iqn.2010-01.com.'
'solidfire:87hg.uuid-2cc06226-cc'
'74-4cb7-bd55-14aed659a0cc.4060 0',
'provider_auth': 'CHAP stack-1-a60e2611875f40199931f2'
'c76370d66b 2FE0CQ8J196R',
'provider_geometry': '4096 4096',
'created_at': timeutils.utcnow(),
}
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
properties = sfv.initialize_connection(testvol, connector)
self.assertEqual('4096', properties['data']['physical_block_size'])
self.assertEqual('4096', properties['data']['logical_block_size'])
def test_create_volume_fails(self):
# NOTE(JDG) This test just fakes update_cluster_status
# this is inentional for this test
self.stubs.Set(solidfire.SolidFireDriver,
'_update_cluster_status',
self.fake_update_cluster_status)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request_fails)
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'created_at': timeutils.utcnow()}
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
try:
sfv.create_volume(testvol)
self.fail("Should have thrown Error")
except Exception:
pass
def test_create_sfaccount(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
account = sfv._create_sfaccount('project-id')
self.assertIsNotNone(account)
def test_create_sfaccount_fails(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request_fails)
account = sfv._create_sfaccount('project-id')
self.assertIsNone(account)
def test_get_sfaccount_by_name(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
account = sfv._get_sfaccount_by_name('some-name')
self.assertIsNotNone(account)
def test_get_sfaccount_by_name_fails(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request_fails)
account = sfv._get_sfaccount_by_name('some-name')
self.assertIsNone(account)
@mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
@mock.patch.object(solidfire.SolidFireDriver, '_create_template_account')
def test_delete_volume(self,
_mock_create_template_account,
_mock_issue_api_request):
_mock_issue_api_request.return_value = self.mock_stats_data
_mock_create_template_account.return_value = 1
testvol = {'project_id': 'testprjid',
'name': 'test_volume',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'created_at': timeutils.utcnow()}
fake_sfaccounts = [{'accountID': 5,
'name': 'testprjid',
'targetSecret': 'shhhh',
'username': 'john-wayne'}]
def _fake_do_v_create(project_id, params):
return project_id, params
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
with mock.patch.object(sfv,
'_get_sfaccounts_for_tenant',
return_value=fake_sfaccounts), \
mock.patch.object(sfv,
'_issue_api_request',
side_effect=self.fake_issue_api_request), \
mock.patch.object(sfv,
'_get_account_create_availability',
return_value=fake_sfaccounts[0]), \
mock.patch.object(sfv,
'_do_volume_create',
side_effect=_fake_do_v_create):
sfv.delete_volume(testvol)
def test_delete_volume_fails_no_volume(self):
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
testvol = {'project_id': 'testprjid',
'name': 'no-name',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'created_at': timeutils.utcnow()}
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
try:
sfv.delete_volume(testvol)
self.fail("Should have thrown Error")
except Exception:
pass
def test_get_cluster_info(self):
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
sfv._get_cluster_info()
def test_get_cluster_info_fail(self):
# NOTE(JDG) This test just fakes update_cluster_status
# this is inentional for this test
self.stubs.Set(solidfire.SolidFireDriver,
'_update_cluster_status',
self.fake_update_cluster_status)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request_fails)
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.assertRaises(exception.SolidFireAPIException,
sfv._get_cluster_info)
def test_extend_volume(self):
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
testvol = {'project_id': 'testprjid',
'name': 'test_volume',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'created_at': timeutils.utcnow()}
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
sfv.extend_volume(testvol, 2)
def test_extend_volume_fails_no_volume(self):
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
testvol = {'project_id': 'testprjid',
'name': 'no-name',
'size': 1,
'id': 'not-found'}
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.assertRaises(exception.VolumeNotFound,
sfv.extend_volume,
testvol, 2)
def test_extend_volume_fails_account_lookup(self):
# NOTE(JDG) This test just fakes update_cluster_status
# this is intentional for this test
self.stubs.Set(solidfire.SolidFireDriver,
'_update_cluster_status',
self.fake_update_cluster_status)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request_fails)
testvol = {'project_id': 'testprjid',
'name': 'no-name',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'created_at': timeutils.utcnow()}
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.assertRaises(exception.SolidFireAccountNotFound,
sfv.extend_volume,
testvol, 2)
def test_set_by_qos_spec_with_scoping(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
qos_ref = qos_specs.create(self.ctxt,
'qos-specs-1', {'qos:minIOPS': '1000',
'qos:maxIOPS': '10000',
'qos:burstIOPS': '20000'})
type_ref = volume_types.create(self.ctxt,
"type1", {"qos:minIOPS": "100",
"qos:burstIOPS": "300",
"qos:maxIOPS": "200"})
qos_specs.associate_qos_with_type(self.ctxt,
qos_ref['id'],
type_ref['id'])
qos = sfv._set_qos_by_volume_type(self.ctxt, type_ref['id'])
self.assertEqual(qos, self.expected_qos_results)
def test_set_by_qos_spec(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
qos_ref = qos_specs.create(self.ctxt,
'qos-specs-1', {'minIOPS': '1000',
'maxIOPS': '10000',
'burstIOPS': '20000'})
type_ref = volume_types.create(self.ctxt,
"type1", {"qos:minIOPS": "100",
"qos:burstIOPS": "300",
"qos:maxIOPS": "200"})
qos_specs.associate_qos_with_type(self.ctxt,
qos_ref['id'],
type_ref['id'])
qos = sfv._set_qos_by_volume_type(self.ctxt, type_ref['id'])
self.assertEqual(qos, self.expected_qos_results)
def test_set_by_qos_by_type_only(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
type_ref = volume_types.create(self.ctxt,
"type1", {"qos:minIOPS": "100",
"qos:burstIOPS": "300",
"qos:maxIOPS": "200"})
qos = sfv._set_qos_by_volume_type(self.ctxt, type_ref['id'])
self.assertEqual(qos, {'minIOPS': 100,
'maxIOPS': 200,
'burstIOPS': 300})
def test_accept_transfer(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
testvol = {'project_id': 'testprjid',
'name': 'test_volume',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'created_at': timeutils.utcnow()}
expected = {'provider_auth': 'CHAP cinder-new_project 123456789012'}
self.assertEqual(sfv.accept_transfer(self.ctxt,
testvol,
'new_user', 'new_project'),
expected)
def test_accept_transfer_volume_not_found_raises(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
testvol = {'project_id': 'testprjid',
'name': 'test_volume',
'size': 1,
'id': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
'created_at': timeutils.utcnow()}
self.assertRaises(exception.VolumeNotFound,
sfv.accept_transfer,
self.ctxt,
testvol,
'new_user',
'new_project')
def test_retype(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
type_ref = volume_types.create(self.ctxt,
"type1", {"qos:minIOPS": "500",
"qos:burstIOPS": "2000",
"qos:maxIOPS": "1000"})
diff = {'encryption': {}, 'qos_specs': {},
'extra_specs': {'qos:burstIOPS': ('10000', u'2000'),
'qos:minIOPS': ('1000', u'500'),
'qos:maxIOPS': ('10000', u'1000')}}
host = None
testvol = {'project_id': 'testprjid',
'name': 'test_volume',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'created_at': timeutils.utcnow()}
self.assertTrue(sfv.retype(self.ctxt,
testvol,
type_ref, diff, host))
def test_retype_with_qos_spec(self):
test_type = {'name': 'sf-1',
'qos_specs_id': 'fb0576d7-b4b5-4cad-85dc-ca92e6a497d1',
'deleted': False,
'created_at': '2014-02-06 04:58:11',
'updated_at': None,
'extra_specs': {},
'deleted_at': None,
'id': 'e730e97b-bc7d-4af3-934a-32e59b218e81'}
test_qos_spec = {'id': 'asdfafdasdf',
'specs': {'minIOPS': '1000',
'maxIOPS': '2000',
'burstIOPS': '3000'}}
def _fake_get_volume_type(ctxt, type_id):
return test_type
def _fake_get_qos_spec(ctxt, spec_id):
return test_qos_spec
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
self.stubs.Set(volume_types, 'get_volume_type',
_fake_get_volume_type)
self.stubs.Set(qos_specs, 'get_qos_specs',
_fake_get_qos_spec)
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
diff = {'encryption': {}, 'extra_specs': {},
'qos_specs': {'burstIOPS': ('10000', '2000'),
'minIOPS': ('1000', '500'),
'maxIOPS': ('10000', '1000')}}
host = None
testvol = {'project_id': 'testprjid',
'name': 'test_volume',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'created_at': timeutils.utcnow()}
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.assertTrue(sfv.retype(self.ctxt,
testvol,
test_type, diff, host))
def test_update_cluster_status(self):
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
sfv._update_cluster_status()
self.assertEqual(sfv.cluster_stats['free_capacity_gb'], 99.0)
self.assertEqual(sfv.cluster_stats['total_capacity_gb'], 100.0)
def test_manage_existing_volume(self):
external_ref = {'name': 'existing volume', 'source-id': 5}
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66',
'created_at': timeutils.utcnow()}
self.stubs.Set(solidfire.SolidFireDriver,
'_issue_api_request',
self.fake_issue_api_request)
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
model_update = sfv.manage_existing(testvol, external_ref)
self.assertIsNotNone(model_update)
self.assertIsNone(model_update.get('provider_geometry', None))
@mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
@mock.patch.object(solidfire.SolidFireDriver, '_create_template_account')
def test_create_volume_for_migration(self,
_mock_create_template_account,
_mock_issue_api_request):
_mock_issue_api_request.return_value = self.mock_stats_data
_mock_create_template_account.return_value = 1
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'b830b3c0-d1f0-11e1-9b23-1900200c9a77',
'volume_type_id': None,
'created_at': timeutils.utcnow(),
'migration_status': 'target:'
'a720b3c0-d1f0-11e1-9b23-0800200c9a66'}
fake_sfaccounts = [{'accountID': 5,
'name': 'testprjid',
'targetSecret': 'shhhh',
'username': 'john-wayne'}]
def _fake_do_v_create(project_id, params):
return project_id, params
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
with mock.patch.object(sfv,
'_get_sfaccounts_for_tenant',
return_value=fake_sfaccounts), \
mock.patch.object(sfv,
'_issue_api_request',
side_effect=self.fake_issue_api_request), \
mock.patch.object(sfv,
'_get_account_create_availability',
return_value=fake_sfaccounts[0]), \
mock.patch.object(sfv,
'_do_volume_create',
side_effect=_fake_do_v_create):
proj_id, sf_vol_object = sfv.create_volume(testvol)
self.assertEqual('a720b3c0-d1f0-11e1-9b23-0800200c9a66',
sf_vol_object['attributes']['uuid'])
self.assertEqual('b830b3c0-d1f0-11e1-9b23-1900200c9a77',
sf_vol_object['attributes']['migration_uuid'])
self.assertEqual('UUID-a720b3c0-d1f0-11e1-9b23-0800200c9a66',
sf_vol_object['name'])
@mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
@mock.patch.object(solidfire.SolidFireDriver, '_get_sfaccount')
@mock.patch.object(solidfire.SolidFireDriver, '_get_sf_volume')
@mock.patch.object(solidfire.SolidFireDriver, '_create_image_volume')
def test_verify_image_volume_out_of_date(self,
_mock_create_image_volume,
_mock_get_sf_volume,
_mock_get_sfaccount,
_mock_issue_api_request):
fake_sf_vref = {
'status': 'active', 'volumeID': 1,
'attributes': {
'image_info':
{'image_updated_at': '2014-12-17T00:16:23+00:00',
'image_id': '17c550bb-a411-44c0-9aaf-0d96dd47f501',
'image_name': 'fake-image',
'image_created_at': '2014-12-17T00:16:23+00:00'}}}
stats_data =\
{'result':
{'clusterCapacity': {'maxProvisionedSpace': 107374182400,
'usedSpace': 1073741824,
'compressionPercent': 100,
'deDuplicationPercent': 100,
'thinProvisioningPercent': 100}}}
_mock_issue_api_request.return_value = stats_data
_mock_get_sfaccount.return_value = {'username': 'openstack-vtemplate',
'accountID': 7777}
_mock_get_sf_volume.return_value = fake_sf_vref
_mock_create_image_volume.return_value = fake_sf_vref
image_meta = {'id': '17c550bb-a411-44c0-9aaf-0d96dd47f501',
'updated_at': datetime.datetime(2013, 9, 28,
15, 27, 36,
325355)}
image_service = 'null'
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
_mock_issue_api_request.return_value = {'result': 'ok'}
sfv._verify_image_volume(self.ctxt, image_meta, image_service)
self.assertTrue(_mock_create_image_volume.called)
@mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
@mock.patch.object(solidfire.SolidFireDriver, '_get_sfaccount')
@mock.patch.object(solidfire.SolidFireDriver, '_get_sf_volume')
@mock.patch.object(solidfire.SolidFireDriver, '_create_image_volume')
def test_verify_image_volume_ok(self,
_mock_create_image_volume,
_mock_get_sf_volume,
_mock_get_sfaccount,
_mock_issue_api_request):
_mock_issue_api_request.return_value = self.mock_stats_data
_mock_get_sfaccount.return_value = {'username': 'openstack-vtemplate',
'accountID': 7777}
_mock_get_sf_volume.return_value =\
{'status': 'active', 'volumeID': 1,
'attributes': {
'image_info':
{'image_updated_at': '2013-09-28T15:27:36.325355',
'image_id': '17c550bb-a411-44c0-9aaf-0d96dd47f501',
'image_name': 'fake-image',
'image_created_at': '2014-12-17T00:16:23+00:00'}}}
_mock_create_image_volume.return_value = None
image_meta = {'id': '17c550bb-a411-44c0-9aaf-0d96dd47f501',
'updated_at': datetime.datetime(2013, 9, 28,
15, 27, 36,
325355)}
image_service = 'null'
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
_mock_issue_api_request.return_value = {'result': 'ok'}
sfv._verify_image_volume(self.ctxt, image_meta, image_service)
self.assertFalse(_mock_create_image_volume.called)
@mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
def test_clone_image_not_configured(self, _mock_issue_api_request):
_mock_issue_api_request.return_value = self.mock_stats_data
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
self.assertEqual((None, False),
sfv.clone_image(self.ctxt,
self.mock_volume,
'fake',
self.fake_image_meta,
'fake'))
@mock.patch.object(solidfire.SolidFireDriver, '_create_template_account')
def test_clone_image_authorization(self, _mock_create_template_account):
_mock_create_template_account.return_value = 1
self.configuration.sf_allow_template_caching = True
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
# Make sure if it's NOT public and we're NOT the owner it
# doesn't try and cache
_fake_image_meta = {'id': '17c550bb-a411-44c0-9aaf-0d96dd47f501',
'updated_at': datetime.datetime(2013, 9,
28, 15,
27, 36,
325355),
'properties': {'virtual_size': 1},
'is_public': False,
'owner': 'wrong-owner'}
with mock.patch.object(sfv, '_do_clone_volume',
return_value=('fe', 'fi', 'fo')):
self.assertEqual((None, False),
sfv.clone_image(self.ctxt,
self.mock_volume,
'fake',
_fake_image_meta,
'fake'))
# And is_public False, but the correct owner does work
_fake_image_meta['owner'] = 'testprjid'
self.assertEqual(('fo', True), sfv.clone_image(self.ctxt,
self.mock_volume,
'fake',
_fake_image_meta,
'fake'))
# And is_public True, even if not the correct owner
_fake_image_meta['is_public'] = True
_fake_image_meta['owner'] = 'wrong-owner'
self.assertEqual(('fo', True), sfv.clone_image(self.ctxt,
self.mock_volume,
'fake',
_fake_image_meta,
'fake'))
@mock.patch.object(solidfire.SolidFireDriver, '_issue_api_request')
@mock.patch.object(solidfire.SolidFireDriver, '_create_template_account')
def test_clone_image_virt_size_not_set(self,
_mock_create_template_account,
_mock_issue_api_request):
_mock_issue_api_request.return_value = self.mock_stats_data
_mock_create_template_account.return_value = 1
self.configuration.sf_allow_template_caching = True
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
# Don't run clone_image if virtual_size property not on image
_fake_image_meta = {'id': '17c550bb-a411-44c0-9aaf-0d96dd47f501',
'updated_at': datetime.datetime(2013, 9,
28, 15,
27, 36,
325355),
'is_public': True,
'owner': 'testprjid'}
self.assertEqual((None, False),
sfv.clone_image(self.ctxt,
self.mock_volume,
'fake',
_fake_image_meta,
'fake'))
def test_create_template_no_account(self):
sfv = solidfire.SolidFireDriver(configuration=self.configuration)
def _fake_issue_api_req(method, params, version=0):
if 'GetAccountByName' in method:
raise exception.SolidFireAPIException
return {'result': {'accountID': 1}}
with mock.patch.object(sfv,
'_issue_api_request',
side_effect=_fake_issue_api_req):
self.assertEqual(1,
sfv._create_template_account('foo'))
| 47.345491 | 103 | 0.503895 | 4,079 | 46,730 | 5.472175 | 0.109831 | 0.030465 | 0.055777 | 0.062721 | 0.773666 | 0.730075 | 0.70297 | 0.676941 | 0.666637 | 0.645894 | 0 | 0.056263 | 0.399807 | 46,730 | 986 | 104 | 47.393509 | 0.73958 | 0.02446 | 0 | 0.67929 | 0 | 0 | 0.170175 | 0.049518 | 0 | 0 | 0 | 0 | 0.04142 | 1 | 0.060355 | false | 0.00355 | 0.014201 | 0.014201 | 0.110059 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
ed12d273e9302bfaab622164962d813cc3d05935 | 3,237 | py | Python | pycache/Adapter/CacheItemPoolInterface.py | pycache-adaptor/pycache-adaptor | 62bf65a11999724424aff50c274b6fb9949cfa48 | [
"MIT"
] | 48 | 2016-06-20T18:12:02.000Z | 2018-12-11T05:59:20.000Z | pycache/Adapter/CacheItemPoolInterface.py | pycache-adaptor/pycache-adaptor | 62bf65a11999724424aff50c274b6fb9949cfa48 | [
"MIT"
] | 1 | 2016-06-21T21:36:23.000Z | 2016-06-22T16:03:35.000Z | pycache/Adapter/CacheItemPoolInterface.py | pycache-adaptor/pycache-adaptor | 62bf65a11999724424aff50c274b6fb9949cfa48 | [
"MIT"
] | 2 | 2016-06-21T11:42:02.000Z | 2018-07-20T14:17:53.000Z | # -*- coding: utf-8 -*-
"""CacheItemPoolInterface.
This module generates CacheItemInterface objects.
"""
from abc import ABCMeta, abstractmethod
class CacheItemPoolInterface():
__metaclass__ = ABCMeta
@abstractmethod
def get_item(self, key):
"""Returns a Cache Item representing the specified key.
Note:
This method must always return a CacheItemInterface object, even in case of
a cache miss. It MUST NOT return null.
:param key: The key for which to return the corresponding Cache Item.
:exception CacheException: If the `key` string is not a legal value
:return The corresponding Cache Item.
"""
pass
@abstractmethod
def get_items(self, keys=None):
"""Returns a traversable set of cache items.
:param keys: An indexed array of keys of items to retrieve.
:exception CacheException: If any of the keys in `keys` are not a legal value
:return
A traversable collection of Cache Items keyed by the cache keys of
each item. A Cache item will be returned for each key, even if that
key is not found. However, if no keys are specified then an empty
traversable MUST be returned instead.
"""
pass
@abstractmethod
def has_item(self, key):
"""Confirms if the cache contains specified cache item.
Note:
This method MAY avoid retrieving the cached value for performance reasons.
This could result in a race condition with CacheItemInterface::get(). To avoid
such situation use CacheItemInterface::isHit() instead.
:param key: The key for which to check existence.
:exception CacheException: If any of the keys in `keys` are not a legal value
:return True if item exists in the cache, false otherwise.
"""
pass
@abstractmethod
def clear(self):
"""Deletes all items in the pool.
:return True if the pool was successfully cleared. False if there was an error.
"""
pass
@abstractmethod
def delete_item(self, key):
"""Removes the item from the pool.
:param key: The key for which to delete
:exception CacheException: If any of the keys in `keys` are not a legal value
:return True if the item was successfully removed. False if there was an error.
"""
pass
@abstractmethod
def save(self, item):
"""Persists a cache item immediately.
:param item: The cache item to save.
:return True if the item was successfully persisted. False if there was an error.
"""
pass
@abstractmethod
def save_deferred(self, item):
"""Sets a cache item to be persisted later.
:param item: The cache item to save.
:return False if the item could not be queued or if a commit was attempted and failed. True otherwise.
"""
pass
@abstractmethod
def commit(self):
"""Persists any deferred cache items.
:return True if all not-yet-saved items were successfully saved or there were none. False otherwise.
"""
pass
| 27.432203 | 110 | 0.636701 | 419 | 3,237 | 4.897375 | 0.312649 | 0.039474 | 0.071637 | 0.02729 | 0.297758 | 0.260721 | 0.260721 | 0.201267 | 0.169103 | 0.148148 | 0 | 0.000443 | 0.303367 | 3,237 | 117 | 111 | 27.666667 | 0.909534 | 0.679024 | 0 | 0.592593 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.296296 | false | 0.296296 | 0.037037 | 0 | 0.407407 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
ed1ed4d49d095bbaa7b5460d4c77920236751871 | 216 | py | Python | Module1--- HelloWorld-variables and operators/Exercise_Solutions/Taxi_estimate.py | sunjeet-khokhar/Python_for_Testers | 156231f7d49566c62f9e227e5dbf4de6116148f4 | [
"Apache-2.0"
] | null | null | null | Module1--- HelloWorld-variables and operators/Exercise_Solutions/Taxi_estimate.py | sunjeet-khokhar/Python_for_Testers | 156231f7d49566c62f9e227e5dbf4de6116148f4 | [
"Apache-2.0"
] | null | null | null | Module1--- HelloWorld-variables and operators/Exercise_Solutions/Taxi_estimate.py | sunjeet-khokhar/Python_for_Testers | 156231f7d49566c62f9e227e5dbf4de6116148f4 | [
"Apache-2.0"
] | null | null | null | total_taxis = 100
size = 4
expected_customers = 120
size_without_driver = size - 1
number_of_cars_required = expected_customers/size_without_driver
print(f"number of cars required {number_of_cars_required}")
| 27 | 65 | 0.800926 | 32 | 216 | 5 | 0.53125 | 0.15 | 0.225 | 0.375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043011 | 0.138889 | 216 | 7 | 66 | 30.857143 | 0.817204 | 0 | 0 | 0 | 0 | 0 | 0.235577 | 0.120192 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.166667 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
ed2b6a0a4ab182633882eb149a2c8480e5c1c23c | 369 | py | Python | pyteleport/tests/_test_teleport_basic.py | pulkin/pyteleport | d0291486b82a6f21f9886f920428ccc9b0f36da4 | [
"BSD-2-Clause"
] | 9 | 2021-06-21T09:17:42.000Z | 2022-01-28T10:34:36.000Z | pyteleport/tests/_test_teleport_basic.py | pulkin/pyteleport | d0291486b82a6f21f9886f920428ccc9b0f36da4 | [
"BSD-2-Clause"
] | null | null | null | pyteleport/tests/_test_teleport_basic.py | pulkin/pyteleport | d0291486b82a6f21f9886f920428ccc9b0f36da4 | [
"BSD-2-Clause"
] | 1 | 2021-09-25T02:03:41.000Z | 2021-09-25T02:03:41.000Z | """
[True] hello
[True] vstack []
[True] bstack []
[False] vstack []
[False] bstack []
[False] world
"""
from pyteleport import tp_dummy
from pyteleport.tests.helpers import setup_verbose_logging, print_stack_here, print_, get_tp_args
setup_verbose_logging()
print_("hello")
print_stack_here(print_)
tp_dummy(**get_tp_args())
print_stack_here(print_)
print_("world")
| 19.421053 | 97 | 0.761518 | 52 | 369 | 5 | 0.403846 | 0.115385 | 0.161538 | 0.219231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.100271 | 369 | 18 | 98 | 20.5 | 0.783133 | 0.260163 | 0 | 0.25 | 0 | 0 | 0.037736 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.25 | 0 | 0.25 | 0.625 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 3 |
ed5ec70a9ecd5d7a92e5453279b397612f02b7cf | 137 | py | Python | credentials-example.py | Tylerlhess/SquawkerAPI | 218025666599ab648c035496b5987c0782844393 | [
"Apache-2.0"
] | null | null | null | credentials-example.py | Tylerlhess/SquawkerAPI | 218025666599ab648c035496b5987c0782844393 | [
"Apache-2.0"
] | 1 | 2022-01-31T07:19:33.000Z | 2022-01-31T07:19:33.000Z | credentials-example.py | Tylerlhess/SquawkerAPI | 218025666599ab648c035496b5987c0782844393 | [
"Apache-2.0"
] | null | null | null | # Set the following variables and rename to credentials.py
USER = RPC_USERNAME
PASSWORD = RPC_PASSWORD
SITE_SECRET_KEY = BYTE_STRING
| 15.222222 | 58 | 0.80292 | 20 | 137 | 5.25 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.160584 | 137 | 8 | 59 | 17.125 | 0.913043 | 0.408759 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.333333 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
ed798b1ddd755f86525c23ef0af5296fbfa0a2f0 | 12,702 | py | Python | ziggurat_foundations/migrations/versions/20671b28c538_change_all_linking_k.py | timgates42/ziggurat_foundations | 9eeec894d08e8d7defa60ddc04b63f69cd4cbeba | [
"BSD-3-Clause"
] | null | null | null | ziggurat_foundations/migrations/versions/20671b28c538_change_all_linking_k.py | timgates42/ziggurat_foundations | 9eeec894d08e8d7defa60ddc04b63f69cd4cbeba | [
"BSD-3-Clause"
] | null | null | null | ziggurat_foundations/migrations/versions/20671b28c538_change_all_linking_k.py | timgates42/ziggurat_foundations | 9eeec894d08e8d7defa60ddc04b63f69cd4cbeba | [
"BSD-3-Clause"
] | null | null | null | """change all linking keys from chars to id's
Revision ID: 20671b28c538
Revises: 4c10d97c509
Create Date: 2012-07-07 21:49:21.906150
"""
from __future__ import unicode_literals
import sqlalchemy as sa
from alembic import op
from alembic.context import get_context
from sqlalchemy.dialects.mysql.base import MySQLDialect
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
# `revision` is this migration's unique ID; `down_revision` points to the
# migration that must be applied immediately before this one. Alembic reads
# both module-level names to build the upgrade/downgrade chain, so their
# values must not be altered.
revision = "20671b28c538"
down_revision = "4c10d97c509"
def upgrade():
    """Migrate all linking keys from name-based char columns to integer ids.

    For ``resources`` and each linking table (``groups_permissions``,
    ``groups_resources_permissions``, ``users_groups``, ``users_permissions``,
    ``users_resources_permissions``) this migration:

    1. adds the new integer FK column (on MySQL the FK constraint must be
       added after the new primary key exists),
    2. copies the data across by joining on the old name column,
    3. rebuilds the primary key on the id-based columns,
    4. drops the obsolete name columns.

    Fix: ``op.drop_constraint`` takes the keyword ``type_`` (``type`` was
    removed from Alembic); three calls below previously used ``type=``.
    """
    c = get_context()
    insp = Inspector.from_engine(c.connection.engine)
    # existing migration
    # pre naming convention keys
    groups_permissions_pkey = "groups_permissions_pkey"
    groups_pkey = "groups_pkey"
    groups_resources_permissions_pkey = "groups_resources_permissions_pkey"
    users_groups_pkey = "users_groups_pkey"
    users_permissions_pkey = "users_permissions_pkey"
    users_resources_permissions_pkey = "users_resources_permissions_pkey"
    # inspected keys (override the defaults above with the real PK names)
    groups_permissions_pkey = insp.get_pk_constraint("groups_permissions")["name"]
    groups_pkey = insp.get_pk_constraint("groups")["name"]
    groups_resources_permissions_pkey = insp.get_pk_constraint(
        "groups_resources_permissions"
    )["name"]
    users_groups_pkey = insp.get_pk_constraint("users_groups")["name"]
    users_permissions_pkey = insp.get_pk_constraint("users_permissions")["name"]
    users_resources_permissions_pkey = insp.get_pk_constraint(
        "users_resources_permissions"
    )["name"]
    op.drop_constraint("groups_pkey", "groups", type_="primary")
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column(
            "groups", sa.Column("id", sa.Integer, primary_key=True, autoincrement=False)
        )
        op.create_primary_key(groups_pkey, "groups", cols=["id"])
        op.alter_column(
            "groups",
            "id",
            type_=sa.Integer,
            existing_type=sa.Integer,
            autoincrement=True,
            existing_autoincrement=False,
            nullable=False,
        )
    else:
        op.add_column(
            "groups", sa.Column("id", sa.Integer, primary_key=True, autoincrement=True)
        )
        op.create_primary_key(groups_pkey, "groups", cols=["id"])
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        for t in ["groups_permissions", "groups_resources_permissions", "users_groups"]:
            for constraint in insp.get_foreign_keys(t):
                if constraint["referred_columns"] == ["group_name"]:
                    op.drop_constraint(constraint["name"], t, type_="foreignkey")
        for t in ["users_resources_permissions", "users_permissions", "users_groups"]:
            for constraint in insp.get_foreign_keys(t):
                if constraint["referred_columns"] == ["user_name"]:
                    op.drop_constraint(constraint["name"], t, type_="foreignkey")
        for constraint in insp.get_foreign_keys("resources"):
            if constraint["referred_columns"] in [["user_name"], ["group_name"]]:
                op.drop_constraint(constraint["name"], "resources", type_="foreignkey")
    op.add_column(
        "resources",
        sa.Column(
            "owner_user_id",
            sa.Integer(),
            sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="SET NULL"),
        ),
    )
    op.add_column(
        "resources",
        sa.Column(
            "owner_group_id",
            sa.Integer(),
            sa.ForeignKey("groups.id", onupdate="CASCADE", ondelete="SET NULL"),
        ),
    )
    # update the data
    resources_table = sa.Table(
        "resources", sa.MetaData(), autoload=True, autoload_with=c.connection
    )
    users_table = sa.Table(
        "users", sa.MetaData(), autoload=True, autoload_with=c.connection
    )
    groups_table = sa.Table(
        "groups", sa.MetaData(), autoload=True, autoload_with=c.connection
    )
    stmt = (
        resources_table.update()
        .values(owner_user_id=users_table.c.id)
        .where(users_table.c.user_name == resources_table.c.owner_user_name)
    )
    op.execute(stmt)
    stmt = (
        resources_table.update()
        .values(owner_group_id=groups_table.c.id)
        .where(groups_table.c.group_name == resources_table.c.owner_group_name)
    )
    op.execute(stmt)
    # mysql is stupid as usual so we cant create FKEY and add PKEY later,
    # need to set PKEY first and then set FKEY
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column("groups_permissions", sa.Column("group_id", sa.Integer()))
    else:
        op.add_column(
            "groups_permissions",
            sa.Column(
                "group_id",
                sa.Integer(),
                sa.ForeignKey(
                    "groups.id", onupdate="CASCADE", ondelete="CASCADE"  # noqa
                ),
            ),
        )  # noqa
    groups_permissions_table = sa.Table(
        "groups_permissions", sa.MetaData(), autoload=True, autoload_with=c.connection
    )
    stmt = (
        groups_permissions_table.update()
        .values(group_id=groups_table.c.id)
        .where(groups_table.c.group_name == groups_permissions_table.c.group_name)
    )
    op.execute(stmt)
    op.drop_constraint(groups_permissions_pkey, "groups_permissions", type_="primary")
    op.create_primary_key(
        groups_permissions_pkey, "groups_permissions", cols=["group_id", "perm_name"]
    )
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(
            None,
            "groups_permissions",
            "groups",
            remote_cols=["id"],
            local_cols=["group_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column(
            "groups_resources_permissions", sa.Column("group_id", sa.Integer())
        )
    else:
        op.add_column(
            "groups_resources_permissions",
            sa.Column(
                "group_id",
                sa.Integer(),
                sa.ForeignKey("groups.id", onupdate="CASCADE", ondelete="CASCADE"),
            ),
        )
    groups_resources_permissions_table = sa.Table(
        "groups_resources_permissions",
        sa.MetaData(),
        autoload=True,
        autoload_with=c.connection,
    )
    stmt = (
        groups_resources_permissions_table.update()
        .values(group_id=groups_table.c.id)
        .where(
            groups_table.c.group_name == groups_resources_permissions_table.c.group_name
        )
    )
    op.execute(stmt)
    op.drop_constraint(
        groups_resources_permissions_pkey,
        "groups_resources_permissions",
        type_="primary",
    )
    op.create_primary_key(
        groups_resources_permissions_pkey,
        "groups_resources_permissions",
        cols=["group_id", "resource_id", "perm_name"],
    )
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(
            None,
            "groups_resources_permissions",
            "groups",
            remote_cols=["id"],
            local_cols=["group_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column("users_groups", sa.Column("group_id", sa.Integer()))
    else:
        op.add_column(
            "users_groups",
            sa.Column(
                "group_id",
                sa.Integer(),
                sa.ForeignKey(
                    "groups.id", onupdate="CASCADE", ondelete="CASCADE"  # noqa
                ),
            ),
        )  # noqa
    users_groups_table = sa.Table(
        "users_groups", sa.MetaData(), autoload=True, autoload_with=c.connection
    )
    stmt = (
        users_groups_table.update()
        .values(group_id=groups_table.c.id)
        .where(groups_table.c.group_name == users_groups_table.c.group_name)
    )
    op.execute(stmt)
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column("users_groups", sa.Column("user_id", sa.Integer()))
    else:
        op.add_column(
            "users_groups",
            sa.Column(
                "user_id",
                sa.Integer(),
                sa.ForeignKey(
                    "users.id", onupdate="CASCADE", ondelete="CASCADE"  # noqa
                ),
            ),
        )  # noqa
    users_groups_table = sa.Table(
        "users_groups", sa.MetaData(), autoload=True, autoload_with=c.connection
    )
    stmt = (
        users_groups_table.update()
        .values(user_id=users_table.c.id)
        .where(users_table.c.user_name == users_groups_table.c.user_name)
    )
    op.execute(stmt)
    # was type="primary" — Alembic's keyword is type_
    op.drop_constraint(users_groups_pkey, "users_groups", type_="primary")
    op.create_primary_key(
        users_groups_pkey, "users_groups", cols=["user_id", "group_id"]
    )
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(
            None,
            "users_groups",
            "groups",
            remote_cols=["id"],
            local_cols=["group_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
        op.create_foreign_key(
            None,
            "users_groups",
            "users",
            remote_cols=["id"],
            local_cols=["user_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column("users_permissions", sa.Column("user_id", sa.Integer()))
    else:
        op.add_column(
            "users_permissions",
            sa.Column(
                "user_id",
                sa.Integer(),
                sa.ForeignKey(
                    "users.id", onupdate="CASCADE", ondelete="CASCADE"  # noqa
                ),
            ),
        )  # noqa
    users_permissions_table = sa.Table(
        "users_permissions", sa.MetaData(), autoload=True, autoload_with=c.connection
    )
    stmt = (
        users_permissions_table.update()
        .values(user_id=users_table.c.id)
        .where(users_table.c.user_name == users_permissions_table.c.user_name)
    )
    op.execute(stmt)
    # was type="primary" — Alembic's keyword is type_
    op.drop_constraint(users_permissions_pkey, "users_permissions", type_="primary")
    op.create_primary_key(
        users_permissions_pkey, "users_permissions", cols=["user_id", "perm_name"]
    )
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(
            None,
            "users_permissions",
            "users",
            remote_cols=["id"],
            local_cols=["user_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.add_column("users_resources_permissions", sa.Column("user_id", sa.Integer()))
    else:
        op.add_column(
            "users_resources_permissions",
            sa.Column(
                "user_id",
                sa.Integer(),
                sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"),
            ),
        )
    users_resources_permissions_table = sa.Table(
        "users_resources_permissions",
        sa.MetaData(),
        autoload=True,
        autoload_with=c.connection,
    )
    stmt = (
        users_resources_permissions_table.update()
        .values(user_id=users_table.c.id)
        .where(users_table.c.user_name == users_resources_permissions_table.c.user_name)
    )
    op.execute(stmt)
    # was type="primary" — Alembic's keyword is type_
    op.drop_constraint(
        users_resources_permissions_pkey, "users_resources_permissions", type_="primary"
    )
    op.create_primary_key(
        users_resources_permissions_pkey,
        "users_resources_permissions",
        cols=["user_id", "resource_id", "perm_name"],
    )
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(
            None,
            "users_resources_permissions",
            "users",
            remote_cols=["id"],
            local_cols=["user_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
    # finally, retire the old name-based columns
    op.drop_column("resources", "owner_user_name")
    op.drop_column("resources", "owner_group_name")
    op.drop_column("groups_permissions", "group_name")
    op.drop_column("groups_resources_permissions", "group_name")
    op.drop_column("users_resources_permissions", "user_name")
    op.drop_column("users_groups", "group_name")
    op.drop_column("users_groups", "user_name")
    op.drop_column("users_permissions", "user_name")
def downgrade():
    """Intentionally a no-op: this key migration is not reversible."""
| 33.603175 | 88 | 0.608723 | 1,366 | 12,702 | 5.36896 | 0.090776 | 0.092719 | 0.060267 | 0.047723 | 0.826152 | 0.749932 | 0.700845 | 0.607445 | 0.561085 | 0.550177 | 0 | 0.00607 | 0.273736 | 12,702 | 377 | 89 | 33.692308 | 0.788943 | 0.031727 | 0 | 0.564565 | 0 | 0 | 0.174813 | 0.049283 | 0 | 0 | 0 | 0 | 0 | 1 | 0.006006 | false | 0.003003 | 0.018018 | 0 | 0.024024 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
ed847a88d7fb5b50ea3ee85273b2b0f230a0f280 | 870 | py | Python | src/gino/__init__.py | fantix-org/gino | 3f52ff11a8a07192e60c2e015c8d4ba71bbf62ab | [
"BSD-3-Clause"
] | null | null | null | src/gino/__init__.py | fantix-org/gino | 3f52ff11a8a07192e60c2e015c8d4ba71bbf62ab | [
"BSD-3-Clause"
] | null | null | null | src/gino/__init__.py | fantix-org/gino | 3f52ff11a8a07192e60c2e015c8d4ba71bbf62ab | [
"BSD-3-Clause"
] | null | null | null | from .api import Gino # NOQA
from .bakery import Bakery
from .engine import GinoEngine, GinoConnection # NOQA
from .exceptions import * # NOQA
from .strategies import GinoStrategy # NOQA
def create_engine(*args, **kwargs):
    """
    Shortcut for :func:`sqlalchemy.create_engine` with ``strategy="gino"``.
    .. versionchanged:: 1.1
        Added the ``bakery`` keyword argument, please see :class:`~.bakery.Bakery`.
    """
    from sqlalchemy import create_engine as sa_create_engine

    if "strategy" not in kwargs:
        kwargs["strategy"] = "gino"
    return sa_create_engine(*args, **kwargs)
def get_version():
    """Get current GINO version."""
    try:
        # Python 3.8+: package metadata lives in the standard library
        from importlib.metadata import version as _version
    except ImportError:
        # older interpreters rely on the importlib_metadata backport
        from importlib_metadata import version as _version
    return _version("gino")
# noinspection PyBroadException
# Best-effort: resolve the installed package version at import time.  If the
# package metadata cannot be found (e.g. running from a source checkout),
# __version__ is simply left undefined rather than failing the import.
try:
    __version__ = get_version()
except Exception:
    pass
| 23.513514 | 82 | 0.690805 | 97 | 870 | 6.082474 | 0.463918 | 0.081356 | 0.054237 | 0.074576 | 0.115254 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002886 | 0.203448 | 870 | 36 | 83 | 24.166667 | 0.848485 | 0.289655 | 0 | 0.105263 | 0 | 0 | 0.02735 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0.052632 | 0.473684 | 0 | 0.684211 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 3 |
ed8bc1ae5df5a8aa08cd4084d9938df0e5978ae9 | 32,755 | py | Python | seleniumbase/fixtures/page_actions.py | tomejorge/SeleniumBase | e3e50bbd80594c52131b0d88ca3e2c2f7692e340 | [
"MIT"
] | 1 | 2021-05-12T14:27:31.000Z | 2021-05-12T14:27:31.000Z | seleniumbase/fixtures/page_actions.py | tomejorge/SeleniumBase | e3e50bbd80594c52131b0d88ca3e2c2f7692e340 | [
"MIT"
] | null | null | null | seleniumbase/fixtures/page_actions.py | tomejorge/SeleniumBase | e3e50bbd80594c52131b0d88ca3e2c2f7692e340 | [
"MIT"
] | null | null | null | """
This module contains a set of methods that can be used for page loads and
for waiting for elements to appear on a page.
These methods improve on and expand existing WebDriver commands.
Improvements include making WebDriver commands more robust and more reliable
by giving page elements enough time to load before taking action on them.
The default option for searching for elements is by CSS Selector.
This can be changed by overriding the "By" parameter.
Options are:
By.CSS_SELECTOR
By.CLASS_NAME
By.ID
By.NAME
By.LINK_TEXT
By.XPATH
By.TAG_NAME
By.PARTIAL_LINK_TEXT
"""
import codecs
import os
import sys
import time
from selenium.common.exceptions import ElementNotInteractableException
from selenium.common.exceptions import ElementNotVisibleException
from selenium.common.exceptions import NoAlertPresentException
from selenium.common.exceptions import NoSuchAttributeException
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoSuchFrameException
from selenium.common.exceptions import NoSuchWindowException
from selenium.common.exceptions import StaleElementReferenceException
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from seleniumbase.config import settings
from seleniumbase.fixtures import shared_utils as s_utils
def is_element_present(driver, selector, by=By.CSS_SELECTOR):
    """
    Returns whether the specified element selector is present on the page.
    @Params
    driver - the webdriver object (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    @Returns
    Boolean (is element present)
    """
    try:
        driver.find_element(by=by, value=selector)
    except Exception:
        return False
    return True
def is_element_visible(driver, selector, by=By.CSS_SELECTOR):
    """
    Returns whether the specified element selector is visible on the page.
    @Params
    driver - the webdriver object (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    @Returns
    Boolean (is element visible)
    """
    try:
        return driver.find_element(by=by, value=selector).is_displayed()
    except Exception:
        return False
def is_element_enabled(driver, selector, by=By.CSS_SELECTOR):
    """
    Returns whether the specified element selector is enabled on the page.
    @Params
    driver - the webdriver object (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    @Returns
    Boolean (is element enabled)
    """
    try:
        return driver.find_element(by=by, value=selector).is_enabled()
    except Exception:
        return False
def is_text_visible(driver, text, selector, by=By.CSS_SELECTOR):
    """
    Returns whether the specified text is visible in the specified selector.
    @Params
    driver - the webdriver object (required)
    text - the text string to search for
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    @Returns
    Boolean (is text visible)
    """
    try:
        target = driver.find_element(by=by, value=selector)
        return target.is_displayed() and text in target.text
    except Exception:
        return False
def hover_on_element(driver, selector, by=By.CSS_SELECTOR):
    """
    Fires the hover event for the specified element by the given selector.
    @Params
    driver - the webdriver object (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    """
    target = driver.find_element(by=by, value=selector)
    ActionChains(driver).move_to_element(target).perform()
def hover_element(driver, element):
    """
    Similar to hover_on_element(), but uses found element, not a selector.
    """
    ActionChains(driver).move_to_element(element).perform()
def timeout_exception(exception, message):
    """Format the given exception/message pair and raise it."""
    exc_class, exc_message = s_utils.format_exc(exception, message)
    raise exc_class(exc_message)
def hover_and_click(
    driver,
    hover_selector,
    click_selector,
    hover_by=By.CSS_SELECTOR,
    click_by=By.CSS_SELECTOR,
    timeout=settings.SMALL_TIMEOUT,
):
    """
    Fires the hover event for a specified element by a given selector, then
    clicks on another element specified. Useful for dropdown hover based menus.
    @Params
    driver - the webdriver object (required)
    hover_selector - the css selector to hover over (required)
    click_selector - the css selector to click on (required)
    hover_by - the hover selector type to search by (Default: By.CSS_SELECTOR)
    click_by - the click selector type to search by (Default: By.CSS_SELECTOR)
    timeout - number of seconds to wait for click element to appear after hover
    """
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    hover_target = driver.find_element(by=hover_by, value=hover_selector)
    hover = ActionChains(driver).move_to_element(hover_target)
    for _ in range(int(timeout * 10)):
        try:
            hover.perform()
            click_target = driver.find_element(by=click_by, value=click_selector)
            click_target.click()
            return click_target
        except Exception:
            if time.time() * 1000.0 >= stop_ms:
                break
            time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    message = "Element {%s} was not present after %s second%s!" % (
        click_selector,
        timeout,
        plural,
    )
    timeout_exception(NoSuchElementException, message)
def hover_element_and_click(
    driver,
    element,
    click_selector,
    click_by=By.CSS_SELECTOR,
    timeout=settings.SMALL_TIMEOUT,
):
    """
    Similar to hover_and_click(), but assumes top element is already found.
    """
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    hover = ActionChains(driver).move_to_element(element)
    for _ in range(int(timeout * 10)):
        try:
            hover.perform()
            click_target = driver.find_element(by=click_by, value=click_selector)
            click_target.click()
            return click_target
        except Exception:
            if time.time() * 1000.0 >= stop_ms:
                break
            time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    message = "Element {%s} was not present after %s second%s!" % (
        click_selector,
        timeout,
        plural,
    )
    timeout_exception(NoSuchElementException, message)
def hover_element_and_double_click(
    driver,
    element,
    click_selector,
    click_by=By.CSS_SELECTOR,
    timeout=settings.SMALL_TIMEOUT,
):
    """
    Hovers over the given element, then double-clicks the element found
    by click_selector once it appears (within the timeout).
    """
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    hover = ActionChains(driver).move_to_element(element)
    for _ in range(int(timeout * 10)):
        try:
            hover.perform()
            click_target = driver.find_element(by=click_by, value=click_selector)
            chain = ActionChains(driver)
            chain.move_to_element(click_target)
            chain.double_click(click_target)
            chain.perform()
            return click_target
        except Exception:
            if time.time() * 1000.0 >= stop_ms:
                break
            time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    message = "Element {%s} was not present after %s second%s!" % (
        click_selector,
        timeout,
        plural,
    )
    timeout_exception(NoSuchElementException, message)
def wait_for_element_present(
    driver, selector, by=By.CSS_SELECTOR, timeout=settings.LARGE_TIMEOUT
):
    """
    Searches for the specified element by the given selector. Returns the
    element object if it exists in the HTML. (The element can be invisible.)
    Raises NoSuchElementException if the element does not exist in the HTML
    within the specified timeout.
    @Params
    driver - the webdriver object
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    timeout - the time to wait for elements in seconds
    @Returns
    A web element object
    """
    element = None
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            element = driver.find_element(by=by, value=selector)
            return element
        except Exception:
            if time.time() * 1000.0 >= stop_ms:
                break
            time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    if not element:
        message = "Element {%s} was not present after %s second%s!" % (
            selector,
            timeout,
            plural,
        )
        timeout_exception(NoSuchElementException, message)
def wait_for_element_visible(
    driver, selector, by=By.CSS_SELECTOR, timeout=settings.LARGE_TIMEOUT
):
    """
    Searches for the specified element by the given selector. Returns the
    element object if the element is present and visible on the page.
    Raises NoSuchElementException if the element does not exist in the HTML
    within the specified timeout.
    Raises ElementNotVisibleException if the element exists in the HTML,
    but is not visible (eg. opacity is "0") within the specified timeout.
    @Params
    driver - the webdriver object (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    timeout - the time to wait for elements in seconds
    @Returns
    A web element object
    """
    element = None
    is_present = False
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            element = driver.find_element(by=by, value=selector)
            is_present = True
            if element.is_displayed():
                return element
            # Present but hidden: clear and retry until the timeout expires
            element = None
            raise Exception()
        except Exception:
            if time.time() * 1000.0 >= stop_ms:
                break
            time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    if not element and by != By.LINK_TEXT:
        if not is_present:
            # The element does not exist in the HTML
            message = "Element {%s} was not present after %s second%s!" % (
                selector,
                timeout,
                plural,
            )
            timeout_exception(NoSuchElementException, message)
        # The element exists in the HTML, but is not visible
        message = "Element {%s} was not visible after %s second%s!" % (
            selector,
            timeout,
            plural,
        )
        timeout_exception(ElementNotVisibleException, message)
    if not element and by == By.LINK_TEXT:
        message = "Link text {%s} was not visible after %s second%s!" % (
            selector,
            timeout,
            plural,
        )
        timeout_exception(ElementNotVisibleException, message)
def wait_for_text_visible(
    driver, text, selector, by=By.CSS_SELECTOR, timeout=settings.LARGE_TIMEOUT
):
    """
    Searches for the specified element by the given selector. Returns the
    element object if the text is present in the element and visible
    on the page.
    Raises NoSuchElementException if the element does not exist in the HTML
    within the specified timeout.
    Raises ElementNotVisibleException if the element exists in the HTML,
    but the text is not visible within the specified timeout.
    @Params
    driver - the webdriver object (required)
    text - the text that is being searched for in the element (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    timeout - the time to wait for elements in seconds
    @Returns
    A web element object that contains the text searched for
    """
    element = None
    is_present = False
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            element = driver.find_element(by=by, value=selector)
            is_present = True
            if element.is_displayed() and text in element.text:
                return element
            # Visible-with-text condition not met yet: retry until timeout
            element = None
            raise Exception()
        except Exception:
            if time.time() * 1000.0 >= stop_ms:
                break
            time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    if not element:
        if not is_present:
            # The element does not exist in the HTML
            message = "Element {%s} was not present after %s second%s!" % (
                selector,
                timeout,
                plural,
            )
            timeout_exception(NoSuchElementException, message)
        # The element exists in the HTML, but the text is not visible
        message = (
            "Expected text {%s} for {%s} was not visible after %s second%s!"
            % (text, selector, timeout, plural)
        )
        timeout_exception(ElementNotVisibleException, message)
def wait_for_exact_text_visible(
    driver, text, selector, by=By.CSS_SELECTOR, timeout=settings.LARGE_TIMEOUT
):
    """
    Searches for the specified element by the given selector. Returns the
    element object if the text matches exactly with the text in the element,
    and the text is visible.
    Raises NoSuchElementException if the element does not exist in the HTML
    within the specified timeout.
    Raises ElementNotVisibleException if the element exists in the HTML,
    but the exact text is not visible within the specified timeout.
    @Params
    driver - the webdriver object (required)
    text - the exact text that is expected for the element (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    timeout - the time to wait for elements in seconds
    @Returns
    A web element object that contains the text searched for
    """
    element = None
    is_present = False
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            element = driver.find_element(by=by, value=selector)
            is_present = True
            # Whitespace-trimmed exact match is required
            if element.is_displayed() and text.strip() == element.text.strip():
                return element
            element = None
            raise Exception()
        except Exception:
            if time.time() * 1000.0 >= stop_ms:
                break
            time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    if not element:
        if not is_present:
            # The element does not exist in the HTML
            message = "Element {%s} was not present after %s second%s!" % (
                selector,
                timeout,
                plural,
            )
            timeout_exception(NoSuchElementException, message)
        # The element exists in the HTML, but the exact text is not visible
        message = (
            "Expected exact text {%s} for {%s} was not visible "
            "after %s second%s!" % (text, selector, timeout, plural)
        )
        timeout_exception(ElementNotVisibleException, message)
def wait_for_attribute(
    driver,
    selector,
    attribute,
    value=None,
    by=By.CSS_SELECTOR,
    timeout=settings.LARGE_TIMEOUT,
):
    """
    Searches for the specified element attribute by the given selector.
    Returns the element object if the expected attribute is present
    and the expected attribute value is present (if specified).
    Raises NoSuchElementException if the element does not exist in the HTML
    within the specified timeout.
    Raises NoSuchAttributeException if the element exists in the HTML,
    but the expected attribute/value is not present within the timeout.
    @Params
    driver - the webdriver object (required)
    selector - the locator for identifying the page element (required)
    attribute - the attribute that is expected for the element (required)
    value - the attribute value that is expected (Default: None)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    timeout - the time to wait for elements in seconds
    @Returns
    A web element object that contains the expected attribute/value
    """
    element = None
    element_present = False
    attribute_present = False
    found_value = None
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            element = driver.find_element(by=by, value=selector)
            element_present = True
            attribute_present = False
            found_value = element.get_attribute(attribute)
            if found_value is None:
                # Attribute missing entirely: retry until timeout
                element = None
                raise Exception()
            attribute_present = True
            if value is None or found_value == value:
                # Either no specific value expected, or it matched
                return element
            element = None
            raise Exception()
        except Exception:
            if time.time() * 1000.0 >= stop_ms:
                break
            time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    if not element:
        if not element_present:
            # The element does not exist in the HTML
            message = "Element {%s} was not present after %s second%s!" % (
                selector,
                timeout,
                plural,
            )
            timeout_exception(NoSuchElementException, message)
        if not attribute_present:
            # The element does not have the attribute
            message = (
                "Expected attribute {%s} of element {%s} was not present "
                "after %s second%s!" % (attribute, selector, timeout, plural)
            )
            timeout_exception(NoSuchAttributeException, message)
        # The element attribute exists, but the expected value does not match
        message = (
            "Expected value {%s} for attribute {%s} of element {%s} was not "
            "present after %s second%s! (The actual value was {%s})"
            % (value, attribute, selector, timeout, plural, found_value)
        )
        timeout_exception(NoSuchAttributeException, message)
def wait_for_element_absent(
    driver, selector, by=By.CSS_SELECTOR, timeout=settings.LARGE_TIMEOUT
):
    """
    Searches for the specified element by the given selector.
    Raises an exception if the element is still present after the
    specified timeout.
    @Params
    driver - the webdriver object
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    timeout - the time to wait for elements in seconds
    """
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            driver.find_element(by=by, value=selector)
        except Exception:
            # Lookup failed => the element is gone
            return True
        if time.time() * 1000.0 >= stop_ms:
            break
        time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    message = "Element {%s} was still present after %s second%s!" % (
        selector,
        timeout,
        plural,
    )
    timeout_exception(Exception, message)
def wait_for_element_not_visible(
    driver, selector, by=By.CSS_SELECTOR, timeout=settings.LARGE_TIMEOUT
):
    """
    Searches for the specified element by the given selector.
    Raises an exception if the element is still visible after the
    specified timeout.
    @Params
    driver - the webdriver object (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    timeout - the time to wait for the element in seconds
    """
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            target = driver.find_element(by=by, value=selector)
            if not target.is_displayed():
                return True
        except Exception:
            # Element missing entirely also counts as "not visible"
            return True
        if time.time() * 1000.0 >= stop_ms:
            break
        time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    message = "Element {%s} was still visible after %s second%s!" % (
        selector,
        timeout,
        plural,
    )
    timeout_exception(Exception, message)
def wait_for_text_not_visible(
    driver, text, selector, by=By.CSS_SELECTOR, timeout=settings.LARGE_TIMEOUT
):
    """
    Searches for the text in the element of the given selector on the page.
    Returns True if the text is not visible on the page within the timeout.
    Raises an exception if the text is still present after the timeout.
    @Params
    driver - the webdriver object (required)
    text - the text that is being searched for in the element (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    timeout - the time to wait for elements in seconds
    @Returns
    A web element object that contains the text searched for
    """
    stop_ms = time.time() * 1000.0 + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        if not is_text_visible(driver, text, selector, by=by):
            return True
        if time.time() * 1000.0 >= stop_ms:
            break
        time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    message = "Text {%s} in {%s} was still visible after %s second%s!" % (
        text,
        selector,
        timeout,
        plural,
    )
    timeout_exception(Exception, message)
def find_visible_elements(driver, selector, by=By.CSS_SELECTOR):
    """
    Finds all WebElements that match a selector and are visible.
    Similar to webdriver.find_elements.
    @Params
    driver - the webdriver object (required)
    selector - the locator for identifying the page element (required)
    by - the type of selector being used (Default: By.CSS_SELECTOR)
    """
    elements = driver.find_elements(by=by, value=selector)
    try:
        visible = []
        for candidate in elements:
            if candidate.is_displayed():
                visible.append(candidate)
        return visible
    except (StaleElementReferenceException, ElementNotInteractableException):
        # A DOM refresh invalidated the references: pause and re-query
        time.sleep(0.1)
        elements = driver.find_elements(by=by, value=selector)
        return [candidate for candidate in elements if candidate.is_displayed()]
def save_screenshot(driver, name, folder=None):
    """
    Saves a screenshot to the current directory (or to a subfolder if provided)
    If the folder provided doesn't exist, it will get created.
    The screenshot will be in PNG format.
    """
    if not name.endswith(".png"):
        name = name + ".png"
    if folder:
        destination = os.path.abspath(".") + "/%s" % folder
        if not os.path.exists(destination):
            os.makedirs(destination)
        screenshot_path = "%s/%s" % (destination, name)
    else:
        screenshot_path = name
    try:
        # Prefer a screenshot of the <body> element.
        body = driver.find_element(by=By.TAG_NAME, value="body")
        with open(screenshot_path, "wb") as png_file:
            png_file.write(body.screenshot_as_png)
    except Exception:
        # Fall back to a full-window screenshot when the body capture fails.
        if driver:
            driver.get_screenshot_as_file(screenshot_path)
def save_page_source(driver, name, folder=None):
    """
    Saves the page HTML to the current directory (or given subfolder).
    If the folder specified doesn't exist, it will get created.
    @Params
    name - The file name to save the current page's HTML to.
    folder - The folder to save the file to. (Default = current folder)
    """
    from seleniumbase.core import log_helper
    if not name.endswith(".html"):
        name = name + ".html"
    if folder:
        abs_path = os.path.abspath(".")
        file_path = abs_path + "/%s" % folder
        if not os.path.exists(file_path):
            os.makedirs(file_path)
        html_file_path = "%s/%s" % (file_path, name)
    else:
        html_file_path = name
    page_source = driver.page_source
    rendered_source = log_helper.get_html_source_with_base_href(
        driver, page_source
    )
    # Use a context manager so the file handle is closed even if the write
    # raises. (The original opened/closed the file manually and leaked the
    # handle on error.)
    with codecs.open(html_file_path, "w+", "utf-8") as html_file:
        html_file.write(rendered_source)
def _get_last_page(driver):
try:
last_page = driver.current_url
except Exception:
last_page = "[WARNING! Browser Not Open!]"
if len(last_page) < 5:
last_page = "[WARNING! Browser Not Open!]"
return last_page
def save_test_failure_data(driver, name, browser_type, folder=None):
    """
    Saves failure data to the current directory (or to a subfolder if provided)
    If the folder provided doesn't exist, it will get created.
    @Params
    driver - the webdriver object (required)
    name - the file name for the failure-data file (required)
    browser_type - the browser name to record in the file (required)
    folder - optional subfolder for the output file
    """
    import traceback
    if folder:
        abs_path = os.path.abspath(".")
        file_path = abs_path + "/%s" % folder
        if not os.path.exists(file_path):
            os.makedirs(file_path)
        failure_data_file_path = "%s/%s" % (file_path, name)
    else:
        failure_data_file_path = name
    last_page = _get_last_page(driver)
    data_to_save = []
    data_to_save.append("Last_Page: %s" % last_page)
    data_to_save.append("Browser: %s " % browser_type)
    # Record the currently-handled exception (this is called from failure
    # hooks, so sys.exc_info() is expected to be populated).
    data_to_save.append(
        "Traceback: "
        + "".join(traceback.format_exception(*sys.exc_info()))
    )
    # Context manager guarantees the handle is closed even if a write fails
    # (the original opened/closed manually and leaked the handle on error).
    with codecs.open(failure_data_file_path, "w+", "utf-8") as failure_data_file:
        failure_data_file.writelines("\r\n".join(data_to_save))
def wait_for_and_accept_alert(driver, timeout=settings.LARGE_TIMEOUT):
    """
    Wait for and accept an alert. Returns the text from the alert.
    @Params
    driver - the webdriver object (required)
    timeout - the time to wait for the alert in seconds
    """
    alert = wait_for_and_switch_to_alert(driver, timeout)
    # Grab the text before accepting; it is gone once the alert closes.
    text_of_alert = alert.text
    alert.accept()
    return text_of_alert
def wait_for_and_dismiss_alert(driver, timeout=settings.LARGE_TIMEOUT):
    """
    Wait for and dismiss an alert. Returns the text from the alert.
    @Params
    driver - the webdriver object (required)
    timeout - the time to wait for the alert in seconds
    """
    alert = wait_for_and_switch_to_alert(driver, timeout)
    # Grab the text before dismissing; it is gone once the alert closes.
    text_of_alert = alert.text
    alert.dismiss()
    return text_of_alert
def wait_for_and_switch_to_alert(driver, timeout=settings.LARGE_TIMEOUT):
    """
    Wait for a browser alert to appear, and switch to it. This should be usable
    as a drop-in replacement for driver.switch_to.alert when the alert box
    may not exist yet.
    @Params
    driver - the webdriver object (required)
    timeout - the time to wait for the alert in seconds
    """
    deadline_ms = (time.time() * 1000.0) + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            alert = driver.switch_to.alert
            # Accessing .text raises NoAlertPresentException if no alert
            # is actually open yet.
            alert.text  # noqa
            return alert
        except NoAlertPresentException:
            if time.time() * 1000.0 >= deadline_ms:
                break
            time.sleep(0.1)
    timeout_exception(
        Exception, "Alert was not present after %s seconds!" % timeout
    )
def switch_to_frame(driver, frame, timeout=settings.SMALL_TIMEOUT):
    """
    Wait for an iframe to appear, and switch to it. This should be
    usable as a drop-in replacement for driver.switch_to.frame().
    @Params
    driver - the webdriver object (required)
    frame - the frame element, name, id, index, or selector
    timeout - the time to wait for the frame in seconds
    """
    from seleniumbase.fixtures import page_utils
    deadline_ms = (time.time() * 1000.0) + (timeout * 1000.0)
    for _ in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            driver.switch_to.frame(frame)
            return True
        except NoSuchFrameException:
            if type(frame) is str:
                # The frame name/id did not match; treat the argument as a
                # selector and try to locate the iframe element directly.
                if page_utils.is_xpath_selector(frame):
                    by = By.XPATH
                else:
                    by = By.CSS_SELECTOR
                if is_element_visible(driver, frame, by=by):
                    try:
                        driver.switch_to.frame(
                            driver.find_element(by=by, value=frame)
                        )
                        return True
                    except Exception:
                        pass
            if time.time() * 1000.0 >= deadline_ms:
                break
            time.sleep(0.1)
    plural = "" if timeout == 1 else "s"
    message = "Frame {%s} was not visible after %s second%s!" % (
        frame,
        timeout,
        plural,
    )
    timeout_exception(Exception, message)
def switch_to_window(driver, window, timeout=settings.SMALL_TIMEOUT):
    """
    Wait for a window to appear, and switch to it. This should be usable
    as a drop-in replacement for driver.switch_to.window().
    @Params
    driver - the webdriver object (required)
    window - the window index or window handle
    timeout - the time to wait for the window in seconds
    """
    # The original duplicated the whole retry loop for the index case and
    # the handle case; the only differences are how the switch is attempted
    # and which exception signals "not there yet", so both are parameterized
    # here to keep the two paths from drifting apart.
    def _switch_by_index():
        # Raises IndexError until enough windows exist.
        driver.switch_to.window(driver.window_handles[window])

    def _switch_by_handle():
        # Raises NoSuchWindowException until the handle exists.
        driver.switch_to.window(window)

    if isinstance(window, int):
        attempt = _switch_by_index
        expected_error = IndexError
    else:
        attempt = _switch_by_handle
        expected_error = NoSuchWindowException
    start_ms = time.time() * 1000.0
    stop_ms = start_ms + (timeout * 1000.0)
    for x in range(int(timeout * 10)):
        s_utils.check_if_time_limit_exceeded()
        try:
            attempt()
            return True
        except expected_error:
            now_ms = time.time() * 1000.0
            if now_ms >= stop_ms:
                break
            time.sleep(0.1)
    plural = "s"
    if timeout == 1:
        plural = ""
    message = "Window {%s} was not present after %s second%s!" % (
        window,
        timeout,
        plural,
    )
    timeout_exception(Exception, message)
| 34.551688 | 79 | 0.627965 | 4,135 | 32,755 | 4.844256 | 0.068682 | 0.010733 | 0.024013 | 0.020269 | 0.764016 | 0.715441 | 0.697619 | 0.676551 | 0.642205 | 0.618841 | 0 | 0.013052 | 0.293574 | 32,755 | 947 | 80 | 34.588173 | 0.85263 | 0.3049 | 0 | 0.702572 | 0 | 0 | 0.059616 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045016 | false | 0.003215 | 0.030547 | 0 | 0.125402 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
71f112225e430f89dbed11977e7db13d07773874 | 127 | py | Python | pyclient/zeroos/orchestrator/client/EnumVdiskListItemType.py | 5l1v3r1/0-orchestrator | 9373a4acb1517ff001df526925c224a7a93b3274 | [
"Apache-2.0"
] | 3 | 2017-07-04T14:02:02.000Z | 2019-07-06T23:34:08.000Z | pyclient/zeroos/orchestrator/client/EnumVdiskListItemType.py | 5l1v3r1/0-orchestrator | 9373a4acb1517ff001df526925c224a7a93b3274 | [
"Apache-2.0"
] | 497 | 2017-05-31T07:55:40.000Z | 2018-01-03T12:10:43.000Z | pyclient/zeroos/orchestrator/client/EnumVdiskListItemType.py | zero-os/0-orchestrator | 9373a4acb1517ff001df526925c224a7a93b3274 | [
"Apache-2.0"
] | 8 | 2017-06-14T09:45:56.000Z | 2021-02-01T18:12:55.000Z | from enum import Enum
# Valid values for the "type" field of a vdisk list item, built with the
# functional Enum API (equivalent to the class-based declaration).
EnumVdiskListItemType = Enum(
    "EnumVdiskListItemType",
    [("boot", "boot"), ("db", "db"), ("cache", "cache"), ("tmp", "tmp")],
)
| 14.111111 | 34 | 0.598425 | 15 | 127 | 5.066667 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.283465 | 127 | 8 | 35 | 15.875 | 0.835165 | 0 | 0 | 0 | 0 | 0 | 0.110236 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
71fdc48fd83dd29352fabe9175151e9532625eab | 17,437 | py | Python | salt/states/elasticsearch.py | ipmb/salt | 699912ef9cde28040378aa53d6c7a12d8af756b1 | [
"Apache-2.0"
] | null | null | null | salt/states/elasticsearch.py | ipmb/salt | 699912ef9cde28040378aa53d6c7a12d8af756b1 | [
"Apache-2.0"
] | null | null | null | salt/states/elasticsearch.py | ipmb/salt | 699912ef9cde28040378aa53d6c7a12d8af756b1 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
State module to manage Elasticsearch.
.. versionadded:: 2017.7.0
'''
# Import python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.json
log = logging.getLogger(__name__)
def index_absent(name):
    '''
    Ensure that the named index is absent.

    name
        Name of the index to remove
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        existing = __salt__['elasticsearch.index_get'](index=name)
        if not existing or name not in existing:
            ret['comment'] = 'Index {0} is already absent'.format(name)
        elif __opts__['test']:
            ret['comment'] = 'Index {0} will be removed'.format(name)
            ret['changes']['old'] = existing[name]
            ret['result'] = None
        else:
            ret['result'] = __salt__['elasticsearch.index_delete'](index=name)
            if ret['result']:
                ret['comment'] = 'Successfully removed index {0}'.format(name)
                ret['changes']['old'] = existing[name]
            else:
                ret['comment'] = 'Failed to remove index {0} for unknown reasons'.format(name)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
def index_present(name, definition=None):
    '''
    Ensure that the named index is present.

    name
        Name of the index to add
    definition
        Optional dict for creation parameters as per https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html

    **Example:**

    .. code-block:: yaml

        # Default settings
        mytestindex:
          elasticsearch_index.present

        # Extra settings
        mytestindex2:
          elasticsearch_index.present:
            - definition:
                settings:
                  index:
                    number_of_shards: 10
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        if __salt__['elasticsearch.index_exists'](index=name):
            ret['comment'] = 'Index {0} is already present'.format(name)
        elif __opts__['test']:
            ret['comment'] = 'Index {0} does not exist and will be created'.format(name)
            ret['changes'] = {'new': definition}
            ret['result'] = None
        else:
            output = __salt__['elasticsearch.index_create'](index=name, body=definition)
            if output:
                ret['comment'] = 'Successfully created index {0}'.format(name)
                ret['changes'] = {'new': __salt__['elasticsearch.index_get'](index=name)[name]}
            else:
                ret['result'] = False
                ret['comment'] = 'Cannot create index {0}, {1}'.format(name, output)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
def alias_absent(name, index):
    '''
    Ensure that the index alias is absent.

    name
        Name of the index alias to remove
    index
        Name of the index for the alias
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        lookup = __salt__['elasticsearch.alias_get'](aliases=name, indices=index)
        current = None
        if lookup:
            current = lookup.get(index, {}).get('aliases', {}).get(name, None)
        if current is None:
            ret['comment'] = 'Alias {0} for index {1} is already absent'.format(name, index)
        elif __opts__['test']:
            ret['comment'] = 'Alias {0} for index {1} will be removed'.format(name, index)
            ret['changes']['old'] = current
            ret['result'] = None
        else:
            ret['result'] = __salt__['elasticsearch.alias_delete'](aliases=name, indices=index)
            if ret['result']:
                ret['comment'] = 'Successfully removed alias {0} for index {1}'.format(name, index)
                ret['changes']['old'] = current
            else:
                ret['comment'] = 'Failed to remove alias {0} for index {1} for unknown reasons'.format(name, index)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
def alias_present(name, index, definition=None):
    '''
    Ensure that the named index alias is present.

    name
        Name of the alias
    index
        Name of the index
    definition
        Optional dict for filters as per https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html

    **Example:**

    .. code-block:: yaml

        mytestalias:
          elasticsearch.alias_present:
            - index: testindex
            - definition:
                filter:
                  term:
                    user: kimchy
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        lookup = __salt__['elasticsearch.alias_get'](aliases=name, indices=index)
        old = lookup.get(index, {}).get('aliases', {}).get(name, {}) if lookup else {}
        if not definition:
            definition = {}
        ret['changes'] = __utils__['dictdiffer.deep_diff'](old, definition)
        # An empty definition always forces a (re)create, matching the
        # original "changes or not definition" condition.
        if not ret['changes'] and definition:
            ret['comment'] = 'Alias {0} for index {1} is already present'.format(name, index)
        elif __opts__['test']:
            if not old:
                ret['comment'] = 'Alias {0} for index {1} does not exist and will be created'.format(name, index)
            else:
                ret['comment'] = 'Alias {0} for index {1} exists with wrong configuration and will be overriden'.format(name, index)
            ret['result'] = None
        else:
            output = __salt__['elasticsearch.alias_create'](alias=name, indices=index, body=definition)
            if not output:
                ret['result'] = False
                ret['comment'] = 'Cannot create alias {0} for index {1}, {2}'.format(name, index, output)
            elif not old:
                ret['comment'] = 'Successfully created alias {0} for index {1}'.format(name, index)
            else:
                ret['comment'] = 'Successfully replaced alias {0} for index {1}'.format(name, index)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
def index_template_absent(name):
    '''
    Ensure that the named index template is absent.

    name
        Name of the index template to remove
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        existing = __salt__['elasticsearch.index_template_get'](name=name)
        if not existing or name not in existing:
            ret['comment'] = 'Index template {0} is already absent'.format(name)
        elif __opts__['test']:
            ret['comment'] = 'Index template {0} will be removed'.format(name)
            ret['changes']['old'] = existing[name]
            ret['result'] = None
        else:
            ret['result'] = __salt__['elasticsearch.index_template_delete'](name=name)
            if ret['result']:
                ret['comment'] = 'Successfully removed index template {0}'.format(name)
                ret['changes']['old'] = existing[name]
            else:
                ret['comment'] = 'Failed to remove index template {0} for unknown reasons'.format(name)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
def index_template_present(name, definition, check_definition=False):
    '''
    Ensure that the named index template is present.

    name
        Name of the index template to add
    definition
        Required dict for creation parameters as per https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html
    check_definition
        If the template already exists and the definition is up to date

    **Example:**

    .. code-block:: yaml

        mytestindex2_template:
          elasticsearch_index_template.present:
            - definition:
                template: logstash-*
                order: 1
                settings:
                  number_of_shards: 1
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        if not __salt__['elasticsearch.index_template_exists'](name=name):
            if __opts__['test']:
                ret['comment'] = 'Index template {0} does not exist and will be created'.format(name)
                ret['changes'] = {'new': definition}
                ret['result'] = None
            else:
                output = __salt__['elasticsearch.index_template_create'](name=name, body=definition)
                if output:
                    ret['comment'] = 'Successfully created index template {0}'.format(name)
                    ret['changes'] = {'new': __salt__['elasticsearch.index_template_get'](name=name)[name]}
                else:
                    ret['result'] = False
                    ret['comment'] = 'Cannot create index template {0}, {1}'.format(name, output)
        elif not check_definition:
            ret['comment'] = 'Index template {0} is already present'.format(name)
        else:
            # Compare the stored template against the requested definition.
            definition_parsed = salt.utils.json.loads(definition)
            current_template = __salt__['elasticsearch.index_template_get'](name=name)[name]
            diff = __utils__['dictdiffer.deep_diff'](current_template, definition_parsed)
            if not diff:
                ret['comment'] = 'Index template {0} is already present and up to date'.format(name)
            elif __opts__['test']:
                ret['comment'] = 'Index template {0} exist but need to be updated'.format(name)
                ret['changes'] = diff
                ret['result'] = None
            else:
                output = __salt__['elasticsearch.index_template_create'](name=name, body=definition)
                if output:
                    ret['comment'] = 'Successfully updated index template {0}'.format(name)
                    ret['changes'] = diff
                else:
                    ret['result'] = False
                    ret['comment'] = 'Cannot update index template {0}, {1}'.format(name, output)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
def pipeline_absent(name):
    '''
    Ensure that the named pipeline is absent

    name
        Name of the pipeline to remove
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        existing = __salt__['elasticsearch.pipeline_get'](id=name)
        if not existing or name not in existing:
            ret['comment'] = 'Pipeline {0} is already absent'.format(name)
        elif __opts__['test']:
            ret['comment'] = 'Pipeline {0} will be removed'.format(name)
            ret['changes']['old'] = existing[name]
            ret['result'] = None
        else:
            ret['result'] = __salt__['elasticsearch.pipeline_delete'](id=name)
            if ret['result']:
                ret['comment'] = 'Successfully removed pipeline {0}'.format(name)
                ret['changes']['old'] = existing[name]
            else:
                ret['comment'] = 'Failed to remove pipeline {0} for unknown reasons'.format(name)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
def pipeline_present(name, definition):
    '''
    Ensure that the named pipeline is present.

    name
        Name of the index to add
    definition
        Required dict for creation parameters as per https://www.elastic.co/guide/en/elasticsearch/reference/master/pipeline.html

    **Example:**

    .. code-block:: yaml

        test_pipeline:
          elasticsearch.pipeline_present:
            - definition:
                description: example pipeline
                processors:
                  - set:
                      field: collector_timestamp_millis
                      value: '{{ '{{' }}_ingest.timestamp{{ '}}' }}'
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        pipeline = __salt__['elasticsearch.pipeline_get'](id=name)
        old = pipeline[name] if pipeline and name in pipeline else {}
        ret['changes'] = __utils__['dictdiffer.deep_diff'](old, definition)
        # An empty definition always forces a (re)create, matching the
        # original "changes or not definition" condition.
        if not ret['changes'] and definition:
            ret['comment'] = 'Pipeline {0} is already present'.format(name)
        elif __opts__['test']:
            if not pipeline:
                ret['comment'] = 'Pipeline {0} does not exist and will be created'.format(name)
            else:
                ret['comment'] = 'Pipeline {0} exists with wrong configuration and will be overriden'.format(name)
            ret['result'] = None
        else:
            output = __salt__['elasticsearch.pipeline_create'](id=name, body=definition)
            if not output:
                ret['result'] = False
                ret['comment'] = 'Cannot create pipeline {0}, {1}'.format(name, output)
            elif not pipeline:
                ret['comment'] = 'Successfully created pipeline {0}'.format(name)
            else:
                ret['comment'] = 'Successfully replaced pipeline {0}'.format(name)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
def search_template_absent(name):
    '''
    Ensure that the search template is absent

    name
        Name of the search template to remove
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        template = __salt__['elasticsearch.search_template_get'](id=name)
        if not template:
            ret['comment'] = 'Search template {0} is already absent'.format(name)
        elif __opts__['test']:
            ret['comment'] = 'Search template {0} will be removed'.format(name)
            ret['changes']['old'] = salt.utils.json.loads(template['template'])
            ret['result'] = None
        else:
            ret['result'] = __salt__['elasticsearch.search_template_delete'](id=name)
            if ret['result']:
                ret['comment'] = 'Successfully removed search template {0}'.format(name)
                ret['changes']['old'] = salt.utils.json.loads(template['template'])
            else:
                ret['comment'] = 'Failed to remove search template {0} for unknown reasons'.format(name)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
def search_template_present(name, definition):
    '''
    Ensure that the named search template is present.

    name
        Name of the search template to add
    definition
        Required dict for creation parameters as per http://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html

    **Example:**

    .. code-block:: yaml

        test_pipeline:
          elasticsearch.search_template_present:
            - definition:
                inline:
                  size: 10
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        template = __salt__['elasticsearch.search_template_get'](id=name)
        old = salt.utils.json.loads(template['template']) if template else {}
        ret['changes'] = __utils__['dictdiffer.deep_diff'](old, definition)
        # An empty definition always forces a (re)create, matching the
        # original "changes or not definition" condition.
        if not ret['changes'] and definition:
            ret['comment'] = 'Search template {0} is already present'.format(name)
        elif __opts__['test']:
            if not template:
                ret['comment'] = 'Search template {0} does not exist and will be created'.format(name)
            else:
                ret['comment'] = 'Search template {0} exists with wrong configuration and will be overriden'.format(name)
            ret['result'] = None
        else:
            output = __salt__['elasticsearch.search_template_create'](id=name, body=definition)
            if not output:
                ret['result'] = False
                ret['comment'] = 'Cannot create search template {0}, {1}'.format(name, output)
            elif not template:
                ret['comment'] = 'Successfully created search template {0}'.format(name)
            else:
                ret['comment'] = 'Successfully replaced search template {0}'.format(name)
    except Exception as err:
        ret['result'] = False
        ret['comment'] = str(err)
    return ret
| 35.952577 | 142 | 0.540919 | 1,839 | 17,437 | 4.990756 | 0.087548 | 0.065374 | 0.033559 | 0.029636 | 0.811615 | 0.768141 | 0.732186 | 0.643059 | 0.555786 | 0.483983 | 0 | 0.007065 | 0.334347 | 17,437 | 484 | 143 | 36.02686 | 0.783665 | 0.174055 | 0 | 0.654545 | 0 | 0 | 0.291086 | 0.050755 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036364 | false | 0 | 0.010909 | 0 | 0.083636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
9c0cd2882736e6f88623dfdace36a6bac2f42a57 | 73 | py | Python | as5env/lib/python3.6/site-packages/spacy/git_info.py | RasmusVestiH/RV_cds_language2 | dc548a00985d24affc0411cec2e471b4db2a95be | [
"MIT"
] | null | null | null | as5env/lib/python3.6/site-packages/spacy/git_info.py | RasmusVestiH/RV_cds_language2 | dc548a00985d24affc0411cec2e471b4db2a95be | [
"MIT"
] | null | null | null | as5env/lib/python3.6/site-packages/spacy/git_info.py | RasmusVestiH/RV_cds_language2 | dc548a00985d24affc0411cec2e471b4db2a95be | [
"MIT"
] | null | null | null | # THIS FILE IS GENERATED FROM SPACY SETUP.PY
# Git commit hash this build of spaCy was generated from (written by setup.py;
# do not edit by hand).
GIT_VERSION = "53a3b967a"
| 18.25 | 44 | 0.753425 | 11 | 73 | 4.909091 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098361 | 0.164384 | 73 | 3 | 45 | 24.333333 | 0.786885 | 0.575342 | 0 | 0 | 1 | 0 | 0.321429 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
9c116c0cfe166b65603db4c0c8f49ac81e14be05 | 375 | py | Python | mmtrack/models/trackers/__init__.py | BigBen0519/mmtracking | 61509b301ccbc2ab14f82a682b94c56f82ce09de | [
"Apache-2.0"
] | 2,226 | 2021-01-04T11:13:01.000Z | 2022-03-31T11:49:59.000Z | mmtrack/models/trackers/__init__.py | BigBen0519/mmtracking | 61509b301ccbc2ab14f82a682b94c56f82ce09de | [
"Apache-2.0"
] | 300 | 2021-01-04T11:36:59.000Z | 2022-03-31T07:48:28.000Z | mmtrack/models/trackers/__init__.py | BigBen0519/mmtracking | 61509b301ccbc2ab14f82a682b94c56f82ce09de | [
"Apache-2.0"
] | 333 | 2021-01-04T11:35:12.000Z | 2022-03-31T08:11:50.000Z | # Copyright (c) OpenMMLab. All rights reserved.
from .base_tracker import BaseTracker
from .byte_tracker import ByteTracker
from .masktrack_rcnn_tracker import MaskTrackRCNNTracker
from .sort_tracker import SortTracker
from .tracktor_tracker import TracktorTracker
# Names exported when this subpackage is imported with ``from ... import *``.
__all__ = [
    'BaseTracker', 'TracktorTracker', 'SortTracker', 'MaskTrackRCNNTracker',
    'ByteTracker'
]
| 31.25 | 76 | 0.808 | 38 | 375 | 7.710526 | 0.526316 | 0.221843 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12 | 375 | 11 | 77 | 34.090909 | 0.887879 | 0.12 | 0 | 0 | 0 | 0 | 0.207317 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.555556 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
9c16542570312b36a03dc73da8094fa1e73ef6d4 | 122 | py | Python | Exercicios/Mundo1/ex010.py | mpaullos/cursoemvideo-python | 80732626b6b5471ec7fea6dc01d83931e5cfd8fb | [
"MIT"
] | null | null | null | Exercicios/Mundo1/ex010.py | mpaullos/cursoemvideo-python | 80732626b6b5471ec7fea6dc01d83931e5cfd8fb | [
"MIT"
] | null | null | null | Exercicios/Mundo1/ex010.py | mpaullos/cursoemvideo-python | 80732626b6b5471ec7fea6dc01d83931e5cfd8fb | [
"MIT"
] | null | null | null | d = float (input ('Quanto dinheiro você tem na carteira? R$'))
print('Com {} você pode comprar U${:.2f}'.format(d,d/3.27)) | 61 | 62 | 0.655738 | 22 | 122 | 3.636364 | 0.863636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.037736 | 0.131148 | 122 | 2 | 63 | 61 | 0.716981 | 0 | 0 | 0 | 0 | 0 | 0.593496 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 3 |
9c2c736a9be3c0d9b90a3ca0de57f03feafd2cef | 497 | py | Python | openprocurement/auctions/dgf/views/financial/item.py | openprocurement/openprocurement.auctions.dgf | 87d8d5b674830f2f0954845637835b1dca6204bb | [
"Apache-2.0"
] | 2 | 2016-09-08T11:49:24.000Z | 2016-09-27T12:10:36.000Z | openprocurement/auctions/dgf/views/financial/item.py | openprocurement/openprocurement.auctions.dgf | 87d8d5b674830f2f0954845637835b1dca6204bb | [
"Apache-2.0"
] | 88 | 2016-09-19T08:27:53.000Z | 2022-03-21T22:16:22.000Z | openprocurement/auctions/dgf/views/financial/item.py | openprocurement/openprocurement.auctions.dgf | 87d8d5b674830f2f0954845637835b1dca6204bb | [
"Apache-2.0"
] | 15 | 2016-09-08T08:42:42.000Z | 2020-02-07T11:09:37.000Z | # -*- coding: utf-8 -*-
from openprocurement.auctions.core.utils import opresource
from openprocurement.auctions.core.endpoints import ENDPOINTS
from openprocurement.auctions.dgf.views.other.item import AuctionItemResource
@opresource(
name='dgfFinancialAssets:Auction Items',
collection_path=ENDPOINTS['items'],
path=ENDPOINTS['item'],
auctionsprocurementMethodType="dgfFinancialAssets",
description="Auction items")
class AuctionItemResource(AuctionItemResource):
pass
| 33.133333 | 77 | 0.790744 | 47 | 497 | 8.340426 | 0.553191 | 0.145408 | 0.206633 | 0.158163 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002257 | 0.108652 | 497 | 14 | 78 | 35.5 | 0.882619 | 0.042254 | 0 | 0 | 0 | 0 | 0.151899 | 0.054852 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.090909 | 0.272727 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
9c41e54a79278a4ccec49ba7af45daa9cca6270c | 84 | py | Python | HW8_task8.py | DaniilBaldin/Hillel_learning | 49748cebd7d15f04942b40de54cc563e313649fe | [
"MIT"
] | null | null | null | HW8_task8.py | DaniilBaldin/Hillel_learning | 49748cebd7d15f04942b40de54cc563e313649fe | [
"MIT"
] | null | null | null | HW8_task8.py | DaniilBaldin/Hillel_learning | 49748cebd7d15f04942b40de54cc563e313649fe | [
"MIT"
] | null | null | null | test_list = [1, 2, 5, 7, 9, 11]
for i in reversed(test_list):
print(i, end=' ')
| 21 | 31 | 0.571429 | 17 | 84 | 2.705882 | 0.823529 | 0.347826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.107692 | 0.22619 | 84 | 3 | 32 | 28 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0.011905 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
9c4f25425523b0cf0f0faca3a17550251aa54960 | 600 | py | Python | xiaomirouter/status/mem.py | RiRomain/python-xiaomi-router | 36867d077349a70678db75cf261428cdd80a0c51 | [
"MIT"
] | null | null | null | xiaomirouter/status/mem.py | RiRomain/python-xiaomi-router | 36867d077349a70678db75cf261428cdd80a0c51 | [
"MIT"
] | null | null | null | xiaomirouter/status/mem.py | RiRomain/python-xiaomi-router | 36867d077349a70678db75cf261428cdd80a0c51 | [
"MIT"
] | null | null | null | """ Router memory information """
class Mem(object):
    """Router memory information (usage, total, hz and type fields)."""

    def __init__(self, usage, total, hz, type):
        # NOTE(review): the parameter name 'type' shadows the builtin, but it
        # is kept to preserve backward compatibility for keyword callers.
        self._usage = usage
        self._total = total
        self._hz = hz
        self._type = type

    def get_usage(self):
        """Return the memory usage value."""
        return self._usage

    def get_total(self):
        """Return the total memory value."""
        return self._total

    def get_hz(self):
        """Return the hz value (presumably memory clock speed — confirm)."""
        return self._hz

    def get_type(self):
        """Return the memory type string."""
        return self._type

    def __repr__(self):
        # Added for debuggability; purely additive to the original interface.
        return "Mem(usage=%r, total=%r, hz=%r, type=%r)" % (
            self._usage, self._total, self._hz, self._type)
def create_mem_from_json(json_entry):
    """Build a ``Mem`` object from a parsed router-status JSON entry."""
    usage, total, hz, mem_type = (
        json_entry['usage'],
        json_entry['total'],
        json_entry['hz'],
        json_entry['type'],
    )
    return Mem(usage, total, hz, mem_type)
| 20.689655 | 74 | 0.601667 | 77 | 600 | 4.376623 | 0.246753 | 0.133531 | 0.166172 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.281667 | 600 | 28 | 75 | 21.428571 | 0.781903 | 0.086667 | 0 | 0 | 0 | 0 | 0.029963 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.352941 | false | 0 | 0 | 0.294118 | 0.705882 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
9c55abd041bde87f843c534075f667ce454b3386 | 362 | py | Python | euclidean.py | smarsu/facenet | a0fa3ffe32e295b4cc980a4a178593cc7f1bad12 | [
"MIT"
] | null | null | null | euclidean.py | smarsu/facenet | a0fa3ffe32e295b4cc980a4a178593cc7f1bad12 | [
"MIT"
] | null | null | null | euclidean.py | smarsu/facenet | a0fa3ffe32e295b4cc980a4a178593cc7f1bad12 | [
"MIT"
] | null | null | null | # --------------------------------------------------------
# FaceNet Datasets
# Licensed under The MIT License [see LICENSE for details]
# Copyright 2019 smarsu. All Rights Reserved.
# --------------------------------------------------------
import numpy as np
def euclidean_distance(a, b):
    """Return the Euclidean (L2) distance between vectors ``a`` and ``b``.

    Accepts numpy arrays or any array-like sequences of the same shape
    (plain lists/tuples are converted, which the original would reject).
    """
    a = np.asarray(a)
    b = np.asarray(b)
    return np.sqrt(np.sum(np.square(a - b)))
| 27.846154 | 59 | 0.447514 | 35 | 362 | 4.6 | 0.828571 | 0.024845 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013201 | 0.162983 | 362 | 12 | 60 | 30.166667 | 0.518152 | 0.638122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
9c82b8151b330bf8d38d148adbcbee9af3abba01 | 120 | py | Python | exercise025.py | AlissonRaphael/python_exercises | 3f1185c4f2fff24c9fa2ffd6b60f90599044c985 | [
"MIT"
] | null | null | null | exercise025.py | AlissonRaphael/python_exercises | 3f1185c4f2fff24c9fa2ffd6b60f90599044c985 | [
"MIT"
] | null | null | null | exercise025.py | AlissonRaphael/python_exercises | 3f1185c4f2fff24c9fa2ffd6b60f90599044c985 | [
"MIT"
] | null | null | null | nome = input('Qual é o seu nome completo: ')
# Report whether the substring 'silva' occurs in the lower-cased name
# ('in' is the idiomatic equivalent of .find(...) != -1).
print('Seu nome tem Silva? {}.'.format('silva' in nome.lower()))
| 30 | 73 | 0.625 | 19 | 120 | 3.947368 | 0.736842 | 0.186667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009709 | 0.141667 | 120 | 3 | 74 | 40 | 0.718447 | 0 | 0 | 0 | 0 | 0 | 0.466667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 3 |
92c5f1e99adfc26f737aa447b2474912686d166c | 50 | py | Python | 01_Language/05_Python/tests/module_/c/__init__.py | cliff363825/TwentyFour | 09df59bd5d275e66463e343647f46027397d1233 | [
"MIT"
] | 3 | 2020-06-28T07:42:51.000Z | 2021-01-15T10:32:11.000Z | 01_Language/05_Python/tests/module_/c/__init__.py | cliff363825/TwentyFour | 09df59bd5d275e66463e343647f46027397d1233 | [
"MIT"
] | 9 | 2021-03-10T22:45:40.000Z | 2022-02-27T06:53:20.000Z | 01_Language/05_Python/tests/module_/c/__init__.py | cliff363825/TwentyFour | 09df59bd5d275e66463e343647f46027397d1233 | [
"MIT"
] | 1 | 2021-01-15T10:51:24.000Z | 2021-01-15T10:51:24.000Z | # coding: utf-8
# Declare the single public name of this test package.
__all__ = ['c']
# NOTE(review): 'from . import *' re-imports from this package itself —
# looks like scaffolding for the module-import tests; confirm it is intended.
from . import *
| 8.333333 | 15 | 0.56 | 7 | 50 | 3.428571 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.026316 | 0.24 | 50 | 5 | 16 | 10 | 0.605263 | 0.26 | 0 | 0 | 0 | 0 | 0.028571 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
92c778776510170a4c44b0c22806f8d1db24d536 | 1,859 | py | Python | clockIn/clockIn_action.py | wd665544/bupt-clockIn | 8956646d2c19ef75e6106fe1594c43d2b32768b4 | [
"MIT"
] | 4 | 2021-10-02T04:27:35.000Z | 2022-01-07T04:25:04.000Z | clockIn/clockIn_action.py | wd665544/bupt-clockIn | 8956646d2c19ef75e6106fe1594c43d2b32768b4 | [
"MIT"
] | null | null | null | clockIn/clockIn_action.py | wd665544/bupt-clockIn | 8956646d2c19ef75e6106fe1594c43d2b32768b4 | [
"MIT"
] | 11 | 2021-11-25T03:57:31.000Z | 2022-03-31T15:37:32.000Z | import json
import os
import time
from datetime import datetime
import pytz
import req_model
def main():
tz = pytz.timezone('Asia/Shanghai')
data = json.loads(os.environ['DATA'])
for item in data:
for i in range(3):
time.sleep(i * 5)
print("now {} clock in {}:".format(item, i))
if data[item]["username"] != "" and data[item]["password"] != "":
msg = req_model.upload(data[item]["username"], data[item]["password"])
if msg == "":
print("{} 打卡失败!!".format(datetime.fromtimestamp(int(time.time()), tz).strftime('%H:%M')))
req_model.push_msg(
"{} 打卡失败!!".format(datetime.fromtimestamp(int(time.time()), tz).strftime('%H:%M')), data[item])
elif json.loads(msg)["m"] == "今天已经填报了" or json.loads(msg)["m"] == "操作成功":
print(
"{} {}".format(datetime.fromtimestamp(int(time.time()), tz).strftime('%H:%M'), json.loads(msg)[
"m"]))
req_model.push_msg(
"{} {}".format(datetime.fromtimestamp(int(time.time()), tz).strftime('%H:%M'), json.loads(msg)[
"m"]), data[item])
break
else:
print(
"{} {}".format(datetime.fromtimestamp(int(time.time()), tz).strftime('%H:%M'), json.loads(msg)[
"m"]))
req_model.push_msg(
"{} {}".format(datetime.fromtimestamp(int(time.time()), tz).strftime('%H:%M'), json.loads(msg)[
"m"]), data[item])
else:
print("{}'s username or password is null".format(item))
break
if __name__ == '__main__':
main()
| 41.311111 | 119 | 0.465304 | 196 | 1,859 | 4.331633 | 0.270408 | 0.074205 | 0.190813 | 0.212014 | 0.507656 | 0.489988 | 0.489988 | 0.489988 | 0.489988 | 0.489988 | 0 | 0.001664 | 0.353416 | 1,859 | 44 | 120 | 42.25 | 0.704659 | 0 | 0 | 0.435897 | 0 | 0 | 0.104357 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025641 | false | 0.076923 | 0.153846 | 0 | 0.179487 | 0.128205 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
92e66ca6fc483f8758d84fa0c1d58a72ae27b5fb | 2,358 | py | Python | dev/local/data/source.py | erikgaas/fastai_dev | cbd2166fd639365068c337b5b71d9feaf9523dd4 | [
"Apache-2.0"
] | null | null | null | dev/local/data/source.py | erikgaas/fastai_dev | cbd2166fd639365068c337b5b71d9feaf9523dd4 | [
"Apache-2.0"
] | null | null | null | dev/local/data/source.py | erikgaas/fastai_dev | cbd2166fd639365068c337b5b71d9feaf9523dd4 | [
"Apache-2.0"
] | null | null | null | #AUTOGENERATED! DO NOT EDIT! File to edit: dev/06_data_source.ipynb (unless otherwise specified).
__all__ = ['DataSource']
from ..imports import *
from ..test import *
from ..core import *
from .core import *
from .transform import *
from .pipeline import *
from ..notebook.showdoc import show_doc
def _mk_subset(self, i):
tfms = [o.tfms for o in self.tls]
return TfmdDS(L._gets(self, self.filts[i]), tfms=tfms, do_setup=False, filt=i)
class _FiltTfmdList(TfmdList):
"Like `TfmdList` but with filters and train/valid attribute, for proper setup"
def __init__(self, dsrc, tfms, do_setup=True):
self.filt_idx = dsrc.filt_idx
super().__init__(dsrc.items, tfms, do_setup=do_setup, as_item=True, filt=None)
def subset(self, i): return _mk_subset(self, i)
def _get(self, i):
self.filt = self.filt_idx[i]
return super()._get(i)
_FiltTfmdList.train,_FiltTfmdList.valid = add_props(lambda i,x: x.subset(i), 2)
class DataSource(TfmdDS):
"Applies a `tfm` to filtered subsets of `items`"
def __init__(self, items, tfms=None, filts=None, do_setup=True):
super(TfmdDS,self).__init__(items, use_list=None)
if filts is None: filts = [range_of(items)]
self.filts = L(mask2idxs(filt) for filt in filts)
# Create map from item id to filter id
assert all_disjoint(self.filts)
self.filt_idx = L([None]*len(self.items))
for i,f in enumerate(self.filts): self.filt_idx[f] = i
self.tls = [_FiltTfmdList(self, t, do_setup=do_setup) for t in L(tfms)]
def __repr__(self): return '\n'.join(map(str,self.subsets())) + f'\ntls - {self.tls}'
def subsets(self): return map(self.subset, range_of(self.filts))
def subset(self, i): return _mk_subset(self, i)
def _get(self, i):
self.filt = self.filt_idx[i]
return super()._get(i)
@delegates(TfmdDL.__init__)
def databunch(self, bs=16, val_bs=None, shuffle_train=True, **kwargs):
n = len(self.filts)-1
bss = [bs] + [2*bs]*n if val_bs is None else [bs] + [val_bs]*n
shuffles = [shuffle_train] + [False]*n
return DataBunch(*[TfmdDL(self.subset(i), bs=b, shuffle=s, drop_last=s, **kwargs)
for i,(b,s) in enumerate(zip(bss, shuffles))])
DataSource.train,DataSource.valid = add_props(lambda i,x: x.subset(i), 2) | 40.655172 | 97 | 0.658609 | 366 | 2,358 | 4.046448 | 0.31694 | 0.023633 | 0.037137 | 0.026334 | 0.199865 | 0.147198 | 0.147198 | 0.147198 | 0.147198 | 0.147198 | 0 | 0.00478 | 0.201442 | 2,358 | 58 | 98 | 40.655172 | 0.781731 | 0.108991 | 0 | 0.177778 | 1 | 0 | 0.068345 | 0 | 0 | 0 | 0 | 0 | 0.022222 | 1 | 0.222222 | false | 0 | 0.155556 | 0.088889 | 0.511111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
130722b8855c804cc81295f51aa08443b7a05b4e | 279 | py | Python | introduction-to-python/numpy/script_01.py | nhutnamhcmus/datacamp-playground | 25457e813b1145e1d335562286715eeddd1c1a7b | [
"MIT"
] | 1 | 2021-05-08T11:09:27.000Z | 2021-05-08T11:09:27.000Z | introduction-to-python/numpy/script_01.py | nhutnamhcmus/datacamp-playground | 25457e813b1145e1d335562286715eeddd1c1a7b | [
"MIT"
] | 1 | 2022-03-12T15:42:14.000Z | 2022-03-12T15:42:14.000Z | introduction-to-python/numpy/script_01.py | nhutnamhcmus/datacamp-playground | 25457e813b1145e1d335562286715eeddd1c1a7b | [
"MIT"
] | 1 | 2021-04-30T18:24:19.000Z | 2021-04-30T18:24:19.000Z | # Create list baseball
baseball = [180, 215, 210, 210, 188, 176, 209, 200]
# Import the numpy package as np
import numpy as np
# Create a numpy array from baseball: np_baseball
np_baseball = np.array(baseball)
# Print out type of np_baseball
print(type(np_baseball)) | 25.363636 | 52 | 0.72043 | 45 | 279 | 4.377778 | 0.511111 | 0.203046 | 0.182741 | 0.203046 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.107143 | 0.197133 | 279 | 11 | 53 | 25.363636 | 0.772321 | 0.462366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.25 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
13250f0b102ecfa4cecd316b2ae4d3baf367a558 | 21,319 | py | Python | sdk/python/pulumi_azure_native/network/v20171101/connection_monitor.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/network/v20171101/connection_monitor.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/network/v20171101/connection_monitor.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ConnectionMonitorArgs', 'ConnectionMonitor']
@pulumi.input_type
class ConnectionMonitorArgs:
def __init__(__self__, *,
destination: pulumi.Input['ConnectionMonitorDestinationArgs'],
network_watcher_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
source: pulumi.Input['ConnectionMonitorSourceArgs'],
auto_start: Optional[pulumi.Input[bool]] = None,
connection_monitor_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
monitoring_interval_in_seconds: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a ConnectionMonitor resource.
:param pulumi.Input['ConnectionMonitorDestinationArgs'] destination: Describes the destination of connection monitor.
:param pulumi.Input[str] network_watcher_name: The name of the Network Watcher resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group containing Network Watcher.
:param pulumi.Input['ConnectionMonitorSourceArgs'] source: Describes the source of connection monitor.
:param pulumi.Input[bool] auto_start: Determines if the connection monitor will start automatically once created.
:param pulumi.Input[str] connection_monitor_name: The name of the connection monitor.
:param pulumi.Input[str] location: Connection monitor location.
:param pulumi.Input[int] monitoring_interval_in_seconds: Monitoring interval in seconds.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Connection monitor tags.
"""
pulumi.set(__self__, "destination", destination)
pulumi.set(__self__, "network_watcher_name", network_watcher_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "source", source)
if auto_start is None:
auto_start = True
if auto_start is not None:
pulumi.set(__self__, "auto_start", auto_start)
if connection_monitor_name is not None:
pulumi.set(__self__, "connection_monitor_name", connection_monitor_name)
if location is not None:
pulumi.set(__self__, "location", location)
if monitoring_interval_in_seconds is None:
monitoring_interval_in_seconds = 60
if monitoring_interval_in_seconds is not None:
pulumi.set(__self__, "monitoring_interval_in_seconds", monitoring_interval_in_seconds)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def destination(self) -> pulumi.Input['ConnectionMonitorDestinationArgs']:
"""
Describes the destination of connection monitor.
"""
return pulumi.get(self, "destination")
@destination.setter
def destination(self, value: pulumi.Input['ConnectionMonitorDestinationArgs']):
pulumi.set(self, "destination", value)
@property
@pulumi.getter(name="networkWatcherName")
def network_watcher_name(self) -> pulumi.Input[str]:
"""
The name of the Network Watcher resource.
"""
return pulumi.get(self, "network_watcher_name")
@network_watcher_name.setter
def network_watcher_name(self, value: pulumi.Input[str]):
pulumi.set(self, "network_watcher_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group containing Network Watcher.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def source(self) -> pulumi.Input['ConnectionMonitorSourceArgs']:
"""
Describes the source of connection monitor.
"""
return pulumi.get(self, "source")
@source.setter
def source(self, value: pulumi.Input['ConnectionMonitorSourceArgs']):
pulumi.set(self, "source", value)
@property
@pulumi.getter(name="autoStart")
def auto_start(self) -> Optional[pulumi.Input[bool]]:
"""
Determines if the connection monitor will start automatically once created.
"""
return pulumi.get(self, "auto_start")
@auto_start.setter
def auto_start(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "auto_start", value)
@property
@pulumi.getter(name="connectionMonitorName")
def connection_monitor_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the connection monitor.
"""
return pulumi.get(self, "connection_monitor_name")
@connection_monitor_name.setter
def connection_monitor_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "connection_monitor_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Connection monitor location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="monitoringIntervalInSeconds")
def monitoring_interval_in_seconds(self) -> Optional[pulumi.Input[int]]:
"""
Monitoring interval in seconds.
"""
return pulumi.get(self, "monitoring_interval_in_seconds")
@monitoring_interval_in_seconds.setter
def monitoring_interval_in_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "monitoring_interval_in_seconds", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Connection monitor tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class ConnectionMonitor(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auto_start: Optional[pulumi.Input[bool]] = None,
connection_monitor_name: Optional[pulumi.Input[str]] = None,
destination: Optional[pulumi.Input[pulumi.InputType['ConnectionMonitorDestinationArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
monitoring_interval_in_seconds: Optional[pulumi.Input[int]] = None,
network_watcher_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[pulumi.InputType['ConnectionMonitorSourceArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Information about the connection monitor.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] auto_start: Determines if the connection monitor will start automatically once created.
:param pulumi.Input[str] connection_monitor_name: The name of the connection monitor.
:param pulumi.Input[pulumi.InputType['ConnectionMonitorDestinationArgs']] destination: Describes the destination of connection monitor.
:param pulumi.Input[str] location: Connection monitor location.
:param pulumi.Input[int] monitoring_interval_in_seconds: Monitoring interval in seconds.
:param pulumi.Input[str] network_watcher_name: The name of the Network Watcher resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group containing Network Watcher.
:param pulumi.Input[pulumi.InputType['ConnectionMonitorSourceArgs']] source: Describes the source of connection monitor.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Connection monitor tags.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ConnectionMonitorArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Information about the connection monitor.
:param str resource_name: The name of the resource.
:param ConnectionMonitorArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ConnectionMonitorArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auto_start: Optional[pulumi.Input[bool]] = None,
connection_monitor_name: Optional[pulumi.Input[str]] = None,
destination: Optional[pulumi.Input[pulumi.InputType['ConnectionMonitorDestinationArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
monitoring_interval_in_seconds: Optional[pulumi.Input[int]] = None,
network_watcher_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[pulumi.InputType['ConnectionMonitorSourceArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ConnectionMonitorArgs.__new__(ConnectionMonitorArgs)
if auto_start is None:
auto_start = True
__props__.__dict__["auto_start"] = auto_start
__props__.__dict__["connection_monitor_name"] = connection_monitor_name
if destination is None and not opts.urn:
raise TypeError("Missing required property 'destination'")
__props__.__dict__["destination"] = destination
__props__.__dict__["location"] = location
if monitoring_interval_in_seconds is None:
monitoring_interval_in_seconds = 60
__props__.__dict__["monitoring_interval_in_seconds"] = monitoring_interval_in_seconds
if network_watcher_name is None and not opts.urn:
raise TypeError("Missing required property 'network_watcher_name'")
__props__.__dict__["network_watcher_name"] = network_watcher_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if source is None and not opts.urn:
raise TypeError("Missing required property 'source'")
__props__.__dict__["source"] = source
__props__.__dict__["tags"] = tags
__props__.__dict__["etag"] = None
__props__.__dict__["monitoring_status"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["start_time"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/v20171101:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20171001:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20171001:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20180101:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20180101:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20180201:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20180201:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20180401:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20180401:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20180601:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20180601:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20180701:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20180701:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20180801:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20180801:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20181001:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20181001:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20181101:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20181101:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20181201:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20181201:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20190201:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20190201:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20190401:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20190401:ConnectionMonitor"), 
pulumi.Alias(type_="azure-native:network/v20190601:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20190601:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20190701:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20190701:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20190801:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20190801:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20190901:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20190901:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20191101:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20191101:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20191201:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20191201:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20200301:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20200301:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20200401:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20200401:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20200501:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20200501:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20200601:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20200601:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20200701:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20200701:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20200801:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20200801:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20201101:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20201101:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20210201:ConnectionMonitor"), 
pulumi.Alias(type_="azure-nextgen:network/v20210201:ConnectionMonitor"), pulumi.Alias(type_="azure-native:network/v20210301:ConnectionMonitor"), pulumi.Alias(type_="azure-nextgen:network/v20210301:ConnectionMonitor")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ConnectionMonitor, __self__).__init__(
'azure-native:network/v20171101:ConnectionMonitor',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ConnectionMonitor':
"""
Get an existing ConnectionMonitor resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ConnectionMonitorArgs.__new__(ConnectionMonitorArgs)
__props__.__dict__["auto_start"] = None
__props__.__dict__["destination"] = None
__props__.__dict__["etag"] = None
__props__.__dict__["location"] = None
__props__.__dict__["monitoring_interval_in_seconds"] = None
__props__.__dict__["monitoring_status"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["source"] = None
__props__.__dict__["start_time"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
return ConnectionMonitor(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="autoStart")
def auto_start(self) -> pulumi.Output[Optional[bool]]:
"""
Determines if the connection monitor will start automatically once created.
"""
return pulumi.get(self, "auto_start")
@property
@pulumi.getter
def destination(self) -> pulumi.Output['outputs.ConnectionMonitorDestinationResponse']:
"""
Describes the destination of connection monitor.
"""
return pulumi.get(self, "destination")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "etag")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Connection monitor location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="monitoringIntervalInSeconds")
def monitoring_interval_in_seconds(self) -> pulumi.Output[Optional[int]]:
"""
Monitoring interval in seconds.
"""
return pulumi.get(self, "monitoring_interval_in_seconds")
@property
@pulumi.getter(name="monitoringStatus")
def monitoring_status(self) -> pulumi.Output[Optional[str]]:
"""
The monitoring status of the connection monitor.
"""
return pulumi.get(self, "monitoring_status")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the connection monitor.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[Optional[str]]:
"""
The provisioning state of the connection monitor.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def source(self) -> pulumi.Output['outputs.ConnectionMonitorSourceResponse']:
"""
Describes the source of connection monitor.
"""
return pulumi.get(self, "source")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> pulumi.Output[Optional[str]]:
"""
The date and time when the connection monitor was started.
"""
return pulumi.get(self, "start_time")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Connection monitor tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Connection monitor type.
"""
return pulumi.get(self, "type")
| 53.2975 | 4,166 | 0.690839 | 2,281 | 21,319 | 6.176677 | 0.085927 | 0.056995 | 0.060686 | 0.080914 | 0.76542 | 0.715239 | 0.663141 | 0.3842 | 0.35368 | 0.29633 | 0 | 0.026538 | 0.199306 | 21,319 | 399 | 4,167 | 53.431078 | 0.798828 | 0.168254 | 0 | 0.41502 | 1 | 0 | 0.268514 | 0.211203 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142292 | false | 0.003953 | 0.027668 | 0.003953 | 0.264822 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
1336edda11f2e6385bf9d207e48901123afd0985 | 341 | py | Python | setup.py | rickynilsson/astroquery | b7edec0d8e36b11c25baa39ad72e4160bc30d465 | [
"BSD-3-Clause"
] | 577 | 2015-02-12T18:23:49.000Z | 2022-03-22T21:38:58.000Z | setup.py | rickynilsson/astroquery | b7edec0d8e36b11c25baa39ad72e4160bc30d465 | [
"BSD-3-Clause"
] | 1,812 | 2015-01-01T08:02:20.000Z | 2022-03-31T13:03:52.000Z | setup.py | rickynilsson/astroquery | b7edec0d8e36b11c25baa39ad72e4160bc30d465 | [
"BSD-3-Clause"
] | 322 | 2015-02-23T19:31:29.000Z | 2022-03-25T18:51:30.000Z | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import sys
# Workaround for https://github.com/pypa/pip/issues/6163
sys.path.insert(0, os.path.dirname(__file__))
import ah_bootstrap
import builtins
builtins._ASTROPY_SETUP_ = True
from astropy_helpers.setup_helpers import setup
setup()
| 17.947368 | 63 | 0.782991 | 53 | 341 | 4.849057 | 0.735849 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020067 | 0.123167 | 341 | 18 | 64 | 18.944444 | 0.839465 | 0.40176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.625 | 0 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
13398920ac4247964b70b8b29ada888f2c68d744 | 1,139 | py | Python | abc/abc087/abc087d-2.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | 1 | 2019-08-21T00:49:34.000Z | 2019-08-21T00:49:34.000Z | abc/abc087/abc087d-2.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | abc/abc087/abc087d-2.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | # Union Find 木
from sys import setrecursionlimit, stdin
def find(parent, diff_weight, i):
t = parent[i]
if t < 0:
return i
t = find(parent, diff_weight, t)
diff_weight[i] += diff_weight[parent[i]]
parent[i] = t
return t
def weight(parent, diff_weight, i):
find(parent, diff_weight, i)
return diff_weight[i]
def unite(parent, diff_weight, i, j, d):
d -= weight(parent, diff_weight, j)
d += weight(parent, diff_weight, i)
i = find(parent, diff_weight, i)
j = find(parent, diff_weight, j)
if i == j:
return
diff_weight[j] = d
parent[i] += parent[j]
parent[j] = i
setrecursionlimit(10 ** 6)
N, M = map(int, stdin.readline().split())
LRD = [tuple(map(int, stdin.readline().split())) for _ in range(M)]
parent = [-1] * (N + 1)
diff_weight = [0] * (N + 1)
for L, R, D in LRD:
i = find(parent, diff_weight, L)
j = find(parent, diff_weight, R)
if i != j:
unite(parent, diff_weight, L, R, D)
else:
if weight(parent, diff_weight, L) + D != weight(parent, diff_weight, R):
print('No')
exit()
print('Yes')
| 23.244898 | 80 | 0.584723 | 178 | 1,139 | 3.629213 | 0.230337 | 0.294118 | 0.346749 | 0.216718 | 0.442724 | 0.068111 | 0 | 0 | 0 | 0 | 0 | 0.009524 | 0.262511 | 1,139 | 48 | 81 | 23.729167 | 0.759524 | 0.010536 | 0 | 0 | 0 | 0 | 0.004444 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.081081 | false | 0 | 0.027027 | 0 | 0.216216 | 0.054054 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
133c2774be187b372c083d7d040764b04418d113 | 391 | py | Python | tests/peeringdb_mock/resource.py | peeringdb/django-peeringdb | 4b14fb5e9fecaf2fa6e6996e334a8d211be9604d | [
"Apache-2.0"
] | 22 | 2015-11-10T04:55:42.000Z | 2022-03-16T19:23:20.000Z | tests/peeringdb_mock/resource.py | pigen00/django-peeringdb | b4cd86d6700f1d85d3f39f672184c347edbfb45c | [
"Apache-2.0"
] | 65 | 2016-03-21T12:29:52.000Z | 2022-03-04T13:37:57.000Z | tests/peeringdb_mock/resource.py | pigen00/django-peeringdb | b4cd86d6700f1d85d3f39f672184c347edbfb45c | [
"Apache-2.0"
] | 21 | 2016-03-09T02:40:49.000Z | 2022-02-26T21:10:12.000Z | # mock resource classes from peeringdb-py client
class Organization:
    """Mock of the peeringdb-py client's Organization resource."""


class Facility:
    """Mock of the peeringdb-py client's Facility resource."""


class Network:
    """Mock of the peeringdb-py client's Network resource."""


class InternetExchange:
    """Mock of the peeringdb-py client's InternetExchange resource."""


class InternetExchangeFacility:
    """Mock of the peeringdb-py client's InternetExchangeFacility resource."""


class InternetExchangeLan:
    """Mock of the peeringdb-py client's InternetExchangeLan resource."""


class InternetExchangeLanPrefix:
    """Mock of the peeringdb-py client's InternetExchangeLanPrefix resource."""


class NetworkFacility:
    """Mock of the peeringdb-py client's NetworkFacility resource."""


class NetworkIXLan:
    """Mock of the peeringdb-py client's NetworkIXLan resource."""


class NetworkContact:
    """Mock of the peeringdb-py client's NetworkContact resource."""
| 9.309524 | 48 | 0.721228 | 37 | 391 | 7.621622 | 0.513514 | 0.287234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.248082 | 391 | 41 | 49 | 9.536585 | 0.959184 | 0.117647 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
135c39a34ccbd6bf78217806473799d138968e19 | 16,481 | py | Python | pysnmp/HP-ICF-VRRP-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/HP-ICF-VRRP-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/HP-ICF-VRRP-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module HP-ICF-VRRP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HP-ICF-VRRP-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:23:26 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection")
hpSwitch, = mibBuilder.importSymbols("HP-ICF-OID", "hpSwitch")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
ModuleIdentity, iso, Integer32, NotificationType, Counter64, Counter32, TimeTicks, Gauge32, Unsigned32, ObjectIdentity, Bits, IpAddress, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "iso", "Integer32", "NotificationType", "Counter64", "Counter32", "TimeTicks", "Gauge32", "Unsigned32", "ObjectIdentity", "Bits", "IpAddress", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
TruthValue, RowStatus, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "RowStatus", "TextualConvention", "DisplayString")
vrrpOperVrId, vrrpAssoIpAddrEntry, vrrpOperEntry = mibBuilder.importSymbols("VRRP-MIB", "vrrpOperVrId", "vrrpAssoIpAddrEntry", "vrrpOperEntry")
# --- Module identity --------------------------------------------------------
# pysmi-generated registration of HP-ICF-VRRP-MIB under the HP enterprise
# OID arc 1.3.6.1.4.1.11.2.14.11.5.1.31, with its full revision history.
# Generated code — do not hand-edit the OID tuples or revision strings.
hpicfVrrpMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31))
hpicfVrrpMIB.setRevisions(('2012-11-15 00:00', '2013-06-12 00:00', '2012-02-22 00:00', '2010-10-20 00:00', '2010-07-28 00:00', '2009-05-19 00:00', '2008-02-20 00:00', '2007-12-12 00:00', '2007-08-22 00:00', '2005-07-14 00:00',))
# DESCRIPTION/text metadata is only attached when the builder loads texts.
if mibBuilder.loadTexts: hpicfVrrpMIB.setLastUpdated('201211150000Z')
if mibBuilder.loadTexts: hpicfVrrpMIB.setOrganization('HP Networking')
# --- MIB object definitions -------------------------------------------------
# pysmi-generated object registrations: two subtrees (operations at .1,
# conformance at .2) and the tables/scalars under the operations arc.
# Generated code — keep the OID tuples and constraint values byte-exact.
hpicfVrrpOperations = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1))
hpicfVrrpConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2))
# Global VRRP admin switch (deprecated), defaulting to 'false'.
hpicfVrrpAdminStatus = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hpicfVrrpAdminStatus.setStatus('deprecated')
# Operational table: augments the standard vrrpOperEntry from VRRP-MIB,
# reusing its index columns via registerAugmentions/setIndexNames.
hpicfVrrpOperTable = MibTable((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 2), )
if mibBuilder.loadTexts: hpicfVrrpOperTable.setStatus('current')
hpicfVrrpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 2, 1), )
vrrpOperEntry.registerAugmentions(("HP-ICF-VRRP-MIB", "hpicfVrrpOperEntry"))
hpicfVrrpOperEntry.setIndexNames(*vrrpOperEntry.getIndexNames())
if mibBuilder.loadTexts: hpicfVrrpOperEntry.setStatus('current')
hpicfVrrpVrMode = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("owner", 1), ("backup", 2), ("uninitialized", 3))).clone('uninitialized')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpicfVrrpVrMode.setStatus('current')
hpicfVrrpVrMasterPreempt = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 2, 1, 2), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpicfVrrpVrMasterPreempt.setStatus('current')
hpicfVrrpVrTransferControl = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 2, 1, 3), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpicfVrrpVrTransferControl.setStatus('current')
hpicfVrrpVrPreemptDelayTime = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 600))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpicfVrrpVrPreemptDelayTime.setStatus('current')
hpicfVrrpVrControl = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("failback", 1), ("failover", 2), ("failoverWithMonitoring", 3), ("invalid", 4))).clone('invalid')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpicfVrrpVrControl.setStatus('current')
hpicfVrrpVrRespondToPing = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 2, 1, 6), TruthValue().clone('true')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpicfVrrpVrRespondToPing.setStatus('current')
# Associated-IP-address table: augments vrrpAssoIpAddrEntry from VRRP-MIB.
hpicfVrrpAssoIpAddrTable = MibTable((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 3), )
if mibBuilder.loadTexts: hpicfVrrpAssoIpAddrTable.setStatus('current')
hpicfVrrpAssoIpAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 3, 1), )
vrrpAssoIpAddrEntry.registerAugmentions(("HP-ICF-VRRP-MIB", "hpicfVrrpAssoIpAddrEntry"))
hpicfVrrpAssoIpAddrEntry.setIndexNames(*vrrpAssoIpAddrEntry.getIndexNames())
if mibBuilder.loadTexts: hpicfVrrpAssoIpAddrEntry.setStatus('current')
hpicfVrrpAssoIpMask = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 3, 1, 1), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpicfVrrpAssoIpMask.setStatus('current')
# Track table: independently indexed by ifIndex, vrrpOperVrId and the two
# local track columns (declared via setIndexNames, not augmentation).
hpicfVrrpTrackTable = MibTable((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 5), )
if mibBuilder.loadTexts: hpicfVrrpTrackTable.setStatus('current')
hpicfVrrpTrackEntry = MibTableRow((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 5, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "VRRP-MIB", "vrrpOperVrId"), (0, "HP-ICF-VRRP-MIB", "hpicfVrrpVrTrackType"), (0, "HP-ICF-VRRP-MIB", "hpicfVrrpVrTrackEntity"))
if mibBuilder.loadTexts: hpicfVrrpTrackEntry.setStatus('current')
hpicfVrrpVrTrackType = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 5, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("port", 1), ("trunk", 2), ("vlan", 3))))
if mibBuilder.loadTexts: hpicfVrrpVrTrackType.setStatus('current')
hpicfVrrpVrTrackEntity = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 5, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 255)))
if mibBuilder.loadTexts: hpicfVrrpVrTrackEntity.setStatus('current')
hpicfVrrpTrackRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 5, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpicfVrrpTrackRowStatus.setStatus('current')
hpicfVrrpTrackState = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("down", 0), ("up", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hpicfVrrpTrackState.setStatus('current')
# Stats table (augments vrrpOperEntry) plus remaining scalars; note the
# table (.6) is declared before the three scalars (.7-.9) that follow it,
# and the stats entry/column definitions come after the scalars.
hpicfVrrpStatsTable = MibTable((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 6), )
if mibBuilder.loadTexts: hpicfVrrpStatsTable.setStatus('current')
hpicfVrrpRespondToPing = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 7), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hpicfVrrpRespondToPing.setStatus('deprecated')
hpicfVrrpRemoveConfig = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 8), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hpicfVrrpRemoveConfig.setStatus('deprecated')
hpicfVrrpNonstop = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 9), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hpicfVrrpNonstop.setStatus('deprecated')
hpicfVrrpStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 6, 1), )
vrrpOperEntry.registerAugmentions(("HP-ICF-VRRP-MIB", "hpicfVrrpStatsEntry"))
hpicfVrrpStatsEntry.setIndexNames(*vrrpOperEntry.getIndexNames())
if mibBuilder.loadTexts: hpicfVrrpStatsEntry.setStatus('current')
hpicfVrrpStatsNearFailovers = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 1, 6, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hpicfVrrpStatsNearFailovers.setStatus('current')
# --- Conformance information ------------------------------------------------
# pysmi-generated compliance statements and object groups. The repeated
# `getattr(mibBuilder, 'version', ...)` guard is the generator's way of
# calling setStatus() only on pysnmp builds new enough to support it.
# Generated code — do not hand-edit the OID tuples or group memberships.
hpicfVrrpMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 1))
hpicfVrrpMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 2))
hpicfVrrpMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 1, 1)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpOperGroup"), ("HP-ICF-VRRP-MIB", "hpicfVrrpOperGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpMIBCompliance = hpicfVrrpMIBCompliance.setStatus('deprecated')
hpicfVrrpMIBCompliance1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 1, 2)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpOperGroup"), ("HP-ICF-VRRP-MIB", "hpicfVrrpTrackGroup"), ("HP-ICF-VRRP-MIB", "hpicfVrrpOperGroup"), ("HP-ICF-VRRP-MIB", "hpicfVrrpTrackGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpMIBCompliance1 = hpicfVrrpMIBCompliance1.setStatus('deprecated')
hpicfVrrpMIBCompliance2 = ModuleCompliance((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 1, 3)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpVrPingGroup"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrPingGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpMIBCompliance2 = hpicfVrrpMIBCompliance2.setStatus('current')
hpicfVrrpMIBCompliance3 = ModuleCompliance((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 1, 4)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpNonstopGroup"), ("HP-ICF-VRRP-MIB", "hpicfVrrpNonstopGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpMIBCompliance3 = hpicfVrrpMIBCompliance3.setStatus('deprecated')
hpicfVrrpMIBCompliance4 = ModuleCompliance((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 1, 5)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpOperationsGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpMIBCompliance4 = hpicfVrrpMIBCompliance4.setStatus('deprecated')
hpicfVrrpMIBCompliance5 = ModuleCompliance((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 1, 6)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpOperGroup"), ("HP-ICF-VRRP-MIB", "hpicfVrrpTrackGroup1"), ("HP-ICF-VRRP-MIB", "hpicfVrrpOperGroup"), ("HP-ICF-VRRP-MIB", "hpicfVrrpTrackGroup1"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpMIBCompliance5 = hpicfVrrpMIBCompliance5.setStatus('deprecated')
hpicfVrrpMIBCompliance6 = ModuleCompliance((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 1, 7)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpOperGroup1"), ("HP-ICF-VRRP-MIB", "hpicfVrrpTrackGroup1"), ("HP-ICF-VRRP-MIB", "hpicfVrrpOperGroup1"), ("HP-ICF-VRRP-MIB", "hpicfVrrpTrackGroup1"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpMIBCompliance6 = hpicfVrrpMIBCompliance6.setStatus('current')
hpicfVrrpMIBCompliance7 = ModuleCompliance((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 1, 8)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpOperationsGroup1"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpMIBCompliance7 = hpicfVrrpMIBCompliance7.setStatus('current')
# Object groups: which objects each compliance statement pulls in.
hpicfVrrpOperGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 2, 1)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpAdminStatus"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrMode"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrMasterPreempt"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrTransferControl"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrPreemptDelayTime"), ("HP-ICF-VRRP-MIB", "hpicfVrrpAssoIpMask"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpOperGroup = hpicfVrrpOperGroup.setStatus('deprecated')
hpicfVrrpTrackGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 2, 2)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpTrackRowStatus"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrControl"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpTrackGroup = hpicfVrrpTrackGroup.setStatus('deprecated')
hpicfVrrpVrPingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 2, 3)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpVrRespondToPing"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpVrPingGroup = hpicfVrrpVrPingGroup.setStatus('current')
hpicfVrrpNonstopGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 2, 4)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpNonstop"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpNonstopGroup = hpicfVrrpNonstopGroup.setStatus('deprecated')
hpicfVrrpOperationsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 2, 5)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpRespondToPing"), ("HP-ICF-VRRP-MIB", "hpicfVrrpRemoveConfig"), ("HP-ICF-VRRP-MIB", "hpicfVrrpStatsNearFailovers"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpOperationsGroup = hpicfVrrpOperationsGroup.setStatus('deprecated')
hpicfVrrpTrackGroup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 2, 6)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpTrackRowStatus"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrControl"), ("HP-ICF-VRRP-MIB", "hpicfVrrpTrackState"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpTrackGroup1 = hpicfVrrpTrackGroup1.setStatus('current')
hpicfVrrpOperGroup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 2, 7)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpVrMode"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrMasterPreempt"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrTransferControl"), ("HP-ICF-VRRP-MIB", "hpicfVrrpVrPreemptDelayTime"), ("HP-ICF-VRRP-MIB", "hpicfVrrpAssoIpMask"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpOperGroup1 = hpicfVrrpOperGroup1.setStatus('current')
hpicfVrrpOperationsGroup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 5, 1, 31, 2, 2, 8)).setObjects(("HP-ICF-VRRP-MIB", "hpicfVrrpStatsNearFailovers"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hpicfVrrpOperationsGroup1 = hpicfVrrpOperationsGroup1.setStatus('current')
# Export every defined symbol so other MIB modules can import them by name.
mibBuilder.exportSymbols("HP-ICF-VRRP-MIB", hpicfVrrpVrControl=hpicfVrrpVrControl, hpicfVrrpTrackEntry=hpicfVrrpTrackEntry, hpicfVrrpVrPreemptDelayTime=hpicfVrrpVrPreemptDelayTime, hpicfVrrpTrackTable=hpicfVrrpTrackTable, hpicfVrrpVrTrackEntity=hpicfVrrpVrTrackEntity, hpicfVrrpVrTrackType=hpicfVrrpVrTrackType, hpicfVrrpAssoIpAddrTable=hpicfVrrpAssoIpAddrTable, hpicfVrrpAdminStatus=hpicfVrrpAdminStatus, hpicfVrrpAssoIpAddrEntry=hpicfVrrpAssoIpAddrEntry, hpicfVrrpVrRespondToPing=hpicfVrrpVrRespondToPing, hpicfVrrpStatsEntry=hpicfVrrpStatsEntry, hpicfVrrpRemoveConfig=hpicfVrrpRemoveConfig, hpicfVrrpOperEntry=hpicfVrrpOperEntry, hpicfVrrpOperations=hpicfVrrpOperations, hpicfVrrpMIBCompliance=hpicfVrrpMIBCompliance, hpicfVrrpTrackGroup=hpicfVrrpTrackGroup, hpicfVrrpNonstop=hpicfVrrpNonstop, PYSNMP_MODULE_ID=hpicfVrrpMIB, hpicfVrrpMIBCompliance1=hpicfVrrpMIBCompliance1, hpicfVrrpVrMasterPreempt=hpicfVrrpVrMasterPreempt, hpicfVrrpMIBCompliance5=hpicfVrrpMIBCompliance5, hpicfVrrpNonstopGroup=hpicfVrrpNonstopGroup, hpicfVrrpMIBCompliance3=hpicfVrrpMIBCompliance3, hpicfVrrpTrackGroup1=hpicfVrrpTrackGroup1, hpicfVrrpVrPingGroup=hpicfVrrpVrPingGroup, hpicfVrrpVrMode=hpicfVrrpVrMode, hpicfVrrpOperationsGroup=hpicfVrrpOperationsGroup, hpicfVrrpTrackRowStatus=hpicfVrrpTrackRowStatus, hpicfVrrpConformance=hpicfVrrpConformance, hpicfVrrpMIB=hpicfVrrpMIB, hpicfVrrpMIBCompliance6=hpicfVrrpMIBCompliance6, hpicfVrrpOperGroup1=hpicfVrrpOperGroup1, hpicfVrrpVrTransferControl=hpicfVrrpVrTransferControl, hpicfVrrpMIBCompliances=hpicfVrrpMIBCompliances, hpicfVrrpRespondToPing=hpicfVrrpRespondToPing, hpicfVrrpAssoIpMask=hpicfVrrpAssoIpMask, hpicfVrrpMIBCompliance7=hpicfVrrpMIBCompliance7, hpicfVrrpOperationsGroup1=hpicfVrrpOperationsGroup1, hpicfVrrpStatsNearFailovers=hpicfVrrpStatsNearFailovers, hpicfVrrpStatsTable=hpicfVrrpStatsTable, hpicfVrrpOperGroup=hpicfVrrpOperGroup, hpicfVrrpMIBCompliance4=hpicfVrrpMIBCompliance4, hpicfVrrpMIBGroups=hpicfVrrpMIBGroups, 
hpicfVrrpMIBCompliance2=hpicfVrrpMIBCompliance2, hpicfVrrpOperTable=hpicfVrrpOperTable, hpicfVrrpTrackState=hpicfVrrpTrackState)
| 120.29927 | 2,097 | 0.738972 | 1,943 | 16,481 | 6.267113 | 0.10911 | 0.029892 | 0.036955 | 0.049273 | 0.425064 | 0.355917 | 0.327831 | 0.320604 | 0.284389 | 0.284389 | 0 | 0.087198 | 0.091924 | 16,481 | 136 | 2,098 | 121.183824 | 0.726447 | 0.019659 | 0 | 0.132231 | 0 | 0 | 0.199158 | 0.03245 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.099174 | 0.082645 | 0 | 0.082645 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 3 |
135e814673c97e680e81eb4e69c804ca6704248d | 329 | py | Python | Lib/site-packages/notebook/tests/conftest.py | edupyter/EDUPYTER38 | 396183cea72987506f1ef647c0272a2577c56218 | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/notebook/tests/conftest.py | edupyter/EDUPYTER38 | 396183cea72987506f1ef647c0272a2577c56218 | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/notebook/tests/conftest.py | edupyter/EDUPYTER38 | 396183cea72987506f1ef647c0272a2577c56218 | [
"bzip2-1.0.6"
] | null | null | null | def pytest_addoption(parser):
parser.addoption('--integration_tests', action='store_true', dest="integration_tests",
default=False, help="enable integration tests")
def pytest_configure(config):
    """Exclude integration tests unless ``--integration_tests`` was given.

    When the flag is absent, install a mark expression that filters out
    tests marked ``integration_tests``.
    """
    if config.option.integration_tests:
        return
    config.option.markexpr = 'not integration_tests'
| 41.125 | 90 | 0.723404 | 37 | 329 | 6.243243 | 0.567568 | 0.34632 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.158055 | 329 | 7 | 91 | 47 | 0.833935 | 0 | 0 | 0 | 0 | 0 | 0.300912 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
1363b543532febcd8b12862ed1ffcd51867ffc1d | 176 | py | Python | src/apps/home/apps.py | avibn/todovib | 4052da86e27825c60be618e986c2c16a23002e3c | [
"MIT"
] | 8 | 2021-06-27T10:39:36.000Z | 2022-01-22T19:47:35.000Z | src/apps/home/apps.py | avibn/todovib | 4052da86e27825c60be618e986c2c16a23002e3c | [
"MIT"
] | null | null | null | src/apps/home/apps.py | avibn/todovib | 4052da86e27825c60be618e986c2c16a23002e3c | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class HomeConfig(AppConfig):
    """Django application configuration for the home app."""

    # Dotted path Django uses to locate the app package.
    name = "src.apps.home"
    # Human-readable label shown e.g. in the admin.
    verbose_name = "Home"
    # Primary key field type for models that don't declare one.
    default_auto_field = "django.db.models.BigAutoField"
| 19.555556 | 56 | 0.727273 | 22 | 176 | 5.681818 | 0.772727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.170455 | 176 | 8 | 57 | 22 | 0.856164 | 0 | 0 | 0 | 0 | 0 | 0.261364 | 0.164773 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 3 |
1385979e44a8280c9e59c8a7cf1fb4dd1769b326 | 108,575 | py | Python | bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py | nielm/google-cloud-python | fd126fdea34206109eb00d675374ff7dc4dcc5ef | [
"Apache-2.0"
] | 1 | 2019-06-14T10:11:59.000Z | 2019-06-14T10:11:59.000Z | bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py | nielm/google-cloud-python | fd126fdea34206109eb00d675374ff7dc4dcc5ef | [
"Apache-2.0"
] | 1 | 2018-04-06T19:51:23.000Z | 2018-04-06T19:51:23.000Z | bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py | nielm/google-cloud-python | fd126fdea34206109eb00d675374ff7dc4dcc5ef | [
"Apache-2.0"
] | 1 | 2020-04-14T10:47:41.000Z | 2020-04-14T10:47:41.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.cloud.bigquery_datatransfer_v1.proto import (
transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2,
)
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name="google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto",
package="google.cloud.bigquery.datatransfer.v1",
syntax="proto3",
serialized_options=_b(
"\n)com.google.cloud.bigquery.datatransfer.v1B\021DataTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1"
),
serialized_pb=_b(
'\n>google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto"\xf1\x04\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08"i\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06"\xcf\x07\n\nDataSource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12J\n\rtransfer_type\x18\x07 \x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferType\x12#\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x12\x1f\n\x17update_deadline_seconds\x18\t 
\x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 \x01(\x0b\x32\x19.google.protobuf.Duration"s\n\x11\x41uthorizationType\x12"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02"$\n\x14GetDataSourceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"O\n\x16ListDataSourcesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"{\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x99\x01\n\x1b\x43reateTransferConfigRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12N\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t"\xba\x01\n\x1bUpdateTransferConfigRequest\x12N\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"(\n\x18GetTransferConfigRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t"+\n\x1b\x44\x65leteTransferConfigRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"%\n\x15GetTransferRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"(\n\x18\x44\x65leteTransferRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"l\n\x1aListTransferConfigsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x87\x01\n\x1bListTransferConfigsResponse\x12O\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xad\x02\n\x17ListTransferRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 \x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01"~\n\x18ListTransferRunsResponse\x12I\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xaf\x01\n\x17ListTransferLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"\x86\x01\n\x18ListTransferLogsResponse\x12Q\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessage\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"&\n\x16\x43heckValidCredsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08"\x8b\x01\n\x1bScheduleTransferRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\nstart_time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun2\x86\x1b\n\x13\x44\x61taTransferService\x12\xdf\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource"^\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\x12\xf0\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse"^\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\x12\x9d\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\x12\xbd\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\x12\xda\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty"f\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\x12\xf3\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig
"f\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\x12\x84\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse"f\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\x12\xad\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse"\x8b\x01\x82\xd3\xe4\x93\x02\x84\x01"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\x12\xf8\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"t\x82\xd3\xe4\x93\x02n\x12:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\xe2\x01\n\x11\x44\x65leteTransferRun\x12?.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty"t\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\x89\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse"t\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\x12\xa9\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse"\x93\x01\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transfer
Configs/*/runs/*}/transferLogs\x12\x97\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse"\x84\x01\x82\xd3\xe4\x93\x02~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*B\xe3\x01\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3'
),
dependencies=[
google_dot_api_dot_annotations__pb2.DESCRIPTOR,
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.DESCRIPTOR,
google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,
google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,
google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,
google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,
],
)
_DATASOURCEPARAMETER_TYPE = _descriptor.EnumDescriptor(
name="Type",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="TYPE_UNSPECIFIED",
index=0,
number=0,
serialized_options=None,
type=None,
),
_descriptor.EnumValueDescriptor(
name="STRING", index=1, number=1, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="INTEGER", index=2, number=2, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="DOUBLE", index=3, number=3, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="BOOLEAN", index=4, number=4, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="RECORD", index=5, number=5, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="PLUS_PAGE", index=6, number=6, serialized_options=None, type=None
),
],
containing_type=None,
serialized_options=None,
serialized_start=876,
serialized_end=981,
)
_sym_db.RegisterEnumDescriptor(_DATASOURCEPARAMETER_TYPE)
_DATASOURCE_AUTHORIZATIONTYPE = _descriptor.EnumDescriptor(
name="AuthorizationType",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="AUTHORIZATION_TYPE_UNSPECIFIED",
index=0,
number=0,
serialized_options=None,
type=None,
),
_descriptor.EnumValueDescriptor(
name="AUTHORIZATION_CODE",
index=1,
number=1,
serialized_options=None,
type=None,
),
_descriptor.EnumValueDescriptor(
name="GOOGLE_PLUS_AUTHORIZATION_CODE",
index=2,
number=2,
serialized_options=None,
type=None,
),
],
containing_type=None,
serialized_options=None,
serialized_start=1743,
serialized_end=1858,
)
_sym_db.RegisterEnumDescriptor(_DATASOURCE_AUTHORIZATIONTYPE)
_DATASOURCE_DATAREFRESHTYPE = _descriptor.EnumDescriptor(
name="DataRefreshType",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="DATA_REFRESH_TYPE_UNSPECIFIED",
index=0,
number=0,
serialized_options=None,
type=None,
),
_descriptor.EnumValueDescriptor(
name="SLIDING_WINDOW", index=1, number=1, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="CUSTOM_SLIDING_WINDOW",
index=2,
number=2,
serialized_options=None,
type=None,
),
],
containing_type=None,
serialized_options=None,
serialized_start=1860,
serialized_end=1959,
)
_sym_db.RegisterEnumDescriptor(_DATASOURCE_DATAREFRESHTYPE)
_LISTTRANSFERRUNSREQUEST_RUNATTEMPT = _descriptor.EnumDescriptor(
name="RunAttempt",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="RUN_ATTEMPT_UNSPECIFIED",
index=0,
number=0,
serialized_options=None,
type=None,
),
_descriptor.EnumValueDescriptor(
name="LATEST", index=1, number=1, serialized_options=None, type=None
),
],
containing_type=None,
serialized_options=None,
serialized_start=3215,
serialized_end=3268,
)
_sym_db.RegisterEnumDescriptor(_LISTTRANSFERRUNSREQUEST_RUNATTEMPT)
_DATASOURCEPARAMETER = _descriptor.Descriptor(
name="DataSourceParameter",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="param_id",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.param_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="display_name",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.display_name",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="description",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.description",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="type",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.type",
index=3,
number=4,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="required",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.required",
index=4,
number=5,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="repeated",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.repeated",
index=5,
number=6,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="validation_regex",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.validation_regex",
index=6,
number=7,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="allowed_values",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.allowed_values",
index=7,
number=8,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="min_value",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.min_value",
index=8,
number=9,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="max_value",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.max_value",
index=9,
number=10,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="fields",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.fields",
index=10,
number=11,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="validation_description",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.validation_description",
index=11,
number=12,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="validation_help_url",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.validation_help_url",
index=12,
number=13,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="immutable",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.immutable",
index=13,
number=14,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="recurse",
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.recurse",
index=14,
number=15,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[_DATASOURCEPARAMETER_TYPE],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=356,
serialized_end=981,
)
_DATASOURCE = _descriptor.Descriptor(
name="DataSource",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="data_source_id",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.data_source_id",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="display_name",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.display_name",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="description",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.description",
index=3,
number=4,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="client_id",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.client_id",
index=4,
number=5,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="scopes",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.scopes",
index=5,
number=6,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="transfer_type",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.transfer_type",
index=6,
number=7,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="supports_multiple_transfers",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.supports_multiple_transfers",
index=7,
number=8,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="update_deadline_seconds",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.update_deadline_seconds",
index=8,
number=9,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="default_schedule",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.default_schedule",
index=9,
number=10,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="supports_custom_schedule",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.supports_custom_schedule",
index=10,
number=11,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="parameters",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.parameters",
index=11,
number=12,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="help_url",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.help_url",
index=12,
number=13,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="authorization_type",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.authorization_type",
index=13,
number=14,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="data_refresh_type",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.data_refresh_type",
index=14,
number=15,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="default_data_refresh_window_days",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.default_data_refresh_window_days",
index=15,
number=16,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="manual_runs_disabled",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.manual_runs_disabled",
index=16,
number=17,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="minimum_schedule_interval",
full_name="google.cloud.bigquery.datatransfer.v1.DataSource.minimum_schedule_interval",
index=17,
number=18,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[_DATASOURCE_AUTHORIZATIONTYPE, _DATASOURCE_DATAREFRESHTYPE],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=984,
serialized_end=1959,
)
_GETDATASOURCEREQUEST = _descriptor.Descriptor(
name="GetDataSourceRequest",
full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1961,
serialized_end=1997,
)
_LISTDATASOURCESREQUEST = _descriptor.Descriptor(
name="ListDataSourcesRequest",
full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="parent",
full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest.parent",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_token",
full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest.page_token",
index=1,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_size",
full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest.page_size",
index=2,
number=4,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1999,
serialized_end=2078,
)
# ---------------------------------------------------------------------------
# NOTE(review): auto-generated protobuf descriptor definitions (protoc output
# for google/cloud/bigquery/datatransfer/v1/datatransfer.proto). Do not edit
# by hand — regenerate from the .proto file instead. The serialized_start /
# serialized_end offsets index into DESCRIPTOR's serialized_pb blob and must
# stay consistent with it.
# ---------------------------------------------------------------------------
# Descriptor for ListDataSourcesResponse (fields: data_sources, next_page_token).
_LISTDATASOURCESRESPONSE = _descriptor.Descriptor(
name="ListDataSourcesResponse",
full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="data_sources",
full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse.data_sources",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="next_page_token",
full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse.next_page_token",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2080,
serialized_end=2203,
)
# Descriptor for CreateTransferConfigRequest
# (fields: parent, transfer_config, authorization_code).
_CREATETRANSFERCONFIGREQUEST = _descriptor.Descriptor(
name="CreateTransferConfigRequest",
full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="parent",
full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.parent",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="transfer_config",
full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.transfer_config",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="authorization_code",
full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.authorization_code",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2206,
serialized_end=2359,
)
# Descriptor for UpdateTransferConfigRequest
# (fields: transfer_config, authorization_code, update_mask).
_UPDATETRANSFERCONFIGREQUEST = _descriptor.Descriptor(
name="UpdateTransferConfigRequest",
full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="transfer_config",
full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.transfer_config",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="authorization_code",
full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.authorization_code",
index=1,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="update_mask",
full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.update_mask",
index=2,
number=4,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2362,
serialized_end=2548,
)
# Descriptor for GetTransferConfigRequest (single field: name).
_GETTRANSFERCONFIGREQUEST = _descriptor.Descriptor(
name="GetTransferConfigRequest",
full_name="google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2550,
serialized_end=2590,
)
# Descriptor for DeleteTransferConfigRequest (single field: name).
_DELETETRANSFERCONFIGREQUEST = _descriptor.Descriptor(
name="DeleteTransferConfigRequest",
full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2592,
serialized_end=2635,
)
# Descriptor for GetTransferRunRequest (single field: name).
_GETTRANSFERRUNREQUEST = _descriptor.Descriptor(
name="GetTransferRunRequest",
full_name="google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2637,
serialized_end=2674,
)
# Descriptor for DeleteTransferRunRequest (single field: name).
_DELETETRANSFERRUNREQUEST = _descriptor.Descriptor(
name="DeleteTransferRunRequest",
full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2676,
serialized_end=2716,
)
# Descriptor for ListTransferConfigsRequest
# (fields: parent, data_source_ids, page_token, page_size).
_LISTTRANSFERCONFIGSREQUEST = _descriptor.Descriptor(
name="ListTransferConfigsRequest",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="parent",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.parent",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="data_source_ids",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.data_source_ids",
index=1,
number=2,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_token",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.page_token",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_size",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.page_size",
index=3,
number=4,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2718,
serialized_end=2826,
)
# Descriptor for ListTransferConfigsResponse
# (fields: transfer_configs, next_page_token).
_LISTTRANSFERCONFIGSRESPONSE = _descriptor.Descriptor(
name="ListTransferConfigsResponse",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="transfer_configs",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse.transfer_configs",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="next_page_token",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse.next_page_token",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2829,
serialized_end=2964,
)
# Descriptor for ListTransferRunsRequest
# (fields: parent, states, page_token, page_size, run_attempt; nested
# RunAttempt enum is referenced via _LISTTRANSFERRUNSREQUEST_RUNATTEMPT,
# defined earlier in this generated module).
_LISTTRANSFERRUNSREQUEST = _descriptor.Descriptor(
name="ListTransferRunsRequest",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="parent",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.parent",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="states",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.states",
index=1,
number=2,
type=14,
cpp_type=8,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_token",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.page_token",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_size",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.page_size",
index=3,
number=4,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="run_attempt",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.run_attempt",
index=4,
number=5,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[_LISTTRANSFERRUNSREQUEST_RUNATTEMPT],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2967,
serialized_end=3268,
)
# Descriptor for ListTransferRunsResponse
# (fields: transfer_runs, next_page_token).
_LISTTRANSFERRUNSRESPONSE = _descriptor.Descriptor(
name="ListTransferRunsResponse",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="transfer_runs",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse.transfer_runs",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="next_page_token",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse.next_page_token",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3270,
serialized_end=3396,
)
# Descriptor for ListTransferLogsRequest
# (fields: parent, page_token, page_size, message_types — note the field
# numbers 1/4/5/6 are non-contiguous, matching the .proto definition).
_LISTTRANSFERLOGSREQUEST = _descriptor.Descriptor(
name="ListTransferLogsRequest",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="parent",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.parent",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_token",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.page_token",
index=1,
number=4,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_size",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.page_size",
index=2,
number=5,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="message_types",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.message_types",
index=3,
number=6,
type=14,
cpp_type=8,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3399,
serialized_end=3574,
)
# Descriptor for ListTransferLogsResponse
# (fields: transfer_messages, next_page_token).
_LISTTRANSFERLOGSRESPONSE = _descriptor.Descriptor(
name="ListTransferLogsResponse",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="transfer_messages",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse.transfer_messages",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="next_page_token",
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse.next_page_token",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3577,
serialized_end=3711,
)
# Descriptor for CheckValidCredsRequest (single field: name).
_CHECKVALIDCREDSREQUEST = _descriptor.Descriptor(
name="CheckValidCredsRequest",
full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3713,
serialized_end=3751,
)
# Descriptor for CheckValidCredsResponse (single bool field: has_valid_creds).
_CHECKVALIDCREDSRESPONSE = _descriptor.Descriptor(
name="CheckValidCredsResponse",
full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="has_valid_creds",
full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse.has_valid_creds",
index=0,
number=1,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3753,
serialized_end=3803,
)
# Descriptor for ScheduleTransferRunsRequest
# (fields: parent, start_time, end_time).
_SCHEDULETRANSFERRUNSREQUEST = _descriptor.Descriptor(
name="ScheduleTransferRunsRequest",
full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="parent",
full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest.parent",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="start_time",
full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest.start_time",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="end_time",
full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest.end_time",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3806,
serialized_end=3945,
)
# Descriptor for ScheduleTransferRunsResponse (single repeated field: runs).
_SCHEDULETRANSFERRUNSRESPONSE = _descriptor.Descriptor(
name="ScheduleTransferRunsResponse",
full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="runs",
full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse.runs",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3947,
serialized_end=4043,
)
# ---------------------------------------------------------------------------
# Generated descriptor wiring: resolve cross-references between the field
# descriptors declared above. Message- and enum-typed fields get their
# message_type / enum_type pointers set here (they were None at construction
# time), nested enums get their containing_type, and every message descriptor
# is then registered on the file DESCRIPTOR. Order matters; do not edit by
# hand — regenerate from the .proto instead.
# ---------------------------------------------------------------------------
_DATASOURCEPARAMETER.fields_by_name["type"].enum_type = _DATASOURCEPARAMETER_TYPE
_DATASOURCEPARAMETER.fields_by_name[
"min_value"
].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_DATASOURCEPARAMETER.fields_by_name[
"max_value"
].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
# DataSourceParameter.fields is recursive: a record parameter's children are
# themselves DataSourceParameter messages.
_DATASOURCEPARAMETER.fields_by_name["fields"].message_type = _DATASOURCEPARAMETER
_DATASOURCEPARAMETER_TYPE.containing_type = _DATASOURCEPARAMETER
_DATASOURCE.fields_by_name[
"transfer_type"
].enum_type = (
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERTYPE
)
_DATASOURCE.fields_by_name["parameters"].message_type = _DATASOURCEPARAMETER
_DATASOURCE.fields_by_name[
"authorization_type"
].enum_type = _DATASOURCE_AUTHORIZATIONTYPE
_DATASOURCE.fields_by_name["data_refresh_type"].enum_type = _DATASOURCE_DATAREFRESHTYPE
_DATASOURCE.fields_by_name[
"minimum_schedule_interval"
].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_DATASOURCE_AUTHORIZATIONTYPE.containing_type = _DATASOURCE
_DATASOURCE_DATAREFRESHTYPE.containing_type = _DATASOURCE
_LISTDATASOURCESRESPONSE.fields_by_name["data_sources"].message_type = _DATASOURCE
_CREATETRANSFERCONFIGREQUEST.fields_by_name[
"transfer_config"
].message_type = (
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG
)
_UPDATETRANSFERCONFIGREQUEST.fields_by_name[
"transfer_config"
].message_type = (
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG
)
_UPDATETRANSFERCONFIGREQUEST.fields_by_name[
"update_mask"
].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
_LISTTRANSFERCONFIGSRESPONSE.fields_by_name[
"transfer_configs"
].message_type = (
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG
)
_LISTTRANSFERRUNSREQUEST.fields_by_name[
"states"
].enum_type = (
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERSTATE
)
_LISTTRANSFERRUNSREQUEST.fields_by_name[
"run_attempt"
].enum_type = _LISTTRANSFERRUNSREQUEST_RUNATTEMPT
_LISTTRANSFERRUNSREQUEST_RUNATTEMPT.containing_type = _LISTTRANSFERRUNSREQUEST
_LISTTRANSFERRUNSRESPONSE.fields_by_name[
"transfer_runs"
].message_type = (
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN
)
_LISTTRANSFERLOGSREQUEST.fields_by_name[
"message_types"
].enum_type = (
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE_MESSAGESEVERITY
)
_LISTTRANSFERLOGSRESPONSE.fields_by_name[
"transfer_messages"
].message_type = (
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE
)
_SCHEDULETRANSFERRUNSREQUEST.fields_by_name[
"start_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_SCHEDULETRANSFERRUNSREQUEST.fields_by_name[
"end_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_SCHEDULETRANSFERRUNSRESPONSE.fields_by_name[
"runs"
].message_type = (
google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN
)
# Register every top-level message descriptor on the file descriptor, then
# publish the file descriptor in the default symbol database.
DESCRIPTOR.message_types_by_name["DataSourceParameter"] = _DATASOURCEPARAMETER
DESCRIPTOR.message_types_by_name["DataSource"] = _DATASOURCE
DESCRIPTOR.message_types_by_name["GetDataSourceRequest"] = _GETDATASOURCEREQUEST
DESCRIPTOR.message_types_by_name["ListDataSourcesRequest"] = _LISTDATASOURCESREQUEST
DESCRIPTOR.message_types_by_name["ListDataSourcesResponse"] = _LISTDATASOURCESRESPONSE
DESCRIPTOR.message_types_by_name[
"CreateTransferConfigRequest"
] = _CREATETRANSFERCONFIGREQUEST
DESCRIPTOR.message_types_by_name[
"UpdateTransferConfigRequest"
] = _UPDATETRANSFERCONFIGREQUEST
DESCRIPTOR.message_types_by_name["GetTransferConfigRequest"] = _GETTRANSFERCONFIGREQUEST
DESCRIPTOR.message_types_by_name[
"DeleteTransferConfigRequest"
] = _DELETETRANSFERCONFIGREQUEST
DESCRIPTOR.message_types_by_name["GetTransferRunRequest"] = _GETTRANSFERRUNREQUEST
DESCRIPTOR.message_types_by_name["DeleteTransferRunRequest"] = _DELETETRANSFERRUNREQUEST
DESCRIPTOR.message_types_by_name[
"ListTransferConfigsRequest"
] = _LISTTRANSFERCONFIGSREQUEST
DESCRIPTOR.message_types_by_name[
"ListTransferConfigsResponse"
] = _LISTTRANSFERCONFIGSRESPONSE
DESCRIPTOR.message_types_by_name["ListTransferRunsRequest"] = _LISTTRANSFERRUNSREQUEST
DESCRIPTOR.message_types_by_name["ListTransferRunsResponse"] = _LISTTRANSFERRUNSRESPONSE
DESCRIPTOR.message_types_by_name["ListTransferLogsRequest"] = _LISTTRANSFERLOGSREQUEST
DESCRIPTOR.message_types_by_name["ListTransferLogsResponse"] = _LISTTRANSFERLOGSRESPONSE
DESCRIPTOR.message_types_by_name["CheckValidCredsRequest"] = _CHECKVALIDCREDSREQUEST
DESCRIPTOR.message_types_by_name["CheckValidCredsResponse"] = _CHECKVALIDCREDSRESPONSE
DESCRIPTOR.message_types_by_name[
"ScheduleTransferRunsRequest"
] = _SCHEDULETRANSFERRUNSREQUEST
DESCRIPTOR.message_types_by_name[
"ScheduleTransferRunsResponse"
] = _SCHEDULETRANSFERRUNSRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Generated message classes: for each descriptor above, build the concrete
# Python message class via the protobuf reflection metaclass and register it
# in the default symbol database. The __doc__ strings (including any typos)
# are emitted verbatim by protoc from the .proto comments; do not edit by
# hand — regenerate from the .proto instead.
# ---------------------------------------------------------------------------
DataSourceParameter = _reflection.GeneratedProtocolMessageType(
"DataSourceParameter",
(_message.Message,),
dict(
DESCRIPTOR=_DATASOURCEPARAMETER,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""Represents a data source parameter with validation rules, so that
parameters can be rendered in the UI. These parameters are given to us
by supported data sources, and include all needed information for
rendering and validation. Thus, whoever uses this api can decide to
generate either generic ui, or custom data source specific forms.
Attributes:
param_id:
Parameter identifier.
display_name:
Parameter display name in the user interface.
description:
Parameter description.
type:
Parameter type.
required:
Is parameter required.
repeated:
Can parameter have multiple values.
validation_regex:
Regular expression which can be used for parameter validation.
allowed_values:
All possible values for the parameter.
min_value:
For integer and double values specifies minimum allowed value.
max_value:
For integer and double values specifies maxminum allowed
value.
fields:
When parameter is a record, describes child fields.
validation_description:
Description of the requirements for this field, in case the
user input does not fulfill the regex pattern or min/max
values.
validation_help_url:
URL to a help document to further explain the naming
requirements.
immutable:
Cannot be changed after initial creation.
recurse:
If set to true, schema should be taken from the parent with
the same parameter\_id. Only applicable when parameter type is
RECORD.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSourceParameter)
),
)
_sym_db.RegisterMessage(DataSourceParameter)
DataSource = _reflection.GeneratedProtocolMessageType(
"DataSource",
(_message.Message,),
dict(
DESCRIPTOR=_DATASOURCE,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""Represents data source metadata. Metadata is sufficient to render UI and
request proper OAuth tokens.
Attributes:
name:
Output only. Data source resource name.
data_source_id:
Data source id.
display_name:
User friendly data source name.
description:
User friendly data source description string.
client_id:
Data source client id which should be used to receive refresh
token. When not supplied, no offline credentials are populated
for data transfer.
scopes:
Api auth scopes for which refresh token needs to be obtained.
Only valid when ``client_id`` is specified. Ignored otherwise.
These are scopes needed by a data source to prepare data and
ingest them into BigQuery, e.g.,
https://www.googleapis.com/auth/bigquery
transfer_type:
Deprecated. This field has no effect.
supports_multiple_transfers:
Indicates whether the data source supports multiple transfers
to different BigQuery targets.
update_deadline_seconds:
The number of seconds to wait for an update from the data
source before BigQuery marks the transfer as failed.
default_schedule:
Default data transfer schedule. Examples of valid schedules
include: ``1st,3rd monday of month 15:30``, ``every wed,fri of
jan,jun 13:15``, and ``first sunday of quarter 00:00``.
supports_custom_schedule:
Specifies whether the data source supports a user defined
schedule, or operates on the default schedule. When set to
``true``, user can override default schedule.
parameters:
Data source parameters.
help_url:
Url for the help document for this data source.
authorization_type:
Indicates the type of authorization.
data_refresh_type:
Specifies whether the data source supports automatic data
refresh for the past few days, and how it's supported. For
some data sources, data might not be complete until a few days
later, so it's useful to refresh data automatically.
default_data_refresh_window_days:
Default data refresh window on days. Only meaningful when
``data_refresh_type`` = ``SLIDING_WINDOW``.
manual_runs_disabled:
Disables backfilling and manual run scheduling for the data
source.
minimum_schedule_interval:
The minimum interval for scheduler to schedule runs.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSource)
),
)
_sym_db.RegisterMessage(DataSource)
GetDataSourceRequest = _reflection.GeneratedProtocolMessageType(
"GetDataSourceRequest",
(_message.Message,),
dict(
DESCRIPTOR=_GETDATASOURCEREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to get data source info.
Attributes:
name:
The field will contain name of the resource requested, for
example:
``projects/{project_id}/dataSources/{data_source_id}``
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest)
),
)
_sym_db.RegisterMessage(GetDataSourceRequest)
ListDataSourcesRequest = _reflection.GeneratedProtocolMessageType(
"ListDataSourcesRequest",
(_message.Message,),
dict(
DESCRIPTOR=_LISTDATASOURCESREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""Request to list supported data sources and their data transfer settings.
Attributes:
parent:
The BigQuery project id for which data sources should be
returned. Must be in the form: ``projects/{project_id}``
page_token:
Pagination token, which can be used to request a specific page
of ``ListDataSourcesRequest`` list results. For multiple-page
results, ``ListDataSourcesResponse`` outputs a ``next_page``
token, which can be used as the ``page_token`` value to
request the next page of list results.
page_size:
Page size. The default page size is the maximum value of 1000
results.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest)
),
)
_sym_db.RegisterMessage(ListDataSourcesRequest)
ListDataSourcesResponse = _reflection.GeneratedProtocolMessageType(
"ListDataSourcesResponse",
(_message.Message,),
dict(
DESCRIPTOR=_LISTDATASOURCESRESPONSE,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""Returns list of supported data sources and their metadata.
Attributes:
data_sources:
List of supported data sources and their transfer settings.
next_page_token:
Output only. The next-pagination token. For multiple-page list
results, this token can be used as the
``ListDataSourcesRequest.page_token`` to request the next page
of list results.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse)
),
)
_sym_db.RegisterMessage(ListDataSourcesResponse)
CreateTransferConfigRequest = _reflection.GeneratedProtocolMessageType(
"CreateTransferConfigRequest",
(_message.Message,),
dict(
DESCRIPTOR=_CREATETRANSFERCONFIGREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to create a data transfer configuration. If new credentials
are needed for this transfer configuration, an authorization code must
be provided. If an authorization code is provided, the transfer
configuration will be associated with the user id corresponding to the
authorization code. Otherwise, the transfer configuration will be
associated with the calling user.
Attributes:
parent:
The BigQuery project id where the transfer configuration
should be created. Must be in the format
/projects/{project\_id}/locations/{location\_id} If specified
location and location of the destination bigquery dataset do
not match - the request will fail.
transfer_config:
Data transfer configuration to create.
authorization_code:
Optional OAuth2 authorization code to use with this transfer
configuration. This is required if new credentials are needed,
as indicated by ``CheckValidCreds``. In order to obtain
authorization\_code, please make a request to https://www.gsta
tic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=&re
direct\_uri= - client\_id should be OAuth client\_id of
BigQuery DTS API for the given data source returned by
ListDataSources method. - data\_source\_scopes are the scopes
returned by ListDataSources method. - redirect\_uri is an
optional parameter. If not specified, then authorization
code is posted to the opener of authorization flow window.
Otherwise it will be sent to the redirect uri. A special
value of urn:ietf:wg:oauth:2.0:oob means that authorization
code should be returned in the title bar of the browser,
with the page text prompting the user to copy the code and
paste it in the application.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest)
),
)
_sym_db.RegisterMessage(CreateTransferConfigRequest)
UpdateTransferConfigRequest = _reflection.GeneratedProtocolMessageType(
"UpdateTransferConfigRequest",
(_message.Message,),
dict(
DESCRIPTOR=_UPDATETRANSFERCONFIGREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to update a transfer configuration. To update the user id of
the transfer configuration, an authorization code needs to be provided.
Attributes:
transfer_config:
Data transfer configuration to create.
authorization_code:
Optional OAuth2 authorization code to use with this transfer
configuration. If it is provided, the transfer configuration
will be associated with the authorizing user. In order to
obtain authorization\_code, please make a request to https://w
ww.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&sc
ope=&redirect\_uri= - client\_id should be OAuth client\_id
of BigQuery DTS API for the given data source returned by
ListDataSources method. - data\_source\_scopes are the scopes
returned by ListDataSources method. - redirect\_uri is an
optional parameter. If not specified, then authorization
code is posted to the opener of authorization flow window.
Otherwise it will be sent to the redirect uri. A special
value of urn:ietf:wg:oauth:2.0:oob means that authorization
code should be returned in the title bar of the browser,
with the page text prompting the user to copy the code and
paste it in the application.
update_mask:
Required list of fields to be updated in this request.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest)
),
)
_sym_db.RegisterMessage(UpdateTransferConfigRequest)
GetTransferConfigRequest = _reflection.GeneratedProtocolMessageType(
"GetTransferConfigRequest",
(_message.Message,),
dict(
DESCRIPTOR=_GETTRANSFERCONFIGREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to get data transfer information.
Attributes:
name:
The field will contain name of the resource requested, for
example: ``projects/{project_id}/transferConfigs/{config_id}``
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest)
),
)
_sym_db.RegisterMessage(GetTransferConfigRequest)
DeleteTransferConfigRequest = _reflection.GeneratedProtocolMessageType(
"DeleteTransferConfigRequest",
(_message.Message,),
dict(
DESCRIPTOR=_DELETETRANSFERCONFIGREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to delete data transfer information. All associated transfer
runs and log messages will be deleted as well.
Attributes:
name:
The field will contain name of the resource requested, for
example: ``projects/{project_id}/transferConfigs/{config_id}``
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest)
),
)
_sym_db.RegisterMessage(DeleteTransferConfigRequest)
GetTransferRunRequest = _reflection.GeneratedProtocolMessageType(
"GetTransferRunRequest",
(_message.Message,),
dict(
DESCRIPTOR=_GETTRANSFERRUNREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to get data transfer run information.
Attributes:
name:
The field will contain name of the resource requested, for
example: ``projects/{project_id}/transferConfigs/{config_id}/r
uns/{run_id}``
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest)
),
)
_sym_db.RegisterMessage(GetTransferRunRequest)
DeleteTransferRunRequest = _reflection.GeneratedProtocolMessageType(
"DeleteTransferRunRequest",
(_message.Message,),
dict(
DESCRIPTOR=_DELETETRANSFERRUNREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to delete data transfer run information.
Attributes:
name:
The field will contain name of the resource requested, for
example: ``projects/{project_id}/transferConfigs/{config_id}/r
uns/{run_id}``
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest)
),
)
_sym_db.RegisterMessage(DeleteTransferRunRequest)
ListTransferConfigsRequest = _reflection.GeneratedProtocolMessageType(
"ListTransferConfigsRequest",
(_message.Message,),
dict(
DESCRIPTOR=_LISTTRANSFERCONFIGSREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to list data transfers configured for a BigQuery project.
Attributes:
parent:
The BigQuery project id for which data sources should be
returned: ``projects/{project_id}``.
data_source_ids:
When specified, only configurations of requested data sources
are returned.
page_token:
Pagination token, which can be used to request a specific page
of ``ListTransfersRequest`` list results. For multiple-page
results, ``ListTransfersResponse`` outputs a ``next_page``
token, which can be used as the ``page_token`` value to
request the next page of list results.
page_size:
Page size. The default page size is the maximum value of 1000
results.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest)
),
)
_sym_db.RegisterMessage(ListTransferConfigsRequest)
ListTransferConfigsResponse = _reflection.GeneratedProtocolMessageType(
"ListTransferConfigsResponse",
(_message.Message,),
dict(
DESCRIPTOR=_LISTTRANSFERCONFIGSRESPONSE,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""The returned list of pipelines in the project.
Attributes:
transfer_configs:
Output only. The stored pipeline transfer configurations.
next_page_token:
Output only. The next-pagination token. For multiple-page list
results, this token can be used as the
``ListTransferConfigsRequest.page_token`` to request the next
page of list results.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse)
),
)
_sym_db.RegisterMessage(ListTransferConfigsResponse)
ListTransferRunsRequest = _reflection.GeneratedProtocolMessageType(
"ListTransferRunsRequest",
(_message.Message,),
dict(
DESCRIPTOR=_LISTTRANSFERRUNSREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to list data transfer runs. UI can use this method to
show/filter specific data transfer runs. The data source can use this
method to request all scheduled transfer runs.
Attributes:
parent:
Name of transfer configuration for which transfer runs should
be retrieved. Format of transfer configuration resource name
is: ``projects/{project_id}/transferConfigs/{config_id}``.
states:
When specified, only transfer runs with requested states are
returned.
page_token:
Pagination token, which can be used to request a specific page
of ``ListTransferRunsRequest`` list results. For multiple-page
results, ``ListTransferRunsResponse`` outputs a ``next_page``
token, which can be used as the ``page_token`` value to
request the next page of list results.
page_size:
Page size. The default page size is the maximum value of 1000
results.
run_attempt:
Indicates how run attempts are to be pulled.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest)
),
)
_sym_db.RegisterMessage(ListTransferRunsRequest)
ListTransferRunsResponse = _reflection.GeneratedProtocolMessageType(
"ListTransferRunsResponse",
(_message.Message,),
dict(
DESCRIPTOR=_LISTTRANSFERRUNSRESPONSE,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""The returned list of pipelines in the project.
Attributes:
transfer_runs:
Output only. The stored pipeline transfer runs.
next_page_token:
Output only. The next-pagination token. For multiple-page list
results, this token can be used as the
``ListTransferRunsRequest.page_token`` to request the next
page of list results.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse)
),
)
_sym_db.RegisterMessage(ListTransferRunsResponse)
ListTransferLogsRequest = _reflection.GeneratedProtocolMessageType(
"ListTransferLogsRequest",
(_message.Message,),
dict(
DESCRIPTOR=_LISTTRANSFERLOGSREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to get user facing log messages associated with data transfer
run.
Attributes:
parent:
Transfer run name in the form: ``projects/{project_id}/transfe
rConfigs/{config_Id}/runs/{run_id}``.
page_token:
Pagination token, which can be used to request a specific page
of ``ListTransferLogsRequest`` list results. For multiple-page
results, ``ListTransferLogsResponse`` outputs a ``next_page``
token, which can be used as the ``page_token`` value to
request the next page of list results.
page_size:
Page size. The default page size is the maximum value of 1000
results.
message_types:
Message types to return. If not populated - INFO, WARNING and
ERROR messages are returned.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest)
),
)
_sym_db.RegisterMessage(ListTransferLogsRequest)
ListTransferLogsResponse = _reflection.GeneratedProtocolMessageType(
"ListTransferLogsResponse",
(_message.Message,),
dict(
DESCRIPTOR=_LISTTRANSFERLOGSRESPONSE,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""The returned list transfer run messages.
Attributes:
transfer_messages:
Output only. The stored pipeline transfer messages.
next_page_token:
Output only. The next-pagination token. For multiple-page list
results, this token can be used as the
``GetTransferRunLogRequest.page_token`` to request the next
page of list results.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse)
),
)
_sym_db.RegisterMessage(ListTransferLogsResponse)
CheckValidCredsRequest = _reflection.GeneratedProtocolMessageType(
"CheckValidCredsRequest",
(_message.Message,),
dict(
DESCRIPTOR=_CHECKVALIDCREDSREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to determine whether the user has valid credentials. This
method is used to limit the number of OAuth popups in the user
interface. The user id is inferred from the API call context. If the
data source has the Google+ authorization type, this method returns
false, as it cannot be determined whether the credentials are already
valid merely based on the user id.
Attributes:
name:
The data source in the form:
``projects/{project_id}/dataSources/{data_source_id}``
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest)
),
)
_sym_db.RegisterMessage(CheckValidCredsRequest)
CheckValidCredsResponse = _reflection.GeneratedProtocolMessageType(
"CheckValidCredsResponse",
(_message.Message,),
dict(
DESCRIPTOR=_CHECKVALIDCREDSRESPONSE,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A response indicating whether the credentials exist and are valid.
Attributes:
has_valid_creds:
If set to ``true``, the credentials exist and are valid.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse)
),
)
_sym_db.RegisterMessage(CheckValidCredsResponse)
ScheduleTransferRunsRequest = _reflection.GeneratedProtocolMessageType(
"ScheduleTransferRunsRequest",
(_message.Message,),
dict(
DESCRIPTOR=_SCHEDULETRANSFERRUNSREQUEST,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A request to schedule transfer runs for a time range.
Attributes:
parent:
Transfer configuration name in the form:
``projects/{project_id}/transferConfigs/{config_id}``.
start_time:
Start time of the range of transfer runs. For example,
``"2017-05-25T00:00:00+00:00"``.
end_time:
End time of the range of transfer runs. For example,
``"2017-05-30T00:00:00+00:00"``.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest)
),
)
_sym_db.RegisterMessage(ScheduleTransferRunsRequest)
ScheduleTransferRunsResponse = _reflection.GeneratedProtocolMessageType(
"ScheduleTransferRunsResponse",
(_message.Message,),
dict(
DESCRIPTOR=_SCHEDULETRANSFERRUNSRESPONSE,
__module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2",
__doc__="""A response to schedule transfer runs for a time range.
Attributes:
runs:
The transfer runs that were scheduled.
""",
# @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse)
),
)
_sym_db.RegisterMessage(ScheduleTransferRunsResponse)
DESCRIPTOR._options = None
_DATATRANSFERSERVICE = _descriptor.ServiceDescriptor(
name="DataTransferService",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService",
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=4046,
serialized_end=7508,
methods=[
_descriptor.MethodDescriptor(
name="GetDataSource",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.GetDataSource",
index=0,
containing_service=None,
input_type=_GETDATASOURCEREQUEST,
output_type=_DATASOURCE,
serialized_options=_b(
"\202\323\344\223\002X\022//v1/{name=projects/*/locations/*/dataSources/*}Z%\022#/v1/{name=projects/*/dataSources/*}"
),
),
_descriptor.MethodDescriptor(
name="ListDataSources",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListDataSources",
index=1,
containing_service=None,
input_type=_LISTDATASOURCESREQUEST,
output_type=_LISTDATASOURCESRESPONSE,
serialized_options=_b(
"\202\323\344\223\002X\022//v1/{parent=projects/*/locations/*}/dataSourcesZ%\022#/v1/{parent=projects/*}/dataSources"
),
),
_descriptor.MethodDescriptor(
name="CreateTransferConfig",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.CreateTransferConfig",
index=2,
containing_service=None,
input_type=_CREATETRANSFERCONFIGREQUEST,
output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG,
serialized_options=_b(
'\202\323\344\223\002\202\001"3/v1/{parent=projects/*/locations/*}/transferConfigs:\017transfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\017transfer_config'
),
),
_descriptor.MethodDescriptor(
name="UpdateTransferConfig",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.UpdateTransferConfig",
index=3,
containing_service=None,
input_type=_UPDATETRANSFERCONFIGREQUEST,
output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG,
serialized_options=_b(
"\202\323\344\223\002\242\0012C/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\017transfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\017transfer_config"
),
),
_descriptor.MethodDescriptor(
name="DeleteTransferConfig",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferConfig",
index=4,
containing_service=None,
input_type=_DELETETRANSFERCONFIGREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=_b(
"\202\323\344\223\002`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*'/v1/{name=projects/*/transferConfigs/*}"
),
),
_descriptor.MethodDescriptor(
name="GetTransferConfig",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferConfig",
index=5,
containing_service=None,
input_type=_GETTRANSFERCONFIGREQUEST,
output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG,
serialized_options=_b(
"\202\323\344\223\002`\0223/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\022'/v1/{name=projects/*/transferConfigs/*}"
),
),
_descriptor.MethodDescriptor(
name="ListTransferConfigs",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferConfigs",
index=6,
containing_service=None,
input_type=_LISTTRANSFERCONFIGSREQUEST,
output_type=_LISTTRANSFERCONFIGSRESPONSE,
serialized_options=_b(
"\202\323\344\223\002`\0223/v1/{parent=projects/*/locations/*}/transferConfigsZ)\022'/v1/{parent=projects/*}/transferConfigs"
),
),
_descriptor.MethodDescriptor(
name="ScheduleTransferRuns",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ScheduleTransferRuns",
index=7,
containing_service=None,
input_type=_SCHEDULETRANSFERRUNSREQUEST,
output_type=_SCHEDULETRANSFERRUNSRESPONSE,
serialized_options=_b(
'\202\323\344\223\002\204\001"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\001*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\001*'
),
),
_descriptor.MethodDescriptor(
name="GetTransferRun",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferRun",
index=8,
containing_service=None,
input_type=_GETTRANSFERRUNREQUEST,
output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN,
serialized_options=_b(
"\202\323\344\223\002n\022:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\022./v1/{name=projects/*/transferConfigs/*/runs/*}"
),
),
_descriptor.MethodDescriptor(
name="DeleteTransferRun",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferRun",
index=9,
containing_service=None,
input_type=_DELETETRANSFERRUNREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=_b(
"\202\323\344\223\002n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}"
),
),
_descriptor.MethodDescriptor(
name="ListTransferRuns",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferRuns",
index=10,
containing_service=None,
input_type=_LISTTRANSFERRUNSREQUEST,
output_type=_LISTTRANSFERRUNSRESPONSE,
serialized_options=_b(
"\202\323\344\223\002n\022:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\022./v1/{parent=projects/*/transferConfigs/*}/runs"
),
),
_descriptor.MethodDescriptor(
name="ListTransferLogs",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferLogs",
index=11,
containing_service=None,
input_type=_LISTTRANSFERLOGSREQUEST,
output_type=_LISTTRANSFERLOGSRESPONSE,
serialized_options=_b(
"\202\323\344\223\002\214\001\022I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\022=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs"
),
),
_descriptor.MethodDescriptor(
name="CheckValidCreds",
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.CheckValidCreds",
index=12,
containing_service=None,
input_type=_CHECKVALIDCREDSREQUEST,
output_type=_CHECKVALIDCREDSRESPONSE,
serialized_options=_b(
'\202\323\344\223\002~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\001*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\001*'
),
),
],
)
_sym_db.RegisterServiceDescriptor(_DATATRANSFERSERVICE)
DESCRIPTOR.services_by_name["DataTransferService"] = _DATATRANSFERSERVICE
# @@protoc_insertion_point(module_scope)
| 38.20373 | 10,536 | 0.647442 | 11,103 | 108,575 | 6.085743 | 0.071422 | 0.03102 | 0.077697 | 0.095427 | 0.711544 | 0.669676 | 0.614518 | 0.582699 | 0.500799 | 0.472221 | 0 | 0.037559 | 0.256744 | 108,575 | 2,841 | 10,537 | 38.217177 | 0.79975 | 0.021257 | 0 | 0.713224 | 1 | 0.004829 | 0.362421 | 0.214823 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.004458 | 0 | 0.004458 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
13a38c16f3079a0534ef8856454f22bf982b480d | 275 | py | Python | sanalberto/models/__init__.py | xJavii8/dafi-system | 6f4a3f77c40424a0c1d7c80236f0bf52be4304d2 | [
"MIT"
] | 7 | 2019-08-03T12:25:18.000Z | 2021-11-02T12:51:33.000Z | sanalberto/models/__init__.py | xJavii8/dafi-system | 6f4a3f77c40424a0c1d7c80236f0bf52be4304d2 | [
"MIT"
] | 11 | 2019-08-20T17:07:37.000Z | 2021-11-23T14:26:07.000Z | sanalberto/models/__init__.py | xJavii8/dafi-system | 6f4a3f77c40424a0c1d7c80236f0bf52be4304d2 | [
"MIT"
] | 4 | 2020-04-06T11:33:02.000Z | 2021-10-31T09:10:53.000Z | from .activities import (
Activity,
ActivityRegistration,
)
from .event import Event
from .polls import (
Poll,
PollDesign,
PollVote,
)
# Names re-exported by ``from sanalberto.models import *``.
__all__ = [
    "Activity",
    "ActivityRegistration",
    "Event",
    "Poll",
    "PollDesign",
    "PollVote",
]
| 13.095238 | 27 | 0.607273 | 22 | 275 | 7.409091 | 0.5 | 0.343558 | 0.269939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.272727 | 275 | 20 | 28 | 13.75 | 0.815 | 0 | 0 | 0 | 0 | 0 | 0.2 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
13a39a198bb1ac7e5168dace2872149ebb01c8f5 | 244 | py | Python | recipes/Python/577509_Flattening_lists/recipe-577509.py | tdiprima/code | 61a74f5f93da087d27c70b2efe779ac6bd2a3b4f | [
"MIT"
] | 2,023 | 2017-07-29T09:34:46.000Z | 2022-03-24T08:00:45.000Z | recipes/Python/577509_Flattening_lists/recipe-577509.py | unhacker/code | 73b09edc1b9850c557a79296655f140ce5e853db | [
"MIT"
] | 32 | 2017-09-02T17:20:08.000Z | 2022-02-11T17:49:37.000Z | recipes/Python/577509_Flattening_lists/recipe-577509.py | unhacker/code | 73b09edc1b9850c557a79296655f140ce5e853db | [
"MIT"
] | 780 | 2017-07-28T19:23:28.000Z | 2022-03-25T20:39:41.000Z | def flatten(lst):
if lst:
car,*cdr=lst
if isinstance(car,(list,tuple)):
if cdr: return flatten(car) + flatten(cdr)
return flatten(car)
if cdr: return [car] + flatten(cdr)
return [car]
| 27.111111 | 54 | 0.536885 | 31 | 244 | 4.225806 | 0.322581 | 0.274809 | 0.167939 | 0.290076 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.340164 | 244 | 8 | 55 | 30.5 | 0.813665 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
13a557c23a1c493e9bfae90f7c32b2452bfd2e9c | 669 | py | Python | hessen/__init__.py | risklayer/corona-landkreis-crawler | 2e82448ff614240365de9493eafa0e6a620ac615 | [
"Unlicense"
] | 12 | 2022-02-23T11:06:06.000Z | 2022-03-04T17:21:44.000Z | hessen/__init__.py | risklayer/corona-landkreis-crawler | 2e82448ff614240365de9493eafa0e6a620ac615 | [
"Unlicense"
] | null | null | null | hessen/__init__.py | risklayer/corona-landkreis-crawler | 2e82448ff614240365de9493eafa0e6a620ac615 | [
"Unlicense"
] | null | null | null | from .dadi import dadi
from .frankfurt import frankfurt
from .giessen import giessen
from .grossgerau import grossgerau
from .hersfeld import hersfeld
from .hochtaunus import hochtaunus
from .kassel import kassel
from .lahndill import lahndill
from .limburg import limburg
from .mainkinzig import mainkinzig
from .marburg import marburg
from .mtk import mtk
from .odenwaldkreis import odenwaldkreis
from .offenbach import offenbach
from .rheingau import rheingau
from .schwalmeder import schwalmeder
from .vogelsberg import vogelsberg
from .waldeck import waldeck
from .werrameissner import werrameissner
from .wetterau import wetterau
from .wiesbaden import wiesbaden
| 30.409091 | 40 | 0.843049 | 84 | 669 | 6.714286 | 0.27381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125561 | 669 | 21 | 41 | 31.857143 | 0.964103 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 3 |
13c55ea8c12744073568ef48ad7a056d5a069440 | 333 | py | Python | server/track_statistic_aggregation/micro_track_statistics/app.py | ndjuric93/MusicOrganizer | ef2e50abfeb1629325274b260f654935fd3f2740 | [
"Apache-2.0"
] | 1 | 2019-09-13T18:05:27.000Z | 2019-09-13T18:05:27.000Z | server/track_statistic_aggregation/micro_track_statistics/app.py | ndjuric93/MusicOrganizer | ef2e50abfeb1629325274b260f654935fd3f2740 | [
"Apache-2.0"
] | 5 | 2021-03-09T00:49:53.000Z | 2022-02-17T20:03:16.000Z | server/track_statistic_aggregation/micro_track_statistics/app.py | ndjuric93/MusicOrganizer | ef2e50abfeb1629325274b260f654935fd3f2740 | [
"Apache-2.0"
] | null | null | null | """ Flask application """
from micro_track_statistics import create_app
from micro_track_statistics.config import SERVER_CONFIG
# Name under which this microservice identifies itself.
SERVICE_NAME = 'MicroTrackStatistics'
if __name__ == '__main__':
    # Build the Flask app via the package's factory; SERVER_CONFIG supplies
    # the remaining keyword options (it contains at least 'host' and 'port').
    app = create_app(name=SERVICE_NAME, **SERVER_CONFIG)
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader --
    # confirm this entry point is not used in production.
    app.run(host=SERVER_CONFIG['host'], port=SERVER_CONFIG['port'], debug=True)
| 30.272727 | 79 | 0.774775 | 43 | 333 | 5.534884 | 0.488372 | 0.201681 | 0.117647 | 0.201681 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 333 | 10 | 80 | 33.3 | 0.804054 | 0.051051 | 0 | 0 | 0 | 0 | 0.116883 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
b917b44f3cb0b6e7c2becb21749be616d8105c51 | 160 | py | Python | anuvaad-etl/anuvaad-nmt-models-fetch/src/utilities/app_context.py | ManavTriesStuff/anuvaad | 6993e3ac78818c171c173ccf8acf962ff57856a4 | [
"MIT"
] | 15 | 2021-01-08T08:42:30.000Z | 2022-03-12T17:52:15.000Z | anuvaad-etl/anuvaad-nmt-models-fetch/src/utilities/app_context.py | ManavTriesStuff/anuvaad | 6993e3ac78818c171c173ccf8acf962ff57856a4 | [
"MIT"
] | 16 | 2021-01-21T01:38:51.000Z | 2022-01-20T08:59:52.000Z | anuvaad-etl/anuvaad-nmt-models-fetch/src/utilities/app_context.py | ManavTriesStuff/anuvaad | 6993e3ac78818c171c173ccf8acf962ff57856a4 | [
"MIT"
] | 25 | 2020-08-26T11:25:38.000Z | 2022-03-29T04:40:21.000Z | MODULE_CONTEXT = {'metadata':{'module':'ANUVAAD-NMT-MODELS'}}
def init():
global app_context
app_context = {
'application_context' : None
} | 22.857143 | 61 | 0.6375 | 17 | 160 | 5.764706 | 0.705882 | 0.204082 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2125 | 160 | 7 | 62 | 22.857143 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0.31677 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0 | 0.166667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
b95ac92d737ddca479e0436400b7a98edef75251 | 1,529 | py | Python | lib/coginvasion/base/ToontownIntervals.py | theclashingfritz/Cog-Invasion-Online-Dump | 2561abbacb3e2e288e06f3f04b935b5ed589c8f8 | [
"Apache-2.0"
] | 1 | 2020-03-12T16:44:10.000Z | 2020-03-12T16:44:10.000Z | lib/coginvasion/base/ToontownIntervals.py | theclashingfritz/Cog-Invasion-Online-Dump | 2561abbacb3e2e288e06f3f04b935b5ed589c8f8 | [
"Apache-2.0"
] | null | null | null | lib/coginvasion/base/ToontownIntervals.py | theclashingfritz/Cog-Invasion-Online-Dump | 2561abbacb3e2e288e06f3f04b935b5ed589c8f8 | [
"Apache-2.0"
] | null | null | null | # uncompyle6 version 3.2.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.15 (v2.7.15:ca079a3ea3, Apr 30 2018, 16:30:26) [MSC v.1500 64 bit (AMD64)]
# Embedded file name: lib.coginvasion.base.ToontownIntervals
from direct.interval.MetaInterval import Sequence
from direct.interval.FunctionInterval import Wait, Func
# Default duration (seconds) of each half of a GUI pulse scale interval.
PULSE_GUI_DURATION = 0.2
# Relative scale change of a pulse: targets are scale * (1 +/- this value).
# NOTE(review): also reused as getPulseIval's default *duration* below --
# looks inconsistent with PULSE_GUI_DURATION; confirm intent.
PULSE_GUI_CHANGE = 0.333
def cleanup(name):
    """Remove any task registered under *name* from the global ``taskMgr``.

    NOTE(review): ``taskMgr`` is assumed to be the Panda3D builtin task
    manager injected at runtime -- confirm.
    """
    taskMgr.remove(name)
def start(ival):
    """Start *ival* after cancelling any task registered under its name.

    Returns the interval so the call can be chained or stored.
    """
    name = ival.getName()
    cleanup(name)
    ival.start()
    return ival
def loop(ival):
    """Loop *ival* after cancelling any task registered under its name.

    Returns the interval so the call can be chained or stored.
    """
    name = ival.getName()
    cleanup(name)
    ival.loop()
    return ival
def getPulseLargerIval(np, name, duration=PULSE_GUI_DURATION, scale=1):
    """Build an interval pulsing *np* larger (by PULSE_GUI_CHANGE) and back to *scale*."""
    factor = 1 + PULSE_GUI_CHANGE
    return getPulseIval(np, name, factor, duration=duration, scale=scale)
def getPulseSmallerIval(np, name, duration=PULSE_GUI_DURATION, scale=1):
    """Build an interval pulsing *np* smaller (by PULSE_GUI_CHANGE) and back to *scale*."""
    factor = 1 - PULSE_GUI_CHANGE
    return getPulseIval(np, name, factor, duration=duration, scale=scale)
def getPulseIval(np, name, change, duration=PULSE_GUI_CHANGE, scale=1):
    """Scale *np* to ``scale * change`` and then back to *scale*.

    Each leg lasts *duration* seconds; the sequence auto-finishes.
    NOTE(review): the default duration is PULSE_GUI_CHANGE, not
    PULSE_GUI_DURATION -- preserved as-is, confirm it is intentional.
    """
    out_leg = np.scaleInterval(duration, scale * change, blendType='easeOut')
    back_leg = np.scaleInterval(duration, scale, blendType='easeIn')
    return Sequence(out_leg, back_leg, name=name, autoFinish=1)
def getPresentGuiIval(np, name, waitDuration=0.5, moveDuration=1.0, parent=aspect2d, startPos=(0, 0, 0)):
    """Present a GUI node: show it at *startPos*, pulse it in, pause, then slide it home.

    The node's current position is captured as the destination before it
    is repositioned to *startPos* (relative to *parent*).
    """
    endPos = np.getPos()  # remember where the node should end up
    np.setPos(parent, startPos[0], startPos[1], startPos[2])
    pop_in = getPulseLargerIval(np, '', scale=np.getScale())
    slide_home = np.posInterval(moveDuration, endPos, blendType='easeInOut')
    return Sequence(
        Func(np.show),
        pop_in,
        Wait(waitDuration),
        slide_home,
        name=name,
        autoFinish=1,
    )
b964fef96d4df4b12576879746c7599001c2af8b | 112 | py | Python | seed_stage_based_messaging/__init__.py | praekeltfoundation/seed-stage-based-messaging | c1d39601c0d16fb32cebe7c2e288076c1dc4225b | [
"BSD-3-Clause"
] | 1 | 2017-08-17T14:17:53.000Z | 2017-08-17T14:17:53.000Z | seed_stage_based_messaging/__init__.py | praekelt/seed-stage-based-messaging | c1d39601c0d16fb32cebe7c2e288076c1dc4225b | [
"BSD-3-Clause"
] | 69 | 2016-02-19T06:58:00.000Z | 2018-11-26T09:43:42.000Z | seed_stage_based_messaging/__init__.py | praekeltfoundation/seed-stage-based-messaging | c1d39601c0d16fb32cebe7c2e288076c1dc4225b | [
"BSD-3-Clause"
] | 2 | 2016-09-28T09:32:00.000Z | 2017-08-18T06:18:36.000Z | from .celery import app as celery_app
__version__ = "0.13.1"
VERSION = __version__
__all__ = ("celery_app",)
| 14 | 37 | 0.723214 | 16 | 112 | 4.1875 | 0.625 | 0.268657 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.042553 | 0.160714 | 112 | 7 | 38 | 16 | 0.670213 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
b96d9daf1d0d4ccd91c98af2bc4cc3bb5fb75a47 | 10,175 | py | Python | examples/2-benchmark/fock_multigrid.py | QuESt-Calculator/pyscf | 0ed03633b699505c7278f1eb501342667d0aa910 | [
"Apache-2.0"
] | 8 | 2019-10-13T01:21:00.000Z | 2021-03-18T15:22:44.000Z | examples/2-benchmark/fock_multigrid.py | QuESt-Calculator/pyscf | 0ed03633b699505c7278f1eb501342667d0aa910 | [
"Apache-2.0"
] | 36 | 2018-08-22T19:44:03.000Z | 2020-05-09T10:02:36.000Z | examples/2-benchmark/fock_multigrid.py | QuESt-Calculator/pyscf | 0ed03633b699505c7278f1eb501342667d0aa910 | [
"Apache-2.0"
] | 23 | 2020-05-28T00:49:22.000Z | 2021-04-21T15:04:19.000Z | #!/usr/bin/env python
import os
import time
import numpy as np
import pyscf
from pyscf.pbc.dft import multigrid
log = pyscf.lib.logger.Logger(verbose=5)
with open('/proc/cpuinfo') as f:
for line in f:
if 'model name' in line:
log.note(line[:-1])
break
with open('/proc/meminfo') as f:
log.note(f.readline()[:-1])
log.note('OMP_NUM_THREADS=%s\n', os.environ.get('OMP_NUM_THREADS', None))
boxlen = 12.4138
cell0 = pyscf.M(a = np.eye(3) * boxlen,
atom = """
O 12.235322 1.376642 10.869880
O 6.445390 3.706940 8.650794
O 0.085977 2.181322 8.276663
O 12.052554 2.671366 2.147199
O 12.250036 4.190930 12.092014
O 7.187422 0.959062 4.733469
O 8.346457 7.210040 4.667644
O 12.361546 11.527875 8.106887
O 3.299984 4.440816 9.193275
O 2.855829 3.759909 6.552815
O 1.392494 6.362753 0.586172
O 1.858645 8.694013 2.068738
O 3.770231 12.094519 8.652183
O 6.432508 3.669828 2.772418
O 1.998724 1.820217 4.876440
O 8.248581 2.404730 6.931303
O 5.753814 3.360029 12.461534
O 11.322212 5.649239 2.236798
O 4.277318 2.113956 10.590808
O 5.405015 3.349247 5.484702
O 6.493278 11.869958 0.684912
O 3.275250 2.346576 2.425241
O 7.981003 6.352512 7.507970
O 5.985990 6.512854 12.194648
O 10.636714 11.856872 12.209540
O 9.312283 3.670384 3.508594
O 1.106885 5.830301 6.638695
O 8.008007 3.326363 10.869818
O 12.403000 9.687405 11.761901
O 4.219782 7.085315 8.153470
O 3.781557 8.203821 11.563272
O 11.088898 4.532081 7.809475
O 10.387548 8.408890 1.017882
O 1.979016 6.418091 10.374159
O 4.660547 0.549666 5.617403
O 8.745880 12.256257 8.089383
O 2.662041 10.489890 0.092980
O 7.241661 10.471815 4.226946
O 2.276827 0.276647 10.810417
O 8.887733 0.946877 1.333885
O 1.943554 8.088552 7.567650
O 9.667942 8.056759 9.868847
O 10.905491 8.339638 6.484782
O 3.507733 4.862402 1.557439
O 8.010457 8.642846 12.055969
O 8.374446 10.035932 6.690309
O 5.635247 6.076875 5.563993
O 11.728434 1.601906 5.079475
O 9.771134 9.814114 3.548703
O 3.944355 10.563450 4.687536
O 0.890357 6.382287 4.065806
O 6.862447 6.425182 2.488202
O 3.813963 6.595122 3.762649
O 6.562448 8.295463 8.807182
O 9.809455 0.143325 3.886553
O 4.117074 11.661225 2.221679
O 5.295317 8.735561 2.763183
O 9.971999 5.379339 5.340378
O 12.254708 8.643874 3.957116
O 2.344274 10.761274 6.829162
O 7.013416 0.643488 10.518797
O 5.152349 10.233624 10.359388
O 11.184278 5.884064 10.298279
O 12.252335 8.974142 9.070831
H 12.415139 2.233125 11.257611
H 11.922476 1.573799 9.986994
H 5.608192 3.371543 8.971482
H 6.731226 3.060851 8.004962
H -0.169205 1.565594 7.589645
H -0.455440 2.954771 8.118939
H 12.125168 2.826463 1.205443
H 12.888828 2.969761 2.504745
H 11.553255 4.386613 11.465566
H 12.818281 4.960808 12.067151
H 7.049495 1.772344 4.247898
H 6.353019 0.798145 5.174047
H 7.781850 7.384852 5.420566
H 9.103203 6.754017 5.035898
H 12.771232 11.788645 8.931744
H 12.018035 10.650652 8.276334
H 3.557245 3.792529 9.848846
H 2.543844 4.884102 9.577958
H 2.320235 4.521250 6.329813
H 2.872128 3.749963 7.509824
H 1.209685 7.121391 1.140501
H 2.238885 6.038801 0.894245
H 2.763109 8.856353 2.336735
H 1.329379 9.047369 2.783755
H 4.315639 11.533388 9.203449
H 3.098742 12.433043 9.244412
H 5.987369 3.448974 3.590530
H 5.813096 3.419344 2.086985
H 1.057126 1.675344 4.969379
H 2.248496 2.292119 5.670892
H 8.508264 1.653337 7.464411
H 8.066015 2.034597 6.067646
H 5.197835 2.915542 11.821572
H 6.630900 3.329981 12.079371
H 10.788986 6.436672 2.127933
H 11.657923 5.463602 1.359832
H 3.544476 1.634958 10.977765
H 4.755770 1.455054 10.087655
H 4.465371 3.375459 5.665294
H 5.682663 4.264430 5.524498
H 6.174815 11.778676 1.582954
H 5.713640 12.089924 0.174999
H 3.476076 1.498708 2.028983
H 2.730229 2.134295 3.182949
H 7.119624 5.936450 7.474030
H 8.536492 5.799405 6.958665
H 5.909499 5.717477 11.667621
H 6.125402 6.196758 13.087330
H 11.203499 12.513536 11.804844
H 10.260930 12.300153 12.970145
H 9.985036 3.927685 2.878172
H 8.545584 3.468329 2.972331
H 1.399882 6.620092 7.093246
H 0.963561 6.112523 5.735345
H 8.067363 3.674002 9.979955
H 8.000737 2.375959 10.756190
H 11.821629 10.402510 12.020482
H 12.206854 8.983242 12.379892
H 3.461473 7.606485 7.889688
H 3.844478 6.304711 8.560946
H 3.179884 7.585614 11.148494
H 4.401957 7.652030 12.039573
H 11.573777 5.053211 7.169515
H 10.342076 4.186083 7.320831
H 10.065640 8.919194 1.760981
H 9.629585 8.322499 0.439729
H 1.396302 6.546079 9.625630
H 1.405516 6.479759 11.138049
H 4.024008 1.232518 5.405828
H 4.736858 0.579881 6.571077
H 9.452293 12.313381 8.732772
H 8.976559 11.502788 7.545965
H 1.834701 10.012311 0.153462
H 3.295197 9.836403 -0.204175
H 7.056724 11.401702 4.095264
H 6.499038 10.020287 3.825865
H 1.365541 0.487338 11.013887
H 2.501591 -0.428131 11.417871
H 8.644279 1.812362 1.005409
H 8.142674 0.388030 1.112955
H 1.272659 8.365063 8.191888
H 2.142485 8.877768 7.063867
H 8.961493 7.826192 9.265523
H 9.227102 8.487654 10.601118
H 10.150144 7.758934 6.392768
H 10.596082 9.187988 6.167290
H 3.463106 4.096188 2.129414
H 3.919461 4.539801 0.755791
H 7.418998 9.394959 12.028876
H 7.430413 7.883095 12.106546
H 7.972905 10.220334 5.841196
H 7.675111 9.631498 7.203725
H 5.332446 6.381336 6.419473
H 5.000025 6.434186 4.943466
H 11.575078 2.271167 4.412540
H 11.219802 0.847030 4.783357
H 8.865342 9.721516 3.843998
H 10.000732 10.719285 3.758898
H 3.186196 10.476397 5.265333
H 4.407331 11.335128 5.013723
H 0.558187 7.255936 3.859331
H 0.341672 5.789383 3.552346
H 7.459933 6.526049 3.229193
H 6.696228 5.483739 2.440372
H 3.864872 6.313007 2.849385
H 2.876419 6.621201 3.953862
H 5.631529 8.079145 8.753997
H 7.003296 7.568245 8.367822
H 9.615413 0.527902 3.031755
H 8.962985 0.109366 4.332162
H 3.825854 11.139182 1.474087
H 4.063988 11.063232 2.967211
H 5.784391 7.914558 2.708486
H 4.780461 8.655167 3.566110
H 10.880659 5.444664 5.046607
H 9.593331 4.687991 4.797350
H 11.562317 8.960134 3.376765
H 11.926084 8.816948 4.839320
H 2.856874 11.297981 7.433660
H 1.492332 11.195517 6.786033
H 7.145820 0.090200 9.749009
H 7.227275 0.077690 11.260665
H 4.662021 9.538430 10.798155
H 5.994537 9.833472 10.142985
H 10.544299 6.595857 10.301445
H 11.281750 5.653082 9.374494
H 12.103020 8.841164 10.006916
H 11.491592 8.576221 8.647557
""",
basis = 'gth-tzv2p',
pseudo = 'gth-pade',
max_memory = 50000,
precision = 1e-6)
for xc in ('lsda', 'pbe'):
for images in ([1,1,1], [2,1,1], [2,2,1], [2,2,2]):
cell = pbc.tools.super_cell(cell0, images)
nao = cell.nao
log.note('nao = %d', nao)
dm = np.random.random((nao,nao))
dm = dm + dm.T
mf = cell.RKS().set(xc=xc)
mf.with_df = multigrid.MultiGridFFTDF(cell)
cpu0 = time.clock(), time.time()
v = mf.get_veff(cell, dm)
log.timer('Fock build (xc=%s, nao=%d)' % (xc, nao), *cpu0)
| 43.297872 | 73 | 0.487666 | 1,519 | 10,175 | 3.261356 | 0.444371 | 0.005248 | 0.004845 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.72884 | 0.435676 | 10,175 | 234 | 74 | 43.482906 | 0.133925 | 0.001966 | 0 | 0 | 0 | 0 | 0.901517 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.022124 | 0 | 0.022124 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
b996af58c01bdc1ff488178f5c25d4e120a3c6a7 | 9,066 | py | Python | maya/analytics/analyticNodeTypes.py | arjun-namdeo/py_stubs | 605bb167e239978f5417f3f1fc1f5c12e2a243cc | [
"MIT"
] | null | null | null | maya/analytics/analyticNodeTypes.py | arjun-namdeo/py_stubs | 605bb167e239978f5417f3f1fc1f5c12e2a243cc | [
"MIT"
] | null | null | null | maya/analytics/analyticNodeTypes.py | arjun-namdeo/py_stubs | 605bb167e239978f5417f3f1fc1f5c12e2a243cc | [
"MIT"
] | null | null | null | """
Analytic class for examining node type distribution. This analytic collects
the number of each node type in the scene.
All of the persistent and default nodes are skipped unless they have a new
connection. The way these two things are measured is different by necessity
for the cases of analyzing a file that is read and analyzing the current scene.
If the file is being read:
- persistent and default nodes are defined as any node present before the
file is loaded
- exceptions are made if a new connection is formed to a persistent or
default node after the file is loaded
If the current scene is used:
- persistent and default nodes are taken to be those marked as such by the
Maya 'ls' command. This won't include any special persistent nodes
created after-the-fact, such as those a newly loaded plug-in might create.
- exceptions are made if there is any connection at all to these default
or persistent nodes to a scene node.
If the 'summary' option is used then the output includes a dictionary
consisting of NODE_TYPE keys with value equal to the number of nodes of that
type in the scene, not including default node types. Only node types with at
least 1 node of that type are included.
"summary" : {
"transform" : 3,
"mesh" : 1
}
For normal output the output is a dictionary whose keys are the node types and
the values are a list of nodes of that type. The information is put into an
object named "node_types". This avoids the potential for a name conflict
between the object "summary" and a node type also named "summary".
"nodeTypes" : {
"transform" : ["transform1", "transform2", "group1"],
"mesh" : ["cubeShape1"]
}
If the 'details' option is used then the output is arranged hierarchically by
node type layers instead of a flat dictionary.
"nodeTypeTree" : {
"ROOT_NODE" : {
"nodes" : [],
"children" : {
"CHILD_NODE" : {
"nodes" : [],
"children" : {
"GRANDCHILD_NODE_TYPE1" : {
"nodes" : ["GC1_NODE_NAME],
"children" : []
},
"GRANDCHILD_NODE_TYPE2" : {
"nodes" : ["GC2_NODE_NAME],
"children" : []
}
}
}
}
}
}
If the analytic-specific option 'use_defaults' is used then the default nodes
will be included in the output.
"""
from maya.analytics.decorators import addHelp
from maya.analytics.dg_utilities import node_type_hierarchy_list
from maya.analytics.dg_utilities import default_nodes_and_connections
from maya.analytics.dg_utilities import node_level_connections
from maya.analytics.BaseAnalytic import BaseAnalytic
from maya.analytics.decorators import makeAnalytic
from maya.analytics.decorators import addMethodDocs
class analyticNodeTypes(BaseAnalytic):
"""
This class provides scene stats collection on node types.
"""
def __init__(self):
"""
Initialize the persistent class members
default_nodes: Set of all default nodes
default_node_connections: Set of (src,dst) pairs for all connections
between default nodes.
"""
pass
def establish_baseline(self):
"""
This is run on an empty scene, to find all of the nodes/node types
present by default. They will all be ignored for the purposes of
the analytic since they are not relevant to scene contents.
"""
pass
def run(self):
"""
Generates the number of nodes of each type in a scene in the
CSV form "node_type","Count", ordered from most frequent to least
frequent.
If the 'details' option is set then insert two extra columns:
"Depth" containing the number of parents the given node type has,
"Hierarchy" containing a "|"-separated string with all of the
node types above that one in the hierarchy, starting with it
and working upwards.
It will also include lines for all of the node types that have no
corresponding nodes in the scene, signified by a "Count" of 0.
"""
pass
def help():
"""
Call this method to print the class documentation, including all methods.
"""
pass
ANALYTIC_NAME = 'NodeTypes'
KEY_CHILDREN = 'children'
KEY_NODES = 'nodes'
KEY_NODE_TYPES = 'nodeTypes'
__fulldocs__ = 'This class provides scene stats collection on node types.\nBase class for output for analytics.\n\nThe default location for the anlaytic output is in a subdirectory\ncalled \'MayaAnalytics\' in your temp directory. You can change that\nat any time by calling set_output_directory().\n\nClass static member:\n ANALYTIC_NAME : Name of the analytic\n\nClass members:\n directory : Directory the output will go to\n is_static : True means this analytic doesn\'t require a file to run\n logger : Logging object for errors, warnings, and messages\n plug_namer : Object creating plug names, possibly anonymous\n node_namer : Object creating node names, possibly anonymous\n csv_output : Location to store legacy CSV output\n plug_namer : Set by option \'anonymous\' - if True then make plug names anonymous\n node_namer : Set by option \'anonymous\' - if True then make node names anonymous\n __options : List of per-analytic options\n\n\tMethods\n\t-------\n\tdebug : Utility to standardize debug messages coming from analytics.\n\n\terror : Utility to standardize errors coming from analytics.\n\n\testablish_baseline : This is run on an empty scene, to find all of the nodes/node types\n\t present by default. They will all be ignored for the purposes of\n\t the analytic since they are not relevant to scene contents.\n\n\thelp : Call this method to print the class documentation, including all methods.\n\n\tjson_file : Although an analytic is free to create any set of output files it\n\t wishes there will always be one master JSON file containing the\n\n\tlog : Utility to standardize logging messages coming from analytics.\n\n\tmarker_file : Returns the name of the marker file used to indicate that the\n\t computation of an analytic is in progress. 
If this file remains\n\t in a directory after the analytic has run that means it was\n\t interrupted and the data is not up to date.\n\t \n\t This file provides a safety measure against machines going down\n\t or analytics crashing.\n\n\tname : Get the name of this type of analytic\n\n\toption : Return TRUE if the option specified has been set on this analytic.\n\t option: Name of option to check\n\n\toutput_files : This is used to get the list of files the analytic will generate.\n\t There will always be a JSON file generated which contains at minimum\n\t the timing information. An analytic should override this method only\n\t if they are adding more output files (e.g. a .jpg file).\n\t \n\t This should only be called after the final directory has been set.\n\n\trun : Generates the number of nodes of each type in a scene in the\n\t CSV form "node_type","Count", ordered from most frequent to least\n\t frequent.\n\t \n\t If the \'details\' option is set then insert two extra columns:\n\t "Depth" containing the number of parents the given node type has,\n\t "Hierarchy" containing a "|"-separated string with all of the\n\t node types above that one in the hierarchy, starting with it\n\t and working upwards.\n\t It will also include lines for all of the node types that have no\n\t corresponding nodes in the scene, signified by a "Count" of 0.\n\n\tset_options : Modify the settings controlling the run operation of the analytic.\n\t Override this method if your analytic has some different options\n\t available to it, but be sure to call this parent version after since\n\t it sets common options.\n\n\tset_output_directory : Call this method to set a specific directory as the output location.\n\t The special names \'stdout\' and \'stderr\' are recognized as the\n\t output and error streams respectively rather than a directory.\n\n\twarning : Utility to standardize warnings coming from analytics.\n'
is_static = False
OPTION_DETAILS = 'details'
OPTION_SUMMARY = 'summary'
OPTION_USE_DEFAULTS = 'use_defaults'
| 57.018868 | 4,135 | 0.656519 | 1,297 | 9,066 | 4.537394 | 0.25983 | 0.010535 | 0.009516 | 0.013594 | 0.319626 | 0.272897 | 0.248768 | 0.235854 | 0.235854 | 0.190314 | 0 | 0.002006 | 0.285131 | 9,066 | 158 | 4,136 | 57.379747 | 0.906033 | 0.407567 | 0 | 0.16 | 0 | 0.08 | 0.11549 | 0.006243 | 0 | 0 | 0 | 0 | 0 | 1 | 0.16 | false | 0.16 | 0.28 | 0 | 0.72 | 0.04 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 3 |
b99e5d04ef4bd6eb8262702bcadaaf576804112b | 428 | py | Python | action_plugins/run_once.py | locationlabs/ansible-action-plugins | 4fc4d371d985919aae8c34b947f3600f327640be | [
"MIT"
] | 18 | 2015-02-10T06:35:12.000Z | 2020-04-29T15:00:58.000Z | action_plugins/run_once.py | locationlabs/ansible-action-plugins | 4fc4d371d985919aae8c34b947f3600f327640be | [
"MIT"
] | 2 | 2016-07-20T19:16:13.000Z | 2018-11-10T15:31:22.000Z | action_plugins/run_once.py | locationlabs/ansible-action-plugins | 4fc4d371d985919aae8c34b947f3600f327640be | [
"MIT"
] | 5 | 2016-08-06T17:10:54.000Z | 2021-09-08T09:41:36.000Z | from ansible.runner.return_data import ReturnData
class ActionModule(object):
def __init__(self, runner):
self.runner = runner
self.runner.run_once = True
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
return ReturnData(conn=conn,
comm_ok=True,
result=dict(failed=False, changed=False, msg="YOLO"))
| 32.923077 | 92 | 0.626168 | 51 | 428 | 5.058824 | 0.666667 | 0.116279 | 0.124031 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.273364 | 428 | 12 | 93 | 35.666667 | 0.829582 | 0 | 0 | 0 | 0 | 0 | 0.009346 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0.111111 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
b9a08e1dac5235a3265aee8b8afe5e1b2071adc9 | 45,262 | py | Python | object_detection/protos/center_net_pb2.py | cattypkung/TFObjectDetectionAPIExtractor | d2515572cc0c4f507b6efd04f6d07c87c00173e8 | [
"MIT"
] | null | null | null | object_detection/protos/center_net_pb2.py | cattypkung/TFObjectDetectionAPIExtractor | d2515572cc0c4f507b6efd04f6d07c87c00173e8 | [
"MIT"
] | null | null | null | object_detection/protos/center_net_pb2.py | cattypkung/TFObjectDetectionAPIExtractor | d2515572cc0c4f507b6efd04f6d07c87c00173e8 | [
"MIT"
] | null | null | null | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: object_detection/protos/center_net.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from object_detection.protos import image_resizer_pb2 as object__detection_dot_protos_dot_image__resizer__pb2
from object_detection.protos import losses_pb2 as object__detection_dot_protos_dot_losses__pb2
from object_detection.protos import post_processing_pb2 as object__detection_dot_protos_dot_post__processing__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='object_detection/protos/center_net.proto',
package='object_detection.protos',
syntax='proto2',
serialized_pb=_b('\n(object_detection/protos/center_net.proto\x12\x17object_detection.protos\x1a+object_detection/protos/image_resizer.proto\x1a$object_detection/protos/losses.proto\x1a-object_detection/protos/post_processing.proto\"\x86\x18\n\tCenterNet\x12\x13\n\x0bnum_classes\x18\x01 \x01(\x05\x12M\n\x11\x66\x65\x61ture_extractor\x18\x02 \x01(\x0b\x32\x32.object_detection.protos.CenterNetFeatureExtractor\x12<\n\rimage_resizer\x18\x03 \x01(\x0b\x32%.object_detection.protos.ImageResizer\x12\x1c\n\ruse_depthwise\x18\r \x01(\x08:\x05\x66\x61lse\x12%\n\x16\x63ompute_heatmap_sparse\x18\x0f \x01(\x08:\x05\x66\x61lse\x12Q\n\x15object_detection_task\x18\x04 \x01(\x0b\x32\x32.object_detection.protos.CenterNet.ObjectDetection\x12S\n\x14object_center_params\x18\x05 \x01(\x0b\x32\x35.object_detection.protos.CenterNet.ObjectCenterParams\x12\x1f\n\x17keypoint_label_map_path\x18\x06 \x01(\t\x12W\n\x18keypoint_estimation_task\x18\x07 \x03(\x0b\x32\x35.object_detection.protos.CenterNet.KeypointEstimation\x12O\n\x14mask_estimation_task\x18\x08 \x01(\x0b\x32\x31.object_detection.protos.CenterNet.MaskEstimation\x12Y\n\x19\x64\x65nsepose_estimation_task\x18\t \x01(\x0b\x32\x36.object_detection.protos.CenterNet.DensePoseEstimation\x12Q\n\x15track_estimation_task\x18\n \x01(\x0b\x32\x32.object_detection.protos.CenterNet.TrackEstimation\x12Y\n\x14temporal_offset_task\x18\x0c \x01(\x0b\x32;.object_detection.protos.CenterNet.TemporalOffsetEstimation\x12@\n\x0fpost_processing\x18\x18 \x01(\x0b\x32\'.object_detection.protos.PostProcessing\x1a\xcb\x01\n\x0fObjectDetection\x12\x1b\n\x10task_loss_weight\x18\x01 \x01(\x02:\x01\x31\x12\x1d\n\x12offset_loss_weight\x18\x03 \x01(\x02:\x01\x31\x12\x1e\n\x11scale_loss_weight\x18\x04 \x01(\x02:\x03\x30.1\x12\x44\n\x11localization_loss\x18\x08 
\x01(\x0b\x32).object_detection.protos.LocalizationLossJ\x04\x08\x02\x10\x03J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x07\x10\x08\x1a\x8e\x02\n\x12ObjectCenterParams\x12$\n\x19object_center_loss_weight\x18\x01 \x01(\x02:\x01\x31\x12H\n\x13\x63lassification_loss\x18\x02 \x01(\x0b\x32+.object_detection.protos.ClassificationLoss\x12 \n\x11heatmap_bias_init\x18\x03 \x01(\x02:\x05-2.19\x12 \n\x13min_box_overlap_iou\x18\x04 \x01(\x02:\x03\x30.7\x12 \n\x13max_box_predictions\x18\x05 \x01(\x05:\x03\x31\x30\x30\x12\"\n\x13use_labeled_classes\x18\x06 \x01(\x08:\x05\x66\x61lse\x1a\xac\x06\n\x12KeypointEstimation\x12\x11\n\ttask_name\x18\x01 \x01(\t\x12\x1b\n\x10task_loss_weight\x18\x02 \x01(\x02:\x01\x31\x12+\n\x04loss\x18\x03 \x01(\x0b\x32\x1d.object_detection.protos.Loss\x12\x1b\n\x13keypoint_class_name\x18\x04 \x01(\t\x12l\n\x15keypoint_label_to_std\x18\x05 \x03(\x0b\x32M.object_detection.protos.CenterNet.KeypointEstimation.KeypointLabelToStdEntry\x12*\n\x1fkeypoint_regression_loss_weight\x18\x06 \x01(\x02:\x01\x31\x12\'\n\x1ckeypoint_heatmap_loss_weight\x18\x07 \x01(\x02:\x01\x31\x12&\n\x1bkeypoint_offset_loss_weight\x18\x08 \x01(\x02:\x01\x31\x12 \n\x11heatmap_bias_init\x18\t \x01(\x02:\x05-2.19\x12/\n\"keypoint_candidate_score_threshold\x18\n \x01(\x02:\x03\x30.1\x12(\n\x1bnum_candidates_per_keypoint\x18\x0b \x01(\x05:\x03\x31\x30\x30\x12$\n\x19peak_max_pool_kernel_size\x18\x0c \x01(\x05:\x01\x33\x12%\n\x18unmatched_keypoint_score\x18\r \x01(\x02:\x03\x30.1\x12\x16\n\tbox_scale\x18\x0e \x01(\x02:\x03\x31.2\x12#\n\x16\x63\x61ndidate_search_scale\x18\x0f \x01(\x02:\x03\x30.3\x12,\n\x16\x63\x61ndidate_ranking_mode\x18\x10 \x01(\t:\x0cmin_distance\x12\x1d\n\x12offset_peak_radius\x18\x11 \x01(\x05:\x01\x30\x12\"\n\x13per_keypoint_offset\x18\x12 \x01(\x08:\x05\x66\x61lse\x1a\x39\n\x17KeypointLabelToStdEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\xea\x01\n\x0eMaskEstimation\x12\x1b\n\x10task_loss_weight\x18\x01 
\x01(\x02:\x01\x31\x12H\n\x13\x63lassification_loss\x18\x02 \x01(\x0b\x32+.object_detection.protos.ClassificationLoss\x12\x18\n\x0bmask_height\x18\x04 \x01(\x05:\x03\x32\x35\x36\x12\x17\n\nmask_width\x18\x05 \x01(\x05:\x03\x32\x35\x36\x12\x1c\n\x0fscore_threshold\x18\x06 \x01(\x02:\x03\x30.5\x12 \n\x11heatmap_bias_init\x18\x03 \x01(\x02:\x05-2.19\x1a\x8f\x02\n\x13\x44\x65nsePoseEstimation\x12\x1b\n\x10task_loss_weight\x18\x01 \x01(\x02:\x01\x31\x12\x10\n\x08\x63lass_id\x18\x02 \x01(\x05\x12+\n\x04loss\x18\x03 \x01(\x0b\x32\x1d.object_detection.protos.Loss\x12\x15\n\tnum_parts\x18\x04 \x01(\x05:\x02\x32\x34\x12\x1b\n\x10part_loss_weight\x18\x05 \x01(\x02:\x01\x31\x12!\n\x16\x63oordinate_loss_weight\x18\x06 \x01(\x02:\x01\x31\x12#\n\x15upsample_to_input_res\x18\x07 \x01(\x08:\x04true\x12 \n\x11heatmap_bias_init\x18\x08 \x01(\x02:\x05-2.19\x1a\xc7\x01\n\x0fTrackEstimation\x12\x1b\n\x10task_loss_weight\x18\x01 \x01(\x02:\x01\x31\x12\x15\n\rnum_track_ids\x18\x02 \x01(\x05\x12\x1c\n\x0freid_embed_size\x18\x03 \x01(\x05:\x03\x31\x32\x38\x12\x18\n\rnum_fc_layers\x18\x04 \x01(\x05:\x01\x31\x12H\n\x13\x63lassification_loss\x18\x05 \x01(\x0b\x32+.object_detection.protos.ClassificationLoss\x1a}\n\x18TemporalOffsetEstimation\x12\x1b\n\x10task_loss_weight\x18\x01 \x01(\x02:\x01\x31\x12\x44\n\x11localization_loss\x18\x02 \x01(\x0b\x32).object_detection.protos.LocalizationLoss\"\x91\x01\n\x19\x43\x65nterNetFeatureExtractor\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x15\n\rchannel_means\x18\x02 \x03(\x02\x12\x14\n\x0c\x63hannel_stds\x18\x03 \x03(\x02\x12\x1b\n\x0c\x62gr_ordering\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\ruse_depthwise\x18\x05 \x01(\x08:\x05\x66\x61lse')
,
dependencies=[object__detection_dot_protos_dot_image__resizer__pb2.DESCRIPTOR,object__detection_dot_protos_dot_losses__pb2.DESCRIPTOR,object__detection_dot_protos_dot_post__processing__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_CENTERNET_OBJECTDETECTION = _descriptor.Descriptor(
name='ObjectDetection',
full_name='object_detection.protos.CenterNet.ObjectDetection',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_loss_weight', full_name='object_detection.protos.CenterNet.ObjectDetection.task_loss_weight', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='offset_loss_weight', full_name='object_detection.protos.CenterNet.ObjectDetection.offset_loss_weight', index=1,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='scale_loss_weight', full_name='object_detection.protos.CenterNet.ObjectDetection.scale_loss_weight', index=2,
number=4, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='localization_loss', full_name='object_detection.protos.CenterNet.ObjectDetection.localization_loss', index=3,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1147,
serialized_end=1350,
)
_CENTERNET_OBJECTCENTERPARAMS = _descriptor.Descriptor(
name='ObjectCenterParams',
full_name='object_detection.protos.CenterNet.ObjectCenterParams',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='object_center_loss_weight', full_name='object_detection.protos.CenterNet.ObjectCenterParams.object_center_loss_weight', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='classification_loss', full_name='object_detection.protos.CenterNet.ObjectCenterParams.classification_loss', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='heatmap_bias_init', full_name='object_detection.protos.CenterNet.ObjectCenterParams.heatmap_bias_init', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(-2.19),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_box_overlap_iou', full_name='object_detection.protos.CenterNet.ObjectCenterParams.min_box_overlap_iou', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.7),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_box_predictions', full_name='object_detection.protos.CenterNet.ObjectCenterParams.max_box_predictions', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=100,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_labeled_classes', full_name='object_detection.protos.CenterNet.ObjectCenterParams.use_labeled_classes', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1353,
serialized_end=1623,
)
_CENTERNET_KEYPOINTESTIMATION_KEYPOINTLABELTOSTDENTRY = _descriptor.Descriptor(
name='KeypointLabelToStdEntry',
full_name='object_detection.protos.CenterNet.KeypointEstimation.KeypointLabelToStdEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='object_detection.protos.CenterNet.KeypointEstimation.KeypointLabelToStdEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='object_detection.protos.CenterNet.KeypointEstimation.KeypointLabelToStdEntry.value', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2381,
serialized_end=2438,
)
_CENTERNET_KEYPOINTESTIMATION = _descriptor.Descriptor(
name='KeypointEstimation',
full_name='object_detection.protos.CenterNet.KeypointEstimation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_name', full_name='object_detection.protos.CenterNet.KeypointEstimation.task_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='task_loss_weight', full_name='object_detection.protos.CenterNet.KeypointEstimation.task_loss_weight', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss', full_name='object_detection.protos.CenterNet.KeypointEstimation.loss', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keypoint_class_name', full_name='object_detection.protos.CenterNet.KeypointEstimation.keypoint_class_name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keypoint_label_to_std', full_name='object_detection.protos.CenterNet.KeypointEstimation.keypoint_label_to_std', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keypoint_regression_loss_weight', full_name='object_detection.protos.CenterNet.KeypointEstimation.keypoint_regression_loss_weight', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keypoint_heatmap_loss_weight', full_name='object_detection.protos.CenterNet.KeypointEstimation.keypoint_heatmap_loss_weight', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keypoint_offset_loss_weight', full_name='object_detection.protos.CenterNet.KeypointEstimation.keypoint_offset_loss_weight', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='heatmap_bias_init', full_name='object_detection.protos.CenterNet.KeypointEstimation.heatmap_bias_init', index=8,
number=9, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(-2.19),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keypoint_candidate_score_threshold', full_name='object_detection.protos.CenterNet.KeypointEstimation.keypoint_candidate_score_threshold', index=9,
number=10, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_candidates_per_keypoint', full_name='object_detection.protos.CenterNet.KeypointEstimation.num_candidates_per_keypoint', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=100,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='peak_max_pool_kernel_size', full_name='object_detection.protos.CenterNet.KeypointEstimation.peak_max_pool_kernel_size', index=11,
number=12, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=3,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='unmatched_keypoint_score', full_name='object_detection.protos.CenterNet.KeypointEstimation.unmatched_keypoint_score', index=12,
number=13, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='box_scale', full_name='object_detection.protos.CenterNet.KeypointEstimation.box_scale', index=13,
number=14, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1.2),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='candidate_search_scale', full_name='object_detection.protos.CenterNet.KeypointEstimation.candidate_search_scale', index=14,
number=15, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.3),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='candidate_ranking_mode', full_name='object_detection.protos.CenterNet.KeypointEstimation.candidate_ranking_mode', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("min_distance").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='offset_peak_radius', full_name='object_detection.protos.CenterNet.KeypointEstimation.offset_peak_radius', index=16,
number=17, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='per_keypoint_offset', full_name='object_detection.protos.CenterNet.KeypointEstimation.per_keypoint_offset', index=17,
number=18, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_CENTERNET_KEYPOINTESTIMATION_KEYPOINTLABELTOSTDENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1626,
serialized_end=2438,
)
# Generated protocol-buffer descriptor for the nested proto2 message
# object_detection.protos.CenterNet.MaskEstimation.
# NOTE(review): this module is protoc output (see the @@protoc_insertion_point
# markers below) — regenerate from the .proto instead of hand-editing; the
# field numbers and serialized_start/serialized_end offsets must stay in sync.
_CENTERNET_MASKESTIMATION = _descriptor.Descriptor(
name='MaskEstimation',
full_name='object_detection.protos.CenterNet.MaskEstimation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_loss_weight', full_name='object_detection.protos.CenterNet.MaskEstimation.task_loss_weight', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='classification_loss', full_name='object_detection.protos.CenterNet.MaskEstimation.classification_loss', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mask_height', full_name='object_detection.protos.CenterNet.MaskEstimation.mask_height', index=2,
number=4, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=256,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mask_width', full_name='object_detection.protos.CenterNet.MaskEstimation.mask_width', index=3,
number=5, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=256,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='score_threshold', full_name='object_detection.protos.CenterNet.MaskEstimation.score_threshold', index=4,
number=6, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='heatmap_bias_init', full_name='object_detection.protos.CenterNet.MaskEstimation.heatmap_bias_init', index=5,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(-2.19),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2441,
serialized_end=2675,
)
# Generated descriptor for the nested proto2 message
# object_detection.protos.CenterNet.DensePoseEstimation.
# NOTE(review): protoc output — do not hand-edit; regenerate from the .proto.
_CENTERNET_DENSEPOSEESTIMATION = _descriptor.Descriptor(
name='DensePoseEstimation',
full_name='object_detection.protos.CenterNet.DensePoseEstimation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_loss_weight', full_name='object_detection.protos.CenterNet.DensePoseEstimation.task_loss_weight', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='class_id', full_name='object_detection.protos.CenterNet.DensePoseEstimation.class_id', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss', full_name='object_detection.protos.CenterNet.DensePoseEstimation.loss', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_parts', full_name='object_detection.protos.CenterNet.DensePoseEstimation.num_parts', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=24,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='part_loss_weight', full_name='object_detection.protos.CenterNet.DensePoseEstimation.part_loss_weight', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='coordinate_loss_weight', full_name='object_detection.protos.CenterNet.DensePoseEstimation.coordinate_loss_weight', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='upsample_to_input_res', full_name='object_detection.protos.CenterNet.DensePoseEstimation.upsample_to_input_res', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='heatmap_bias_init', full_name='object_detection.protos.CenterNet.DensePoseEstimation.heatmap_bias_init', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(-2.19),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2678,
serialized_end=2949,
)
# Generated descriptor for the nested proto2 message
# object_detection.protos.CenterNet.TrackEstimation.
# NOTE(review): protoc output — do not hand-edit; regenerate from the .proto.
_CENTERNET_TRACKESTIMATION = _descriptor.Descriptor(
name='TrackEstimation',
full_name='object_detection.protos.CenterNet.TrackEstimation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_loss_weight', full_name='object_detection.protos.CenterNet.TrackEstimation.task_loss_weight', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_track_ids', full_name='object_detection.protos.CenterNet.TrackEstimation.num_track_ids', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='reid_embed_size', full_name='object_detection.protos.CenterNet.TrackEstimation.reid_embed_size', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=128,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_fc_layers', full_name='object_detection.protos.CenterNet.TrackEstimation.num_fc_layers', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='classification_loss', full_name='object_detection.protos.CenterNet.TrackEstimation.classification_loss', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2952,
serialized_end=3151,
)
# Generated descriptor for the nested proto2 message
# object_detection.protos.CenterNet.TemporalOffsetEstimation.
# NOTE(review): protoc output — do not hand-edit; regenerate from the .proto.
_CENTERNET_TEMPORALOFFSETESTIMATION = _descriptor.Descriptor(
name='TemporalOffsetEstimation',
full_name='object_detection.protos.CenterNet.TemporalOffsetEstimation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_loss_weight', full_name='object_detection.protos.CenterNet.TemporalOffsetEstimation.task_loss_weight', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='localization_loss', full_name='object_detection.protos.CenterNet.TemporalOffsetEstimation.localization_loss', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3153,
serialized_end=3278,
)
# Generated descriptor for the top-level proto2 message
# object_detection.protos.CenterNet, which aggregates the per-task nested
# messages (ObjectDetection, ObjectCenterParams, KeypointEstimation, ...)
# listed in nested_types below.
# NOTE(review): protoc output — do not hand-edit; regenerate from the .proto.
_CENTERNET = _descriptor.Descriptor(
name='CenterNet',
full_name='object_detection.protos.CenterNet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='num_classes', full_name='object_detection.protos.CenterNet.num_classes', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='feature_extractor', full_name='object_detection.protos.CenterNet.feature_extractor', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='image_resizer', full_name='object_detection.protos.CenterNet.image_resizer', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_depthwise', full_name='object_detection.protos.CenterNet.use_depthwise', index=3,
number=13, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='compute_heatmap_sparse', full_name='object_detection.protos.CenterNet.compute_heatmap_sparse', index=4,
number=15, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='object_detection_task', full_name='object_detection.protos.CenterNet.object_detection_task', index=5,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='object_center_params', full_name='object_detection.protos.CenterNet.object_center_params', index=6,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keypoint_label_map_path', full_name='object_detection.protos.CenterNet.keypoint_label_map_path', index=7,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keypoint_estimation_task', full_name='object_detection.protos.CenterNet.keypoint_estimation_task', index=8,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mask_estimation_task', full_name='object_detection.protos.CenterNet.mask_estimation_task', index=9,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='densepose_estimation_task', full_name='object_detection.protos.CenterNet.densepose_estimation_task', index=10,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='track_estimation_task', full_name='object_detection.protos.CenterNet.track_estimation_task', index=11,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='temporal_offset_task', full_name='object_detection.protos.CenterNet.temporal_offset_task', index=12,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='post_processing', full_name='object_detection.protos.CenterNet.post_processing', index=13,
number=24, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_CENTERNET_OBJECTDETECTION, _CENTERNET_OBJECTCENTERPARAMS, _CENTERNET_KEYPOINTESTIMATION, _CENTERNET_MASKESTIMATION, _CENTERNET_DENSEPOSEESTIMATION, _CENTERNET_TRACKESTIMATION, _CENTERNET_TEMPORALOFFSETESTIMATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=200,
serialized_end=3278,
)
# Generated descriptor for the top-level proto2 message
# object_detection.protos.CenterNetFeatureExtractor.
# NOTE(review): protoc output — do not hand-edit; regenerate from the .proto.
_CENTERNETFEATUREEXTRACTOR = _descriptor.Descriptor(
name='CenterNetFeatureExtractor',
full_name='object_detection.protos.CenterNetFeatureExtractor',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='object_detection.protos.CenterNetFeatureExtractor.type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='channel_means', full_name='object_detection.protos.CenterNetFeatureExtractor.channel_means', index=1,
number=2, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='channel_stds', full_name='object_detection.protos.CenterNetFeatureExtractor.channel_stds', index=2,
number=3, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bgr_ordering', full_name='object_detection.protos.CenterNetFeatureExtractor.bgr_ordering', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_depthwise', full_name='object_detection.protos.CenterNetFeatureExtractor.use_depthwise', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3281,
serialized_end=3426,
)
# Generated wiring: resolve cross-references between the descriptors built
# above — each message-typed field gets its message_type set (losses,
# image-resizer and post-processing come from sibling *_pb2 modules), each
# nested message gets its containing_type, and the two top-level messages are
# registered on the file DESCRIPTOR.
# NOTE(review): protoc output — do not hand-edit; regenerate from the .proto.
_CENTERNET_OBJECTDETECTION.fields_by_name['localization_loss'].message_type = object__detection_dot_protos_dot_losses__pb2._LOCALIZATIONLOSS
_CENTERNET_OBJECTDETECTION.containing_type = _CENTERNET
_CENTERNET_OBJECTCENTERPARAMS.fields_by_name['classification_loss'].message_type = object__detection_dot_protos_dot_losses__pb2._CLASSIFICATIONLOSS
_CENTERNET_OBJECTCENTERPARAMS.containing_type = _CENTERNET
_CENTERNET_KEYPOINTESTIMATION_KEYPOINTLABELTOSTDENTRY.containing_type = _CENTERNET_KEYPOINTESTIMATION
_CENTERNET_KEYPOINTESTIMATION.fields_by_name['loss'].message_type = object__detection_dot_protos_dot_losses__pb2._LOSS
_CENTERNET_KEYPOINTESTIMATION.fields_by_name['keypoint_label_to_std'].message_type = _CENTERNET_KEYPOINTESTIMATION_KEYPOINTLABELTOSTDENTRY
_CENTERNET_KEYPOINTESTIMATION.containing_type = _CENTERNET
_CENTERNET_MASKESTIMATION.fields_by_name['classification_loss'].message_type = object__detection_dot_protos_dot_losses__pb2._CLASSIFICATIONLOSS
_CENTERNET_MASKESTIMATION.containing_type = _CENTERNET
_CENTERNET_DENSEPOSEESTIMATION.fields_by_name['loss'].message_type = object__detection_dot_protos_dot_losses__pb2._LOSS
_CENTERNET_DENSEPOSEESTIMATION.containing_type = _CENTERNET
_CENTERNET_TRACKESTIMATION.fields_by_name['classification_loss'].message_type = object__detection_dot_protos_dot_losses__pb2._CLASSIFICATIONLOSS
_CENTERNET_TRACKESTIMATION.containing_type = _CENTERNET
_CENTERNET_TEMPORALOFFSETESTIMATION.fields_by_name['localization_loss'].message_type = object__detection_dot_protos_dot_losses__pb2._LOCALIZATIONLOSS
_CENTERNET_TEMPORALOFFSETESTIMATION.containing_type = _CENTERNET
_CENTERNET.fields_by_name['feature_extractor'].message_type = _CENTERNETFEATUREEXTRACTOR
_CENTERNET.fields_by_name['image_resizer'].message_type = object__detection_dot_protos_dot_image__resizer__pb2._IMAGERESIZER
_CENTERNET.fields_by_name['object_detection_task'].message_type = _CENTERNET_OBJECTDETECTION
_CENTERNET.fields_by_name['object_center_params'].message_type = _CENTERNET_OBJECTCENTERPARAMS
_CENTERNET.fields_by_name['keypoint_estimation_task'].message_type = _CENTERNET_KEYPOINTESTIMATION
_CENTERNET.fields_by_name['mask_estimation_task'].message_type = _CENTERNET_MASKESTIMATION
_CENTERNET.fields_by_name['densepose_estimation_task'].message_type = _CENTERNET_DENSEPOSEESTIMATION
_CENTERNET.fields_by_name['track_estimation_task'].message_type = _CENTERNET_TRACKESTIMATION
_CENTERNET.fields_by_name['temporal_offset_task'].message_type = _CENTERNET_TEMPORALOFFSETESTIMATION
_CENTERNET.fields_by_name['post_processing'].message_type = object__detection_dot_protos_dot_post__processing__pb2._POSTPROCESSING
DESCRIPTOR.message_types_by_name['CenterNet'] = _CENTERNET
DESCRIPTOR.message_types_by_name['CenterNetFeatureExtractor'] = _CENTERNETFEATUREEXTRACTOR
# Generated: build the concrete Python message classes from the descriptors
# via the reflection machinery (nested task messages are created inline as
# class attributes of CenterNet) and register every class with the symbol
# database so it can be looked up by full name.
# NOTE(review): protoc output — do not hand-edit; regenerate from the .proto.
CenterNet = _reflection.GeneratedProtocolMessageType('CenterNet', (_message.Message,), dict(
ObjectDetection = _reflection.GeneratedProtocolMessageType('ObjectDetection', (_message.Message,), dict(
DESCRIPTOR = _CENTERNET_OBJECTDETECTION,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNet.ObjectDetection)
))
,
ObjectCenterParams = _reflection.GeneratedProtocolMessageType('ObjectCenterParams', (_message.Message,), dict(
DESCRIPTOR = _CENTERNET_OBJECTCENTERPARAMS,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNet.ObjectCenterParams)
))
,
KeypointEstimation = _reflection.GeneratedProtocolMessageType('KeypointEstimation', (_message.Message,), dict(
KeypointLabelToStdEntry = _reflection.GeneratedProtocolMessageType('KeypointLabelToStdEntry', (_message.Message,), dict(
DESCRIPTOR = _CENTERNET_KEYPOINTESTIMATION_KEYPOINTLABELTOSTDENTRY,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNet.KeypointEstimation.KeypointLabelToStdEntry)
))
,
DESCRIPTOR = _CENTERNET_KEYPOINTESTIMATION,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNet.KeypointEstimation)
))
,
MaskEstimation = _reflection.GeneratedProtocolMessageType('MaskEstimation', (_message.Message,), dict(
DESCRIPTOR = _CENTERNET_MASKESTIMATION,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNet.MaskEstimation)
))
,
DensePoseEstimation = _reflection.GeneratedProtocolMessageType('DensePoseEstimation', (_message.Message,), dict(
DESCRIPTOR = _CENTERNET_DENSEPOSEESTIMATION,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNet.DensePoseEstimation)
))
,
TrackEstimation = _reflection.GeneratedProtocolMessageType('TrackEstimation', (_message.Message,), dict(
DESCRIPTOR = _CENTERNET_TRACKESTIMATION,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNet.TrackEstimation)
))
,
TemporalOffsetEstimation = _reflection.GeneratedProtocolMessageType('TemporalOffsetEstimation', (_message.Message,), dict(
DESCRIPTOR = _CENTERNET_TEMPORALOFFSETESTIMATION,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNet.TemporalOffsetEstimation)
))
,
DESCRIPTOR = _CENTERNET,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNet)
))
_sym_db.RegisterMessage(CenterNet)
_sym_db.RegisterMessage(CenterNet.ObjectDetection)
_sym_db.RegisterMessage(CenterNet.ObjectCenterParams)
_sym_db.RegisterMessage(CenterNet.KeypointEstimation)
_sym_db.RegisterMessage(CenterNet.KeypointEstimation.KeypointLabelToStdEntry)
_sym_db.RegisterMessage(CenterNet.MaskEstimation)
_sym_db.RegisterMessage(CenterNet.DensePoseEstimation)
_sym_db.RegisterMessage(CenterNet.TrackEstimation)
_sym_db.RegisterMessage(CenterNet.TemporalOffsetEstimation)
CenterNetFeatureExtractor = _reflection.GeneratedProtocolMessageType('CenterNetFeatureExtractor', (_message.Message,), dict(
DESCRIPTOR = _CENTERNETFEATUREEXTRACTOR,
__module__ = 'object_detection.protos.center_net_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.CenterNetFeatureExtractor)
))
_sym_db.RegisterMessage(CenterNetFeatureExtractor)
# Serialized MessageOptions for the KeypointLabelToStdEntry type;
# presumably '8\001' encodes the standard protoc map-entry option — confirm
# against the generated .proto if this ever needs to change.
_CENTERNET_KEYPOINTESTIMATION_KEYPOINTLABELTOSTDENTRY.has_options = True
_CENTERNET_KEYPOINTESTIMATION_KEYPOINTLABELTOSTDENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
# @@protoc_insertion_point(module_scope)
| 52.326012 | 5,456 | 0.770691 | 5,815 | 45,262 | 5.672055 | 0.062081 | 0.053361 | 0.081497 | 0.08277 | 0.76212 | 0.712943 | 0.678531 | 0.615317 | 0.566474 | 0.548859 | 0 | 0.045344 | 0.115174 | 45,262 | 864 | 5,457 | 52.386574 | 0.778222 | 0.024016 | 0 | 0.684211 | 1 | 0.003672 | 0.259216 | 0.228691 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.011016 | 0 | 0.011016 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 |
b9a3c15b78578c37446bcec99565fb474ac0ba93 | 259 | py | Python | vespawatch/management/commands/_utils.py | kmannnish/vespa-watch | eb92246bd5d68d5f4972acc02e86e67e86b77f63 | [
"MIT"
] | 3 | 2019-01-17T09:22:13.000Z | 2020-06-05T03:46:45.000Z | vespawatch/management/commands/_utils.py | kmannnish/vespa-watch | eb92246bd5d68d5f4972acc02e86e67e86b77f63 | [
"MIT"
] | 406 | 2018-06-13T18:36:53.000Z | 2022-03-22T16:41:50.000Z | vespawatch/management/commands/_utils.py | kmannnish/vespa-watch | eb92246bd5d68d5f4972acc02e86e67e86b77f63 | [
"MIT"
] | 8 | 2019-02-12T10:05:08.000Z | 2020-10-01T05:39:13.000Z | from django.core.management import BaseCommand
class VespaWatchCommand(BaseCommand):
    """Base class for vespa-watch management commands.

    Adds ``self.w`` as a short alias for ``self.stdout.write`` so that
    subclasses can emit output with less typing.
    """

    def __init__(self, *args, **kwargs):
        # Zero-argument super() (Python 3) replaces the redundant
        # super(VespaWatchCommand, self) form; behavior is unchanged.
        super().__init__(*args, **kwargs)
        self.w = self.stdout.write  # Alias to save keystrokes :)
b9c5f78760a5aea48ef654429b73b47f8460103e | 511 | py | Python | tests/test_post.py | kibetrono/Blogging-Website | e7f2574979a152fa0f6cb83a5e5ffea24665c585 | [
"Unlicense"
] | null | null | null | tests/test_post.py | kibetrono/Blogging-Website | e7f2574979a152fa0f6cb83a5e5ffea24665c585 | [
"Unlicense"
] | null | null | null | tests/test_post.py | kibetrono/Blogging-Website | e7f2574979a152fa0f6cb83a5e5ffea24665c585 | [
"Unlicense"
] | null | null | null | import unittest
from app.models import Post, User
from app import db
from unittest import TestCase
class TestPost(unittest.TestCase):
    """Test fixture for the Post model and its link to a User."""

    def setUp(self):
        # Build a fresh user and a pitch owned by that user for each test.
        self.user_kibet = User(
            username='kibet',
            password='flasksApp',
            email='kibetdavidro@gmail.com',
            biography="New Knowledge on Flask",
            profile_pic="xxxx",
        )
        self.new_pitch = Post(
            title="Pitch",
            category='promotion',
            pitch="Get new pitch",
            user=self.user_kibet,
        )

    def tearDown(self):
        # Wipe all Post and User rows so tests stay independent.
        Post.query.delete()
        User.query.delete()
| 25.55 | 155 | 0.708415 | 68 | 511 | 5.264706 | 0.558824 | 0.039106 | 0.072626 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.168297 | 511 | 19 | 156 | 26.894737 | 0.842353 | 0 | 0 | 0 | 0 | 0 | 0.175889 | 0.043478 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0.090909 | 0.363636 | 0 | 0.636364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 3 |
b9eea0237e67485113354893390a7ff322137bd4 | 2,101 | py | Python | app/companyapp/api/serializers.py | ElPinguiino/django_nginx_postgres_docker | db8e7b395bd3c807e2991fe3bbf5e4fc63f11c77 | [
"MIT"
] | null | null | null | app/companyapp/api/serializers.py | ElPinguiino/django_nginx_postgres_docker | db8e7b395bd3c807e2991fe3bbf5e4fc63f11c77 | [
"MIT"
] | null | null | null | app/companyapp/api/serializers.py | ElPinguiino/django_nginx_postgres_docker | db8e7b395bd3c807e2991fe3bbf5e4fc63f11c77 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from ..models import *
class CustomerSerializer(serializers.ModelSerializer):
class Meta:
model = Customer
fields = ('customer_id' ,'first_name' ,'last_name ','address','city','state','country','phone','email','birthdate')
class ProductSerializer(serializers.ModelSerializer):
class Meta:
model = Product
fields = ('product_id' ,'product_name' ,'product_type' ,'product_cost' ,'product_price' ,'digital_product' ,'product_tag','product_image')
class OrderSerializer(serializers.ModelSerializer):
class Meta:
model = Order
fields = ('order_id' ,'customer_id' ,'date' ,'price' ,'cost','complete' )
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('employee_id','first_name','last_name','phone','email' ,'address','city','state','country','role','schedule','image','pay_rate')
class InspectionSerializer(serializers.ModelSerializer):
    """Serializes Inspection model instances for the REST API."""

    class Meta:
        model = Inspection
        fields = [
            'inspection_id', 'inspection_date', 'inspection_reason',
            'inspection_person', 'inspection_message',
        ]
class ExpenseSerializer(serializers.ModelSerializer):
    """Serializes Expense model instances for the REST API."""

    class Meta:
        model = Expense
        fields = [
            'expense_id', 'expense_type', 'expense_cost', 'expense_date',
            'expense_person', 'expense_message', 'expense_added',
        ]
class TaskSerializer(serializers.ModelSerializer):
    """Serializes Task model instances for the REST API."""

    class Meta:
        model = Task
        fields = [
            'task_id', 'task_message', 'task_date', 'task_person',
            'task_added', 'task_status',
        ]
class BadgeSerializer(serializers.ModelSerializer):
    """Serializes Badge model instances for the REST API."""

    class Meta:
        model = Badge
        fields = [
            'badge_id', 'badge_name', 'badge_image', 'badge_description',
        ]
class DocumentSerializer(serializers.ModelSerializer):
    """Serializes Document model instances for the REST API."""

    class Meta:
        model = Document
        fields = [
            'document_id', 'document_name', 'document_type',
            'document_file', 'document_added',
        ]
class RecipeSerializer(serializers.ModelSerializer):
    """Serializes Recipe model instances for the REST API."""

    class Meta:
        model = Recipe
        fields = [
            'recipe_id', 'recipe_name', 'recipe_type', 'recipe_description',
        ]
b9f68d5adc5b0b57793de6c522ceea21fc0a8be8 | 157 | py | Python | 2373.py | ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python | 9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da | [
"MIT"
] | 1 | 2022-01-14T08:45:32.000Z | 2022-01-14T08:45:32.000Z | 2373.py | ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python | 9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da | [
"MIT"
] | null | null | null | 2373.py | ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python | 9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da | [
"MIT"
# URI Online Judge 2373: read n rows of (l, c); whenever the first value
# exceeds the second, add the second to the running total, then print it.
n = int(input())
ans = 0
for _ in range(n):
    l, c = map(int, input().split())
    # Only count c when l > c; the original had a dead `else: continue`
    # branch (the loop continues anyway), removed here.
    if l > c:
        ans += c
print(ans)
b9f6d9633f9d12db0b34bc9905ac793a298de73f | 149 | py | Python | sherpa_client/models/annotation_status.py | kairntech/sherpa-client | cd259c87b7291eeec3f3ea025e368f2f069a06cd | [
"Apache-2.0"
] | null | null | null | sherpa_client/models/annotation_status.py | kairntech/sherpa-client | cd259c87b7291eeec3f3ea025e368f2f069a06cd | [
"Apache-2.0"
] | null | null | null | sherpa_client/models/annotation_status.py | kairntech/sherpa-client | cd259c87b7291eeec3f3ea025e368f2f069a06cd | [
"Apache-2.0"
] | null | null | null | from enum import Enum
class AnnotationStatus(str, Enum):
    """Allowed statuses for an annotation: ``"OK"`` or ``"KO"``."""

    OK = "OK"
    KO = "KO"

    def __str__(self) -> str:
        # Render as the plain status value rather than the default
        # `AnnotationStatus.OK` enum repr.
        return str(self.value)
| 14.9 | 34 | 0.597315 | 20 | 149 | 4.25 | 0.6 | 0.164706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.281879 | 149 | 9 | 35 | 16.555556 | 0.794393 | 0 | 0 | 0 | 0 | 0 | 0.026846 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.166667 | 0.166667 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 3 |
b9f8c39c974720c28d8812dc12a59d3b3b1b25d4 | 346 | py | Python | hifive/api/rest/HFBaseWeatherRequest.py | gdfdfg/openmusic-python-sdk | 8c5df944ae223b3b46ae249684786552391b6c88 | [
"MIT"
] | null | null | null | hifive/api/rest/HFBaseWeatherRequest.py | gdfdfg/openmusic-python-sdk | 8c5df944ae223b3b46ae249684786552391b6c88 | [
"MIT"
] | null | null | null | hifive/api/rest/HFBaseWeatherRequest.py | gdfdfg/openmusic-python-sdk | 8c5df944ae223b3b46ae249684786552391b6c88 | [
"MIT"
] | 1 | 2022-02-28T02:49:40.000Z | 2022-02-28T02:49:40.000Z | '''
Created by yong.huang on 2016.11.04
'''
from hifive.api.base import RestApi
class HFBaseWeatherRequest(RestApi):
    """REST request object for the ``BaseWeather`` API endpoint."""

    def __init__(self, domain=None, port=80):
        # Fall back to the test gateway host when no domain is supplied.
        domain = domain or 'hifive-gateway-test.hifiveai.com'
        RestApi.__init__(self, domain, port)
        # Request parameters, populated by the caller before sending.
        self.clientId = None
        self.location = None

    def getapiname(self):
        return 'BaseWeather'
| 24.714286 | 56 | 0.745665 | 48 | 346 | 5.208333 | 0.6875 | 0.064 | 0.112 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033223 | 0.130058 | 346 | 13 | 57 | 26.615385 | 0.797342 | 0.101156 | 0 | 0 | 0 | 0 | 0.141914 | 0.105611 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0.111111 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.