hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f73d0a70138e299b2e68e833326cde7a43a828e0 | 1,720 | py | Python | tests/python/gaia-ui-tests/gaiatest/tests/functional/browser/test_browser_save_image.py | marshall/gaia | 00722269f5d559595fd2f79d9dd70310758af08c | [
"Apache-2.0"
] | 1 | 2019-02-13T23:44:14.000Z | 2019-02-13T23:44:14.000Z | tests/python/gaia-ui-tests/gaiatest/tests/functional/browser/test_browser_save_image.py | marshall/gaia | 00722269f5d559595fd2f79d9dd70310758af08c | [
"Apache-2.0"
] | null | null | null | tests/python/gaia-ui-tests/gaiatest/tests/functional/browser/test_browser_save_image.py | marshall/gaia | 00722269f5d559595fd2f79d9dd70310758af08c | [
"Apache-2.0"
] | null | null | null | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
try:
from marionette.marionette import Actions
except:
from marionette_driver.marionette import Actions
from gaiatest import GaiaTestCase
from gaiatest.apps.search.app import Search
from gaiatest.apps.system.regions.activities import Activities
from gaiatest.apps.system.app import System
class TestBrowserSaveImage(GaiaTestCase):
    """Gaia UI functional test: long-press an image in the browser and save
    it to the device sdcard via the system 'save image' activity."""
    def setUp(self):
        # Base Gaia setup, then ensure network connectivity so the page loads.
        GaiaTestCase.setUp(self)
        self.connect_to_local_area_network()
        # URL of the image fixture served by the local test web server.
        self.test_url = self.marionette.absolute_url('IMG_0001.jpg')
    def test_browser_save_image(self):
        """
        https://moztrap.mozilla.org/manage/case/6889/
        """
        # Check that there are no images on sdcard before saving
        self.assertEqual(0, len(self.data_layer.sdcard_files('.jpeg')))
        search = Search(self.marionette)
        search.launch()
        browser = search.go_to_url(self.test_url)
        browser.switch_to_content()
        # Long tap on the image inside the browser content
        image = self.marionette.find_element('css selector', 'img')
        # Press-and-hold for 3s to trigger the context-menu activity sheet.
        Actions(self.marionette).\
            press(image).\
            wait(3).\
            release().\
            wait(1).\
            perform()
        activities = Activities(self.marionette)
        activities.tap_save_image()
        system = System(self.marionette)
        # Saving surfaces a notification toast; wait for it to show and clear
        # before checking the sdcard contents.
        system.wait_for_notification_toaster_displayed()
        system.wait_for_notification_toaster_not_displayed()
        # Exactly one .jpeg should now be present on the sdcard.
        self.assertEqual(1, len(self.data_layer.sdcard_files('.jpeg')))
| 31.272727 | 71 | 0.679651 |
try:
from marionette.marionette import Actions
except:
from marionette_driver.marionette import Actions
from gaiatest import GaiaTestCase
from gaiatest.apps.search.app import Search
from gaiatest.apps.system.regions.activities import Activities
from gaiatest.apps.system.app import System
class TestBrowserSaveImage(GaiaTestCase):
def setUp(self):
GaiaTestCase.setUp(self)
self.connect_to_local_area_network()
self.test_url = self.marionette.absolute_url('IMG_0001.jpg')
def test_browser_save_image(self):
self.assertEqual(0, len(self.data_layer.sdcard_files('.jpeg')))
search = Search(self.marionette)
search.launch()
browser = search.go_to_url(self.test_url)
browser.switch_to_content()
image = self.marionette.find_element('css selector', 'img')
Actions(self.marionette).\
press(image).\
wait(3).\
release().\
wait(1).\
perform()
activities = Activities(self.marionette)
activities.tap_save_image()
system = System(self.marionette)
system.wait_for_notification_toaster_displayed()
system.wait_for_notification_toaster_not_displayed()
self.assertEqual(1, len(self.data_layer.sdcard_files('.jpeg')))
| true | true |
f73d0a81543ea606e8f5a1821b795140ae5732d4 | 6,119 | py | Python | ansys/dpf/core/time_freq_scoping_factory.py | jfthuong/pydpf-core | bf2895ebc546e0004f759289bfc9a23196559ac3 | [
"MIT"
] | 18 | 2021-10-16T10:38:29.000Z | 2022-03-29T11:26:42.000Z | ansys/dpf/core/time_freq_scoping_factory.py | jfthuong/pydpf-core | bf2895ebc546e0004f759289bfc9a23196559ac3 | [
"MIT"
] | 79 | 2021-10-11T23:18:54.000Z | 2022-03-29T14:53:14.000Z | ansys/dpf/core/time_freq_scoping_factory.py | jfthuong/pydpf-core | bf2895ebc546e0004f759289bfc9a23196559ac3 | [
"MIT"
] | 5 | 2021-11-29T18:35:37.000Z | 2022-03-16T16:49:21.000Z | """
time_freq_scoping_factory
=========================
Contains functions to simplify creating time frequency scopings.
"""
from ansys.dpf.core import Scoping
from ansys.dpf.core import errors as dpf_errors
from ansys.dpf.core.common import locations
from ansys.dpf.core.model import Model
def scoping_by_load_step(load_step, server=None):
    """Build a :class:`ansys.dpf.core.Scoping` targeting a single load step.

    Parameters
    ----------
    load_step : int
        ID of the load step to scope on.
    server : DpfServer, optional
        Server hosting the scoping. Defaults to ``None``, which falls back
        to the global server.

    Returns
    -------
    scoping : Scoping
        Scoping whose location is ``time_freq_step`` and whose single ID is
        the given load step.
    """
    return Scoping(server=server, ids=[load_step], location=locations.time_freq_step)
def scoping_by_load_steps(load_steps, server=None):
    """Build a :class:`ansys.dpf.core.Scoping` over several load steps.

    Parameters
    ----------
    load_steps : list[int]
        IDs of the load steps to scope on.
    server : DpfServer, optional
        Server hosting the scoping. Defaults to ``None``, which falls back
        to the global server.

    Returns
    -------
    scoping : Scoping
        Scoping whose location is ``time_freq_step`` and whose IDs are the
        given load steps.

    Raises
    ------
    InvalidTypeError
        If ``load_steps`` is not a list.
    """
    # Guard against a single int being passed where a list is required.
    if not isinstance(load_steps, list):
        raise dpf_errors.InvalidTypeError("list", "load_steps")
    return Scoping(server=server, ids=load_steps, location=locations.time_freq_step)
def scoping_by_set(cumulative_set, server=None):
    """Build a :class:`ansys.dpf.core.Scoping` targeting one cumulative set.

    Parameters
    ----------
    cumulative_set : int
        Cumulative index of the set to scope on.
    server : DpfServer, optional
        Server hosting the scoping. Defaults to ``None``, which falls back
        to the global server.

    Returns
    -------
    scoping : Scoping
        Scoping whose location is ``time_freq`` and whose single ID is the
        given cumulative set index.
    """
    return Scoping(server=server, ids=[cumulative_set], location=locations.time_freq)
def scoping_by_sets(cumulative_sets, server=None):
    """Build a :class:`ansys.dpf.core.Scoping` over several cumulative sets.

    Parameters
    ----------
    cumulative_sets : list[int]
        Cumulative indices of the sets to scope on.
    server : DpfServer, optional
        Server hosting the scoping. Defaults to ``None``, which falls back
        to the global server.

    Returns
    -------
    scoping : Scoping
        Scoping whose location is ``time_freq`` and whose IDs are the given
        cumulative set indices.

    Raises
    ------
    InvalidTypeError
        If ``cumulative_sets`` is not a list.
    """
    # Guard against a single int being passed where a list is required.
    if not isinstance(cumulative_sets, list):
        raise dpf_errors.InvalidTypeError("list", "cumulative_sets")
    return Scoping(server=server, ids=cumulative_sets, location=locations.time_freq)
def scoping_by_step_and_substep(load_step_id, subset_id, time_freq_support):
    """Build a :class:`ansys.dpf.core.Scoping` for one (step, substep) pair.

    Parameters
    ----------
    load_step_id : int
        One-based ID of the load step.
    subset_id : int
        One-based ID of the substep inside that load step.
    time_freq_support : TimeFreqSupport
        Support used to translate the pair into a cumulative set index.

    Returns
    -------
    scoping : Scoping
        Scoping on the single cumulative set matching the step/substep.
    """
    # get_cumulative_index takes zero-based indices and returns a zero-based
    # cumulative index; scoping IDs are one-based.
    cumulative = time_freq_support.get_cumulative_index(load_step_id - 1, subset_id - 1)
    return Scoping(
        ids=[cumulative + 1],
        location=locations.time_freq,
        server=time_freq_support._server)
def scoping_by_step_and_substep_from_model(load_step_id, subset_id, model, server=None):
    """Build a :class:`ansys.dpf.core.Scoping` for one (step, substep) pair
    of a model's time frequency support.

    Parameters
    ----------
    load_step_id : int
        One-based ID of the load step.
    subset_id : int
        One-based ID of the substep inside that load step.
    model : Model
        Model whose metadata supplies the time frequency support.
    server : DpfServer, optional
        Unused; kept for interface compatibility. The support's own server
        is used.

    Returns
    -------
    scoping : Scoping
        Scoping on the single cumulative set matching the step/substep.
    """
    tf_support = model.metadata.time_freq_support
    return scoping_by_step_and_substep(load_step_id, subset_id, tf_support)
def scoping_on_all_time_freqs(tf_support_or_model):
    """Build a :class:`ansys.dpf.core.Scoping` covering every time/frequency set.

    Parameters
    ----------
    tf_support_or_model : TimeFreqSupport or Model
        Either a support directly, or a model whose metadata supplies one.

    Returns
    -------
    scoping : Scoping
        Scoping whose IDs are all one-based set indices of the support.
    """
    support = tf_support_or_model
    if isinstance(support, Model):
        support = support.metadata.time_freq_support
    n_sets = len(support.time_frequencies)
    return Scoping(
        ids=range(1, n_sets + 1),
        location=locations.time_freq,
        server=support._server)
from ansys.dpf.core import Scoping
from ansys.dpf.core import errors as dpf_errors
from ansys.dpf.core.common import locations
from ansys.dpf.core.model import Model
def scoping_by_load_step(load_step, server=None):
scoping = Scoping(server=server, ids=[load_step], location=locations.time_freq_step)
return scoping
def scoping_by_load_steps(load_steps, server=None):
if not isinstance(load_steps, list):
raise dpf_errors.InvalidTypeError("list", "load_steps")
scoping = Scoping(server=server, ids=load_steps, location=locations.time_freq_step)
return scoping
def scoping_by_set(cumulative_set, server=None):
scoping = Scoping(server=server, ids=[cumulative_set], location=locations.time_freq)
return scoping
def scoping_by_sets(cumulative_sets, server=None):
if not isinstance(cumulative_sets, list):
raise dpf_errors.InvalidTypeError("list", "cumulative_sets")
scoping = Scoping(server=server, ids=cumulative_sets, location=locations.time_freq)
return scoping
def scoping_by_step_and_substep(
load_step_id, subset_id, time_freq_support
):
set_index = time_freq_support.get_cumulative_index(load_step_id - 1, subset_id - 1)
scoping = Scoping(
ids=[set_index + 1],
location=locations.time_freq,
server=time_freq_support._server)
return scoping
def scoping_by_step_and_substep_from_model(load_step_id, subset_id, model, server=None):
return scoping_by_step_and_substep(
load_step_id, subset_id, model.metadata.time_freq_support
)
def scoping_on_all_time_freqs(tf_support_or_model):
if isinstance(tf_support_or_model, Model):
tf_support_or_model = tf_support_or_model.metadata.time_freq_support
return Scoping(
ids=range(1, len(tf_support_or_model.time_frequencies) + 1),
location=locations.time_freq,
server=tf_support_or_model._server)
| true | true |
f73d0b339751590edcee1d843cf956e4e643fcc3 | 6,148 | py | Python | src/oci/oda/models/create_digital_assistant_version_details.py | pabs3/oci-python-sdk | 437ba18ce39af2d1090e277c4bb8750c89f83021 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/oda/models/create_digital_assistant_version_details.py | pabs3/oci-python-sdk | 437ba18ce39af2d1090e277c4bb8750c89f83021 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/oda/models/create_digital_assistant_version_details.py | pabs3/oci-python-sdk | 437ba18ce39af2d1090e277c4bb8750c89f83021 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from .create_digital_assistant_details import CreateDigitalAssistantDetails
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class CreateDigitalAssistantVersionDetails(CreateDigitalAssistantDetails):
    """
    Payload used to create a new version of an existing Digital Assistant.
    """

    def __init__(self, **kwargs):
        """
        Initializes a new CreateDigitalAssistantVersionDetails object with values from keyword arguments.
        The :py:attr:`~oci.oda.models.CreateDigitalAssistantVersionDetails.kind` attribute
        defaults to ``VERSION`` and should not be changed.

        The following keyword arguments are supported (corresponding to the getters/setters of this class):

        :param kind:
            Kind of create operation; one of "NEW", "CLONE", "VERSION", "EXTEND".
        :type kind: str

        :param category:
            Category of the Digital Assistant.
        :type category: str

        :param description:
            Description of the Digital Assistant.
        :type description: str

        :param platform_version:
            Platform version of the Digital Assistant.
        :type platform_version: str

        :param multilingual_mode:
            One of "NATIVE", "TRANSLATION".
        :type multilingual_mode: str

        :param primary_language_tag:
            Primary language tag of the Digital Assistant.
        :type primary_language_tag: str

        :param freeform_tags:
            Free-form tags for the resource.
        :type freeform_tags: dict(str, str)

        :param defined_tags:
            Defined tags for the resource.
        :type defined_tags: dict(str, dict(str, object))

        :param id:
            Unique identifier of the Digital Assistant to create a new version of.
        :type id: str

        :param version:
            The new resource version.
        :type version: str
        """
        self.swagger_types = {
            'kind': 'str',
            'category': 'str',
            'description': 'str',
            'platform_version': 'str',
            'multilingual_mode': 'str',
            'primary_language_tag': 'str',
            'freeform_tags': 'dict(str, str)',
            'defined_tags': 'dict(str, dict(str, object))',
            'id': 'str',
            'version': 'str'
        }
        self.attribute_map = {
            'kind': 'kind',
            'category': 'category',
            'description': 'description',
            'platform_version': 'platformVersion',
            'multilingual_mode': 'multilingualMode',
            'primary_language_tag': 'primaryLanguageTag',
            'freeform_tags': 'freeformTags',
            'defined_tags': 'definedTags',
            'id': 'id',
            'version': 'version'
        }
        # Start every backing attribute at None; @init_model_state_from_kwargs
        # then populates them from the supplied keyword arguments.
        for attr in self.swagger_types:
            setattr(self, '_' + attr, None)
        # Discriminator: this subclass always represents a VERSION create.
        self._kind = 'VERSION'

    @property
    def id(self):
        """
        **[Required]** Gets the id of this CreateDigitalAssistantVersionDetails.
        The unique identifier of the Digital Assistant to create a new version of.

        :return: The id of this CreateDigitalAssistantVersionDetails.
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """
        Sets the id of this CreateDigitalAssistantVersionDetails.
        The unique identifier of the Digital Assistant to create a new version of.

        :param id: The id of this CreateDigitalAssistantVersionDetails.
        :type: str
        """
        self._id = id

    @property
    def version(self):
        """
        **[Required]** Gets the version of this CreateDigitalAssistantVersionDetails.
        The resource's version. The version can only contain numbers, letters, periods, underscores, dashes or spaces. The version must begin with a letter or a number.

        :return: The version of this CreateDigitalAssistantVersionDetails.
        :rtype: str
        """
        return self._version

    @version.setter
    def version(self, version):
        """
        Sets the version of this CreateDigitalAssistantVersionDetails.
        The resource's version. The version can only contain numbers, letters, periods, underscores, dashes or spaces. The version must begin with a letter or a number.

        :param version: The version of this CreateDigitalAssistantVersionDetails.
        :type: str
        """
        self._version = version

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        # Models compare by full attribute state; None never compares equal.
        return other is not None and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 37.950617 | 245 | 0.658426 |
from .create_digital_assistant_details import CreateDigitalAssistantDetails
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class CreateDigitalAssistantVersionDetails(CreateDigitalAssistantDetails):
def __init__(self, **kwargs):
self.swagger_types = {
'kind': 'str',
'category': 'str',
'description': 'str',
'platform_version': 'str',
'multilingual_mode': 'str',
'primary_language_tag': 'str',
'freeform_tags': 'dict(str, str)',
'defined_tags': 'dict(str, dict(str, object))',
'id': 'str',
'version': 'str'
}
self.attribute_map = {
'kind': 'kind',
'category': 'category',
'description': 'description',
'platform_version': 'platformVersion',
'multilingual_mode': 'multilingualMode',
'primary_language_tag': 'primaryLanguageTag',
'freeform_tags': 'freeformTags',
'defined_tags': 'definedTags',
'id': 'id',
'version': 'version'
}
self._kind = None
self._category = None
self._description = None
self._platform_version = None
self._multilingual_mode = None
self._primary_language_tag = None
self._freeform_tags = None
self._defined_tags = None
self._id = None
self._version = None
self._kind = 'VERSION'
@property
def id(self):
return self._id
@id.setter
def id(self, id):
self._id = id
@property
def version(self):
return self._version
@version.setter
def version(self, version):
self._version = version
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f73d0bfc9c7fa46db6bcab3d6885939da199bcd7 | 944 | py | Python | leetcode/0078_Subsets/result.py | theck17/notes | f32f0f4b8f821b1ed38d173ef0913efddd094b91 | [
"MIT"
] | null | null | null | leetcode/0078_Subsets/result.py | theck17/notes | f32f0f4b8f821b1ed38d173ef0913efddd094b91 | [
"MIT"
] | null | null | null | leetcode/0078_Subsets/result.py | theck17/notes | f32f0f4b8f821b1ed38d173ef0913efddd094b91 | [
"MIT"
] | null | null | null | # !/usr/bin/env python3
# Author: C.K
# Email: theck17@163.com
# DateTime:2021-04-27 21:41:14
# Description:
import os
import sys
class Solution(object):
    """Generate all subsets (the power set) of a list of distinct integers.

    Three equivalent strategies are provided. ``subsets`` (iterative) is the
    primary entry point; previously the DFS variant was also named ``subsets``
    and was silently shadowed by the later definition, so it has been renamed
    ``subsets_dfs`` to keep both callable.
    """

    def subsets_dfs(self, nums):
        """DFS/backtracking: record every path prefix as a subset."""
        ret = []
        self.dfs(nums, [], ret)
        return ret

    def dfs(self, nums, path, ret):
        """Append the current path, then extend it with each remaining number."""
        # Every partial path is itself a valid subset.
        ret.append(path)
        for i in range(len(nums)):
            self.dfs(nums[i + 1:], path + [nums[i]], ret)

    # Bit Manipulation
    def subsets2(self, nums):
        """Bit manipulation: bit j of mask i selects nums[j]."""
        res = []
        nums.sort()
        # Was `xrange`, which does not exist on Python 3 (file shebang is
        # python3); `range` is the correct, lazily-evaluated equivalent.
        for i in range(1 << len(nums)):
            tmp = []
            for j in range(len(nums)):
                if i & 1 << j:  # if i >> j & 1:
                    tmp.append(nums[j])
            res.append(tmp)
        return res

    # Iteratively
    def subsets(self, nums):
        """Iteratively extend all existing subsets with each new number."""
        res = [[]]
        for num in sorted(nums):
            res += [item + [num] for item in res]
        return res


if __name__ == "__main__":
    pass
| 21.454545 | 57 | 0.494703 |
import os
import sys
class Solution(object):
def subsets(self, nums):
ret = []
self.dfs(nums, [], ret)
return ret
def dfs(self, nums, path, ret):
ret.append(path)
for i in range(len(nums)):
self.dfs(nums[i + 1:], path + [nums[i]], ret)
def subsets2(self, nums):
res = []
nums.sort()
for i in xrange(1 << len(nums)):
tmp = []
for j in xrange(len(nums)):
if i & 1 << j:
tmp.append(nums[j])
res.append(tmp)
return res
def subsets(self, nums):
res = [[]]
for num in sorted(nums):
res += [item + [num] for item in res]
return res
if __name__ == "__main__":
pass
| true | true |
f73d0ccd60d4f9f3638e3275c7a342afcf7e8570 | 24,873 | py | Python | Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/horovod/setup.py | mengkai94/training_results_v0.6 | 43dc3e250f8da47b5f8833197d74cb8cf1004fc9 | [
"Apache-2.0"
] | 42 | 2019-07-11T18:23:52.000Z | 2021-09-14T08:21:09.000Z | Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/horovod/setup.py | mengkai94/training_results_v0.6 | 43dc3e250f8da47b5f8833197d74cb8cf1004fc9 | [
"Apache-2.0"
] | 23 | 2019-07-29T05:21:52.000Z | 2020-08-31T18:51:42.000Z | Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/horovod/setup.py | mengkai94/training_results_v0.6 | 43dc3e250f8da47b5f8833197d74cb8cf1004fc9 | [
"Apache-2.0"
] | 51 | 2019-07-12T05:10:25.000Z | 2021-07-28T16:19:06.000Z | # Copyright 2017 Uber Technologies, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import print_function
import os
from setuptools import setup, Extension, find_packages
from setuptools.command.build_ext import build_ext
from distutils.errors import CompileError, DistutilsError, DistutilsPlatformError, LinkError
import shlex
import subprocess
import sys
import textwrap
import traceback
from horovod import __version__
# Placeholder extension modules: sources, flags and libraries are filled in
# later, at build time, once the toolchain/TF/CUDA probes have run.
common_mpi_lib = Extension('horovod.common.mpi_lib', [])
tensorflow_mpi_lib = Extension('horovod.tensorflow.mpi_lib', [])
torch_mpi_lib = Extension('horovod.torch.mpi_lib', [])
torch_mpi_lib_impl = Extension('horovod.torch.mpi_lib_impl', [])
def is_build_action():
    """Return True when the setup.py invocation is a build/packaging action.

    Used to decide whether the native extensions must actually be compiled:
    only ``build*``, ``bdist*`` and ``install*`` commands trigger it.
    Previously the function fell through and implicitly returned ``None``
    for other commands; it now always returns an explicit bool (both are
    falsy, so callers are unaffected).
    """
    if len(sys.argv) <= 1:
        return False
    command = sys.argv[1]
    return (command.startswith('build') or
            command.startswith('bdist') or
            command.startswith('install'))
def check_tf_version():
try:
import tensorflow as tf
if tf.__version__ < '1.1.0':
raise DistutilsPlatformError(
'Your TensorFlow version %s is outdated. '
'Horovod requires tensorflow>=1.1.0' % tf.__version__)
except ImportError:
raise DistutilsPlatformError(
'import tensorflow failed, is it installed?\n\n%s' % traceback.format_exc())
except AttributeError:
# This means that tf.__version__ was not exposed, which makes it *REALLY* old.
raise DistutilsPlatformError(
'Your TensorFlow version is outdated. Horovod requires tensorflow>=1.1.0')
def get_cpp_flags(build_ext):
    """Probe which C++11 compile flags work with the active compiler.

    Tries the default flag set with and without ``-stdlib=libc++``; on
    Darwin (where Clang/libc++ is the norm) the libc++ variant is tried
    first. Raises DistutilsPlatformError if nothing compiles.
    """
    base = ['-std=c++11', '-fPIC', '-O2']
    with_libcxx = base + ['-stdlib=libc++']
    if sys.platform == 'darwin':
        candidates = [with_libcxx, base]
    else:
        candidates = [base, with_libcxx]

    last_err = None
    for flags in candidates:
        try:
            test_compile(build_ext, 'test_cpp_flags', extra_preargs=flags,
                         code=textwrap.dedent('''\
                    #include <unordered_map>
                    void test() {
                    }
                    '''))
            return flags
        except (CompileError, LinkError):
            last_err = 'Unable to determine C++ compilation flags (see error above).'
        except Exception:
            last_err = 'Unable to determine C++ compilation flags. ' \
                       'Last error:\n\n%s' % traceback.format_exc()

    raise DistutilsPlatformError(last_err)
def get_tf_include_dirs():
    """Return TensorFlow's header directories, including the bundled nsync headers."""
    import tensorflow as tf
    base = tf.sysconfig.get_include()
    return [base, base + '/external/nsync/public']
def get_tf_lib_dirs():
    """Return the directory containing TensorFlow's shared libraries."""
    import tensorflow as tf
    return [tf.sysconfig.get_lib()]
def get_tf_libs(build_ext, lib_dirs, cpp_flags):
    """Determine which libraries a TensorFlow op library must link against.

    Tries ``libtensorflow_framework`` first (TF >= 1.4), then no explicit
    library (older TF, where symbols resolve at load time). Each candidate
    is validated by actually loading the produced .so through TensorFlow.
    Raises DistutilsPlatformError when neither works.
    """
    last_err = None
    for tf_libs in [['tensorflow_framework'], []]:
        try:
            lib_file = test_compile(build_ext, 'test_tensorflow_libs',
                                    library_dirs=lib_dirs, libraries=tf_libs,
                                    extra_preargs=cpp_flags,
                                    code=textwrap.dedent('''\
                    void test() {
                    }
                    '''))
            # Loading the shared object back through TF verifies the chosen
            # link line actually produces a loadable op library.
            from tensorflow.python.framework import load_library
            load_library.load_op_library(lib_file)
            return tf_libs
        except (CompileError, LinkError):
            last_err = 'Unable to determine -l link flags to use with TensorFlow (see error above).'
        except Exception:
            last_err = 'Unable to determine -l link flags to use with TensorFlow. ' \
                       'Last error:\n\n%s' % traceback.format_exc()
    raise DistutilsPlatformError(last_err)
def get_tf_abi(build_ext, include_dirs, lib_dirs, libs, cpp_flags):
    """Detect which _GLIBCXX_USE_CXX11_ABI setting matches TensorFlow's build.

    Compiles a small op with ABI=0 then ABI=1 — the probe uses
    tensorflow::strings::StrCat, which is sensitive to the std::string ABI —
    and loads the result back through TensorFlow to confirm compatibility.
    Returns (macro_name, value) or raises DistutilsPlatformError.
    """
    last_err = None
    cxx11_abi_macro = '_GLIBCXX_USE_CXX11_ABI'
    for cxx11_abi in ['0', '1']:
        try:
            lib_file = test_compile(build_ext, 'test_tensorflow_abi',
                                    macros=[(cxx11_abi_macro, cxx11_abi)],
                                    include_dirs=include_dirs, library_dirs=lib_dirs,
                                    libraries=libs, extra_preargs=cpp_flags,
                                    code=textwrap.dedent('''\
                #include <string>
                #include "tensorflow/core/framework/op.h"
                #include "tensorflow/core/framework/op_kernel.h"
                #include "tensorflow/core/framework/shape_inference.h"
                void test() {
                    auto ignore = tensorflow::strings::StrCat("a", "b");
                }
                '''))
            # If the library loads cleanly, this ABI matches TensorFlow's.
            from tensorflow.python.framework import load_library
            load_library.load_op_library(lib_file)
            return cxx11_abi_macro, cxx11_abi
        except (CompileError, LinkError):
            last_err = 'Unable to determine CXX11 ABI to use with TensorFlow (see error above).'
        except Exception:
            last_err = 'Unable to determine CXX11 ABI to use with TensorFlow. ' \
                       'Last error:\n\n%s' % traceback.format_exc()
    raise DistutilsPlatformError(last_err)
def get_tf_flags(build_ext, cpp_flags):
    """Return (compile_flags, link_flags) for building TensorFlow op libraries.

    TF >= 1.4 exposes the flags directly via ``tf.sysconfig``; older releases
    are handled by probing include dirs, lib dirs, link libraries and the
    CXX11 ABI individually.
    """
    import tensorflow as tf
    try:
        return tf.sysconfig.get_compile_flags(), tf.sysconfig.get_link_flags()
    except AttributeError:
        # Older TensorFlow: reconstruct the flags manually from the probes.
        include_dirs = get_tf_include_dirs()
        lib_dirs = get_tf_lib_dirs()
        libs = get_tf_libs(build_ext, lib_dirs, cpp_flags)
        abi = get_tf_abi(build_ext, include_dirs, lib_dirs, libs, cpp_flags)

        compile_flags = ['-I%s' % d for d in include_dirs]
        if abi:
            compile_flags.append('-D%s=%s' % abi)

        link_flags = ['-L%s' % d for d in lib_dirs]
        link_flags.extend('-l%s' % lib for lib in libs)
        return compile_flags, link_flags
def get_mpi_flags():
    """Return MPI compile/link flags as one shell-quoted string.

    Runs the MPI compiler wrapper's "show" command (default ``mpicxx -show``,
    overridable via the HOROVOD_MPICXX_SHOW environment variable) and returns
    the flags the wrapper would pass to the underlying compiler. Raises
    DistutilsPlatformError if the command cannot be executed.
    """
    show_command = os.environ.get('HOROVOD_MPICXX_SHOW', 'mpicxx -show')
    try:
        mpi_show_output = subprocess.check_output(
            shlex.split(show_command), universal_newlines=True).strip()
        mpi_show_args = shlex.split(mpi_show_output)
        if not mpi_show_args[0].startswith('-'):
            # Open MPI and MPICH print compiler name as a first word, skip it
            mpi_show_args = mpi_show_args[1:]
        # strip off compiler call portion and always escape each arg
        return ' '.join(['"' + arg.replace('"', '"\'"\'"') + '"'
                         for arg in mpi_show_args])
    except Exception:
        raise DistutilsPlatformError(
            '%s failed (see error below), is MPI in $PATH?\n'
            'Note: If your version of MPI has a custom command to show compilation flags, '
            'please specify it with the HOROVOD_MPICXX_SHOW environment variable.\n\n'
            '%s' % (show_command, traceback.format_exc()))
def test_compile(build_ext, name, code, libraries=None, include_dirs=None, library_dirs=None, macros=None,
                 extra_preargs=None):
    """Compile and link *code* into a throwaway shared object; return its path.

    Used as a feature probe: callers pass candidate flags/libraries and catch
    CompileError/LinkError to decide whether a configuration works.

    :param build_ext: distutils build_ext command (supplies compiler and temp dir).
    :param name: base name for the generated source/object/shared-object files.
    :param code: C++ source text to compile.
    :param libraries: libraries to link, forwarded to the compiler.
    :param include_dirs: header search paths, forwarded to the compiler.
    :param library_dirs: library search paths, forwarded to the compiler.
    :param macros: preprocessor macro definitions, forwarded to the compiler.
    :param extra_preargs: extra compiler flags, forwarded to the compiler.
    :return: path to the linked shared object.
    """
    test_compile_dir = os.path.join(build_ext.build_temp, 'test_compile')
    if not os.path.exists(test_compile_dir):
        os.makedirs(test_compile_dir)
    # Write the probe source under the build temp directory.
    source_file = os.path.join(test_compile_dir, '%s.cc' % name)
    with open(source_file, 'w') as f:
        f.write(code)
    compiler = build_ext.compiler
    [object_file] = compiler.object_filenames([source_file])
    shared_object_file = compiler.shared_object_filename(
        name, output_dir=test_compile_dir)
    compiler.compile([source_file], extra_preargs=extra_preargs,
                     include_dirs=include_dirs, macros=macros)
    compiler.link_shared_object(
        [object_file], shared_object_file, libraries=libraries, library_dirs=library_dirs)
    return shared_object_file
def get_cuda_dirs(build_ext, cpp_flags):
    """Locate CUDA include/lib directories and verify they are usable.

    Honors HOROVOD_CUDA_HOME, HOROVOD_CUDA_INCLUDE and HOROVOD_CUDA_LIB;
    falls back to /usr/local/cuda when none is set. A minimal cudart probe
    is compiled to validate the paths; on failure a DistutilsPlatformError
    with configuration guidance is raised.
    """
    cuda_include_dirs = []
    cuda_lib_dirs = []
    cuda_home = os.environ.get('HOROVOD_CUDA_HOME')
    if cuda_home:
        cuda_include_dirs += ['%s/include' % cuda_home]
        cuda_lib_dirs += ['%s/lib' % cuda_home, '%s/lib64' % cuda_home]
    cuda_include = os.environ.get('HOROVOD_CUDA_INCLUDE')
    if cuda_include:
        cuda_include_dirs += [cuda_include]
    cuda_lib = os.environ.get('HOROVOD_CUDA_LIB')
    if cuda_lib:
        cuda_lib_dirs += [cuda_lib]
    if not cuda_include_dirs and not cuda_lib_dirs:
        # default to /usr/local/cuda
        cuda_include_dirs += ['/usr/local/cuda/include']
        cuda_lib_dirs += ['/usr/local/cuda/lib', '/usr/local/cuda/lib64']
    try:
        # Smoke-test: compile and link a trivial cudaSetDevice call.
        test_compile(build_ext, 'test_cuda', libraries=['cudart'], include_dirs=cuda_include_dirs,
                     library_dirs=cuda_lib_dirs, extra_preargs=cpp_flags, code=textwrap.dedent('''\
            #include <cuda_runtime.h>
            void test() {
                cudaSetDevice(0);
            }
            '''))
    except (CompileError, LinkError):
        raise DistutilsPlatformError(
            'CUDA library was not found (see error above).\n'
            'Please specify correct CUDA location with the HOROVOD_CUDA_HOME '
            'environment variable or combination of HOROVOD_CUDA_INCLUDE and '
            'HOROVOD_CUDA_LIB environment variables.\n\n'
            'HOROVOD_CUDA_HOME - path where CUDA include and lib directories can be found\n'
            'HOROVOD_CUDA_INCLUDE - path to CUDA include directory\n'
            'HOROVOD_CUDA_LIB - path to CUDA lib directory')
    return cuda_include_dirs, cuda_lib_dirs
def get_nccl_vals(build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags):
    """Locate NCCL >= 2 headers/libraries and pick static or shared linkage.

    Honors HOROVOD_NCCL_HOME, HOROVOD_NCCL_INCLUDE, HOROVOD_NCCL_LIB and
    HOROVOD_NCCL_LINK (``SHARED`` vs the default static link). A probe is
    compiled that also enforces NCCL major version >= 2 at preprocessing
    time; on failure a DistutilsPlatformError with guidance is raised.
    Returns (include_dirs, lib_dirs, libs).
    """
    nccl_include_dirs = []
    nccl_lib_dirs = []
    nccl_libs = []
    nccl_home = os.environ.get('HOROVOD_NCCL_HOME')
    if nccl_home:
        nccl_include_dirs += ['%s/include' % nccl_home]
        nccl_lib_dirs += ['%s/lib' % nccl_home, '%s/lib64' % nccl_home]
    nccl_include_dir = os.environ.get('HOROVOD_NCCL_INCLUDE')
    if nccl_include_dir:
        nccl_include_dirs += [nccl_include_dir]
    nccl_lib_dir = os.environ.get('HOROVOD_NCCL_LIB')
    if nccl_lib_dir:
        nccl_lib_dirs += [nccl_lib_dir]
    nccl_link_mode = os.environ.get('HOROVOD_NCCL_LINK', 'STATIC')
    if nccl_link_mode.upper() == 'SHARED':
        nccl_libs += ['nccl']
    else:
        nccl_libs += ['nccl_static']
    try:
        test_compile(build_ext, 'test_nccl', libraries=nccl_libs, include_dirs=nccl_include_dirs + cuda_include_dirs,
                     library_dirs=nccl_lib_dirs + cuda_lib_dirs, extra_preargs=cpp_flags, code=textwrap.dedent('''\
            #include <nccl.h>
            #if NCCL_MAJOR < 2
            #error Horovod requires NCCL 2.0 or later version, please upgrade.
            #endif
            void test() {
                ncclUniqueId nccl_id;
                ncclGetUniqueId(&nccl_id);
            }
            '''))
    except (CompileError, LinkError):
        raise DistutilsPlatformError(
            'NCCL 2.0 library or its later version was not found (see error above).\n'
            'Please specify correct NCCL location with the HOROVOD_NCCL_HOME '
            'environment variable or combination of HOROVOD_NCCL_INCLUDE and '
            'HOROVOD_NCCL_LIB environment variables.\n\n'
            'HOROVOD_NCCL_HOME - path where NCCL include and lib directories can be found\n'
            'HOROVOD_NCCL_INCLUDE - path to NCCL include directory\n'
            'HOROVOD_NCCL_LIB - path to NCCL lib directory')
    return nccl_include_dirs, nccl_lib_dirs, nccl_libs
def get_ddl_dirs():
    """Locate the IBM DDL include and lib directories.

    Honors the HOROVOD_DDL_HOME environment variable when set (mirroring
    HOROVOD_CUDA_HOME / HOROVOD_NCCL_HOME) and falls back to the default
    installation prefix /opt/DL/ddl, so existing callers are unaffected.

    Returns:
        ([ddl_include_dir], [ddl_lib_dir]) lists for Extension options.

    Raises:
        DistutilsPlatformError: if either directory does not exist.
    """
    # Previously hard-coded; allow an override for non-default installs.
    ddl_home = os.environ.get('HOROVOD_DDL_HOME', '/opt/DL/ddl')
    ddl_include_dir = '%s/include' % ddl_home
    ddl_lib_dir = '%s/lib' % ddl_home
    if not os.path.exists(ddl_lib_dir):
        raise DistutilsPlatformError('DDL lib was not found. Please, make sure \'ddl\' package is installed.')
    if not os.path.exists(ddl_include_dir):
        raise DistutilsPlatformError('DDL include was not found. Please, make sure \'ddl-dev\' package is installed.')
    return [ddl_include_dir], [ddl_lib_dir]
def get_common_options(build_ext):
    """Assemble compiler/linker options shared by all Horovod extensions.

    Reads HOROVOD_GPU_ALLREDUCE / HOROVOD_GPU_ALLGATHER / HOROVOD_GPU_BROADCAST
    to decide which GPU backends (CUDA, NCCL, DDL) must be located and
    validated, then folds the results into a single options dictionary.

    Returns:
        dict with MACROS, INCLUDES, SOURCES, COMPILE_FLAGS, LINK_FLAGS,
        LIBRARY_DIRS and LIBRARIES keys.

    Raises:
        DistutilsError: on an unsupported env-var value.
        DistutilsPlatformError: if a requested backend cannot be found.
    """
    cpp_flags = get_cpp_flags(build_ext)
    mpi_flags = get_mpi_flags()
    # Validate the GPU operation selectors before doing any probing.
    gpu_allreduce = os.environ.get('HOROVOD_GPU_ALLREDUCE')
    if gpu_allreduce and gpu_allreduce != 'MPI' and gpu_allreduce != 'NCCL' and \
            gpu_allreduce != 'DDL':
        raise DistutilsError('HOROVOD_GPU_ALLREDUCE=%s is invalid, supported '
                             'values are "", "MPI", "NCCL", "DDL".' % gpu_allreduce)
    gpu_allgather = os.environ.get('HOROVOD_GPU_ALLGATHER')
    if gpu_allgather and gpu_allgather != 'MPI':
        raise DistutilsError('HOROVOD_GPU_ALLGATHER=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_allgather)
    gpu_broadcast = os.environ.get('HOROVOD_GPU_BROADCAST')
    if gpu_broadcast and gpu_broadcast != 'MPI':
        raise DistutilsError('HOROVOD_GPU_BROADCAST=%s is invalid, supported '
                             'values are "", "MPI".' % gpu_broadcast)
    # Any GPU operation requires CUDA; NCCL/DDL only for allreduce.
    if gpu_allreduce or gpu_allgather or gpu_broadcast:
        have_cuda = True
        cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext, cpp_flags)
    else:
        have_cuda = False
        cuda_include_dirs = cuda_lib_dirs = []
    if gpu_allreduce == 'NCCL':
        have_nccl = True
        nccl_include_dirs, nccl_lib_dirs, nccl_libs = get_nccl_vals(
            build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags)
    else:
        have_nccl = False
        nccl_include_dirs = nccl_lib_dirs = nccl_libs = []
    if gpu_allreduce == 'DDL':
        have_ddl = True
        ddl_include_dirs, ddl_lib_dirs = get_ddl_dirs()
    else:
        have_ddl = False
        ddl_include_dirs = ddl_lib_dirs = []
    MACROS = []
    INCLUDES = []
    SOURCES = []
    COMPILE_FLAGS = cpp_flags + shlex.split(mpi_flags)
    LINK_FLAGS = shlex.split(mpi_flags)
    LIBRARY_DIRS = []
    LIBRARIES = []
    if have_cuda:
        MACROS += [('HAVE_CUDA', '1')]
        INCLUDES += cuda_include_dirs
        LIBRARY_DIRS += cuda_lib_dirs
        LIBRARIES += ['cudart']
    if have_nccl:
        MACROS += [('HAVE_NCCL', '1')]
        INCLUDES += nccl_include_dirs
        # Hide NCCL symbols so a statically linked NCCL does not clash.
        LINK_FLAGS += ['-Wl,--version-script=hide_nccl.lds']
        LIBRARY_DIRS += nccl_lib_dirs
        LIBRARIES += nccl_libs
    if have_ddl:
        MACROS += [('HAVE_DDL', '1')]
        INCLUDES += ddl_include_dirs
        LIBRARY_DIRS += ddl_lib_dirs
        LIBRARIES += ['ddl', 'ddl_pack']
    # The macros carry only the first letter ('M', 'N' or 'D') of the mode.
    if gpu_allreduce:
        MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]
    if gpu_allgather:
        MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]
    if gpu_broadcast:
        MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]
    return dict(MACROS=MACROS,
                INCLUDES=INCLUDES,
                SOURCES=SOURCES,
                COMPILE_FLAGS=COMPILE_FLAGS,
                LINK_FLAGS=LINK_FLAGS,
                LIBRARY_DIRS=LIBRARY_DIRS,
                LIBRARIES=LIBRARIES)
def build_common_extension(build_ext, options, abi_compile_flags):
    """Configure and compile the shared horovod.common MPI extension.

    Fills in the module-level common_mpi_lib placeholder from the common
    options plus the ABI flags discovered while building the TensorFlow
    plugin, then hands it to distutils for compilation.
    """
    ext = common_mpi_lib
    common_sources = ['horovod/common/common.cc',
                      'horovod/common/mpi_message.cc',
                      'horovod/common/operations.cc',
                      'horovod/common/timeline.cc']
    ext.define_macros = options['MACROS']
    ext.include_dirs = options['INCLUDES']
    ext.sources = options['SOURCES'] + common_sources
    ext.extra_compile_args = options['COMPILE_FLAGS'] + abi_compile_flags
    ext.extra_link_args = options['LINK_FLAGS']
    ext.library_dirs = options['LIBRARY_DIRS']
    ext.libraries = options['LIBRARIES']
    build_ext.build_extension(ext)
def build_tf_extension(build_ext, options):
    """Configure and compile the TensorFlow plugin extension.

    Combines the common options with TensorFlow-specific compile/link
    flags obtained from get_tf_flags().

    Returns:
        The list of _GLIBCXX_USE_CXX11_ABI flags TensorFlow was compiled
        with, so the remaining libraries can be built ABI-compatible.
    """
    check_tf_version()
    tf_compile_flags, tf_link_flags = get_tf_flags(
        build_ext, options['COMPILE_FLAGS'])
    tensorflow_mpi_lib.define_macros = options['MACROS']
    tensorflow_mpi_lib.include_dirs = options['INCLUDES']
    tensorflow_mpi_lib.sources = options['SOURCES'] + \
        ['horovod/tensorflow/mpi_ops.cc']
    tensorflow_mpi_lib.extra_compile_args = options['COMPILE_FLAGS'] + \
        tf_compile_flags
    tensorflow_mpi_lib.extra_link_args = options['LINK_FLAGS'] + tf_link_flags
    tensorflow_mpi_lib.library_dirs = options['LIBRARY_DIRS']
    tensorflow_mpi_lib.libraries = options['LIBRARIES']
    build_ext.build_extension(tensorflow_mpi_lib)
    # Return ABI flags used for TensorFlow compilation. We will use this flag
    # to compile all the libraries.
    return [flag for flag in tf_compile_flags if '_GLIBCXX_USE_CXX11_ABI' in flag]
def dummy_import_torch():
    """Best-effort import of torch, for its load-order side effect only.

    PyTorch must be imported before TensorFlow to avoid 'dlopen: cannot
    load any more object with static TLS' errors, so this runs early even
    when the PyTorch plugin may not be built.  Failures are ignored.
    """
    try:
        import torch  # noqa: F401 -- imported for side effects only
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; any import problem is simply ignored here.
        pass
def check_torch_import():
    """Verify that PyTorch is importable, raising a build error otherwise."""
    try:
        import torch  # noqa: F401
    except ImportError:
        message = 'import torch failed, is it installed?\n\n%s' % traceback.format_exc()
        raise DistutilsPlatformError(message)
def is_torch_cuda():
    """Probe whether the installed PyTorch supports CUDA.

    Builds a throwaway FFI extension with with_cuda=True; any failure is
    taken to mean 'no CUDA support'.  NOTE(review): torch.utils.ffi was
    deprecated and later removed from PyTorch -- confirm against the torch
    version this build targets.
    """
    try:
        from torch.utils.ffi import create_extension
        cuda_test_ext = create_extension(
            name='horovod.torch.test_cuda',
            headers=['horovod/torch/dummy.h'],
            sources=[],
            with_cuda=True,
            extra_compile_args=['-std=c11', '-fPIC', '-O2']
        )
        cuda_test_ext.build()
        return True
    except:
        print('INFO: Above error indicates that this PyTorch installation does not support CUDA.')
        return False
def check_macro(macros, key):
    """Return True if *key* appears in *macros* with a truthy value."""
    for name, value in macros:
        if name == key and value:
            return True
    return False
def set_macro(macros, key, new_value):
    """Return a new macro list with *key* set to *new_value*.

    Existing entries for *key* have their value replaced in place;
    otherwise the pair is appended.  The input list is never mutated.
    """
    replaced = False
    result = []
    for name, value in macros:
        if name == key:
            result.append((name, new_value))
            replaced = True
        else:
            result.append((name, value))
    if not replaced:
        result = macros + [(key, new_value)]
    return result
class protect_files(object):
    """Context manager that shields files from being overwritten.

    On entry every file is renamed to '<name>.protected'; on exit the
    original names are restored.  Used around torch FFI code generation,
    which would otherwise clobber customized package __init__ files.
    """

    def __init__(self, *files):
        self.files = files

    def __enter__(self):
        for path in self.files:
            os.rename(path, path + '.protected')

    def __exit__(self, exc_type, exc_value, exc_tb):
        for path in self.files:
            os.rename(path + '.protected', path)
def build_torch_extension(build_ext, options, abi_compile_flags):
    """Configure and compile the PyTorch plugin via torch's cffi builder.

    Builds two pieces: a thin C interface library (mpi_lib) and the C++
    implementation (mpi_lib_impl), both generated with
    torch.utils.ffi.create_extension and then compiled through distutils.

    Raises:
        DistutilsPlatformError: if GPU support was requested but this
            PyTorch installation lacks CUDA.
    """
    check_torch_import()
    have_cuda = is_torch_cuda()
    if not have_cuda and check_macro(options['MACROS'], 'HAVE_CUDA'):
        raise DistutilsPlatformError(
            'Horovod build with GPU support was requested, but this PyTorch '
            'installation does not support CUDA.')
    # Update HAVE_CUDA to mean that PyTorch supports CUDA. Internally, we will be checking
    # HOROVOD_GPU_(ALLREDUCE|ALLGATHER|BROADCAST) to decide whether we should use GPU
    # version or transfer tensors to CPU memory for those operations.
    updated_macros = set_macro(
        options['MACROS'], 'HAVE_CUDA', str(int(have_cuda)))
    # Create_extension overwrites these files which are customized, we need to protect them.
    with protect_files('horovod/torch/mpi_lib/__init__.py',
                       'horovod/torch/mpi_lib_impl/__init__.py'):
        from torch.utils.ffi import create_extension
        # C-language interface stub; headers only, no sources.
        ffi_iface = create_extension(
            name='horovod.torch.mpi_lib',
            headers=['horovod/torch/interface.h'] +
            (['horovod/torch/interface_cuda.h'] if have_cuda else []),
            with_cuda=have_cuda,
            language='c',
            package=True,
            sources=[],
            extra_compile_args=['-std=c11', '-fPIC', '-O2']
        )
        # C++ implementation compiled with the shared Horovod options.
        ffi_impl = create_extension(
            name='horovod.torch.mpi_lib_impl',
            headers=[],
            with_cuda=have_cuda,
            language='c++',
            package=True,
            source_extension='.cc',
            define_macros=updated_macros,
            include_dirs=options['INCLUDES'],
            sources=options['SOURCES'] + ['horovod/torch/mpi_ops.cc',
                                          'horovod/torch/handle_manager.cc',
                                          'horovod/torch/ready_event.cc',
                                          'horovod/torch/tensor_util.cc',
                                          'horovod/torch/cuda_util.cc',
                                          'horovod/torch/adapter.cc'],
            extra_compile_args=options['COMPILE_FLAGS'] + abi_compile_flags,
            extra_link_args=options['LINK_FLAGS'],
            library_dirs=options['LIBRARY_DIRS'],
            libraries=options['LIBRARIES']
        )
    for ffi, setuptools_ext in [(ffi_iface, torch_mpi_lib),
                                (ffi_impl, torch_mpi_lib_impl)]:
        ffi_ext = ffi.distutils_extension()
        # ffi_ext is distutils Extension, not setuptools Extension
        for k, v in ffi_ext.__dict__.items():
            setuptools_ext.__dict__[k] = v
        build_ext.build_extension(setuptools_ext)
# Custom build_ext that builds the framework plugins (TensorFlow, PyTorch)
# before the shared common library, skipping frameworks that are absent.
class custom_build_ext(build_ext):
    """build_ext subclass orchestrating Horovod's multi-framework build."""

    def build_extensions(self):
        """Build TF/PyTorch plugins as available, then the common library.

        HOROVOD_WITHOUT_* env vars exclude a framework; HOROVOD_WITH_*
        turn a failed plugin build into a hard error instead of a skip.
        """
        options = get_common_options(self)
        abi_compile_flags = []
        built_plugins = []
        # If PyTorch is installed, it must be imported before TensorFlow, otherwise
        # we may get an error: dlopen: cannot load any more object with static TLS
        dummy_import_torch()
        if not os.environ.get('HOROVOD_WITHOUT_TENSORFLOW'):
            try:
                abi_compile_flags = build_tf_extension(self, options)
                built_plugins.append(True)
            except:
                if not os.environ.get('HOROVOD_WITH_TENSORFLOW'):
                    print('INFO: Unable to build TensorFlow plugin, will skip it.\n\n'
                          '%s' % traceback.format_exc(), file=sys.stderr)
                    built_plugins.append(False)
                else:
                    raise
        if not os.environ.get('HOROVOD_WITHOUT_PYTORCH'):
            try:
                build_torch_extension(self, options, abi_compile_flags)
                built_plugins.append(True)
            except:
                if not os.environ.get('HOROVOD_WITH_PYTORCH'):
                    print('INFO: Unable to build PyTorch plugin, will skip it.\n\n'
                          '%s' % traceback.format_exc(), file=sys.stderr)
                    built_plugins.append(False)
                else:
                    raise
        if not built_plugins:
            raise DistutilsError(
                'Both TensorFlow and PyTorch plugins were excluded from build. Aborting.')
        if not any(built_plugins):
            raise DistutilsError(
                'Neither TensorFlow nor PyTorch plugins were built. See errors above.')
        build_common_extension(self, options, abi_compile_flags)
setup(name='horovod',
version=__version__,
packages=find_packages(),
description='Distributed training framework for TensorFlow, Keras, and PyTorch.',
author='Uber Technologies, Inc.',
long_description=textwrap.dedent('''\
Horovod is a distributed training framework for TensorFlow, Keras, and PyTorch.
The goal of Horovod is to make distributed Deep Learning fast and easy to use.'''),
url='https://github.com/uber/horovod',
classifiers=[
'License :: OSI Approved :: Apache Software License'
],
ext_modules=[common_mpi_lib, tensorflow_mpi_lib,
torch_mpi_lib, torch_mpi_lib_impl],
cmdclass={'build_ext': custom_build_ext},
# cffi is required for PyTorch
# If cffi is specified in setup_requires, it will need libffi to be installed on the machine,
# which is undesirable. Luckily, `install` action will install cffi before executing build,
# so it's only necessary for `build*` or `bdist*` actions.
setup_requires=['cffi>=1.4.0'] if is_build_action() else [],
install_requires=['cffi>=1.4.0'],
zip_safe=False)
| 38.924883 | 118 | 0.625297 |
from __future__ import print_function
import os
from setuptools import setup, Extension, find_packages
from setuptools.command.build_ext import build_ext
from distutils.errors import CompileError, DistutilsError, DistutilsPlatformError, LinkError
import shlex
import subprocess
import sys
import textwrap
import traceback
from horovod import __version__
# Empty Extension placeholders; their sources, macros and flags are filled
# in at build time by custom_build_ext before compilation.
common_mpi_lib = Extension('horovod.common.mpi_lib', [])
tensorflow_mpi_lib = Extension('horovod.tensorflow.mpi_lib', [])
torch_mpi_lib = Extension('horovod.torch.mpi_lib', [])
torch_mpi_lib_impl = Extension('horovod.torch.mpi_lib_impl', [])
def is_build_action():
    """Return True when this setup.py invocation compiles artifacts.

    Used to decide whether build-time requirements (cffi) must be listed
    in setup_requires.  The original fell off the end and returned None
    for non-build commands; this version always returns a bool.
    """
    if len(sys.argv) <= 1:
        return False
    # build*, bdist* and install* all trigger compilation of C extensions.
    return sys.argv[1].startswith(('build', 'bdist', 'install'))
def check_tf_version():
    """Ensure TensorFlow is importable and at least version 1.1.0.

    The original compared version strings lexicographically, which
    mis-orders multi-digit components (e.g. '1.2' vs '1.15'); this
    version compares the leading numeric components instead.

    Raises:
        DistutilsPlatformError: when TensorFlow is missing, lacks a
            __version__ attribute, or is older than 1.1.0.
    """
    try:
        import tensorflow as tf
        # Extract leading numeric parts: '1.12.0-rc1' -> (1, 12, 0).
        numeric_parts = []
        for part in tf.__version__.replace('-', '.').split('.'):
            if not part.isdigit():
                break
            numeric_parts.append(int(part))
        if tuple(numeric_parts) < (1, 1):
            raise DistutilsPlatformError(
                'Your TensorFlow version %s is outdated. '
                'Horovod requires tensorflow>=1.1.0' % tf.__version__)
    except ImportError:
        raise DistutilsPlatformError(
            'import tensorflow failed, is it installed?\n\n%s' % traceback.format_exc())
    except AttributeError:
        raise DistutilsPlatformError(
            'Your TensorFlow version is outdated. Horovod requires tensorflow>=1.1.0')
def get_cpp_flags(build_ext):
    """Determine working C++11 compile flags by trial compilation.

    On macOS -stdlib=libc++ is tried first; elsewhere the plain flag set
    is tried first.  The first set that compiles a trivial program wins.

    Raises:
        DistutilsPlatformError: if no candidate flag set compiles.
    """
    last_err = None
    default_flags = ['-std=c++11', '-fPIC', '-O2']
    if sys.platform == 'darwin':
        # Clang on macOS needs libc++ for the C++11 standard library.
        flags_to_try = [default_flags + ['-stdlib=libc++'], default_flags]
    else:
        flags_to_try = [default_flags, default_flags + ['-stdlib=libc++']]
    for cpp_flags in flags_to_try:
        try:
            test_compile(build_ext, 'test_cpp_flags', extra_preargs=cpp_flags,
                         code=textwrap.dedent('''\
                    #include <unordered_map>
                    void test() {
                    }
                    '''))
            return cpp_flags
        except (CompileError, LinkError):
            last_err = 'Unable to determine C++ compilation flags (see error above).'
        except Exception:
            last_err = 'Unable to determine C++ compilation flags. ' \
                       'Last error:\n\n%s' % traceback.format_exc()
    raise DistutilsPlatformError(last_err)
def get_tf_include_dirs():
    """Return header search paths needed to compile against TensorFlow."""
    import tensorflow as tf
    base = tf.sysconfig.get_include()
    # nsync headers ship inside TF's bundled external dependencies.
    return [base, '%s/external/nsync/public' % base]
def get_tf_lib_dirs():
    """Return library search paths needed to link against TensorFlow."""
    import tensorflow as tf
    lib_dir = tf.sysconfig.get_lib()
    return [lib_dir]
def get_tf_libs(build_ext, lib_dirs, cpp_flags):
    """Determine which -l libraries are needed to link against TensorFlow.

    Tries linking a trivial shared object against 'tensorflow_framework'
    first (TF >= 1.4), then with no extra library, and verifies the result
    actually loads via load_op_library.

    Raises:
        DistutilsPlatformError: if neither candidate links and loads.
    """
    last_err = None
    for tf_libs in [['tensorflow_framework'], []]:
        try:
            lib_file = test_compile(build_ext, 'test_tensorflow_libs',
                                    library_dirs=lib_dirs, libraries=tf_libs,
                                    extra_preargs=cpp_flags,
                                    code=textwrap.dedent('''\
                    void test() {
                    }
                    '''))
            from tensorflow.python.framework import load_library
            load_library.load_op_library(lib_file)
            return tf_libs
        except (CompileError, LinkError):
            last_err = 'Unable to determine -l link flags to use with TensorFlow (see error above).'
        except Exception:
            last_err = 'Unable to determine -l link flags to use with TensorFlow. ' \
                       'Last error:\n\n%s' % traceback.format_exc()
    raise DistutilsPlatformError(last_err)
def get_tf_abi(build_ext, include_dirs, lib_dirs, libs, cpp_flags):
    """Determine the _GLIBCXX_USE_CXX11_ABI value TensorFlow was built with.

    Compiles a probe using std::string-passing TF API against ABI=0 and
    ABI=1 in turn, and accepts whichever version loads successfully.

    Returns:
        (macro_name, value) tuple suitable for a -D define.

    Raises:
        DistutilsPlatformError: if neither ABI setting works.
    """
    last_err = None
    cxx11_abi_macro = '_GLIBCXX_USE_CXX11_ABI'
    for cxx11_abi in ['0', '1']:
        try:
            lib_file = test_compile(build_ext, 'test_tensorflow_abi',
                                    macros=[(cxx11_abi_macro, cxx11_abi)],
                                    include_dirs=include_dirs, library_dirs=lib_dirs,
                                    libraries=libs, extra_preargs=cpp_flags,
                                    code=textwrap.dedent('''\
                #include <string>
                #include "tensorflow/core/framework/op.h"
                #include "tensorflow/core/framework/op_kernel.h"
                #include "tensorflow/core/framework/shape_inference.h"
                void test() {
                    auto ignore = tensorflow::strings::StrCat("a", "b");
                }
                '''))
            from tensorflow.python.framework import load_library
            load_library.load_op_library(lib_file)
            return cxx11_abi_macro, cxx11_abi
        except (CompileError, LinkError):
            last_err = 'Unable to determine CXX11 ABI to use with TensorFlow (see error above).'
        except Exception:
            last_err = 'Unable to determine CXX11 ABI to use with TensorFlow. ' \
                       'Last error:\n\n%s' % traceback.format_exc()
    raise DistutilsPlatformError(last_err)
def get_tf_flags(build_ext, cpp_flags):
    """Return (compile_flags, link_flags) for building against TensorFlow.

    Prefers TF's own tf.sysconfig.get_compile_flags/get_link_flags
    (TF >= 1.4); older versions lack those, so the flags are reconstructed
    by probing include dirs, lib dirs, libraries and the CXX11 ABI.
    """
    import tensorflow as tf
    try:
        return tf.sysconfig.get_compile_flags(), tf.sysconfig.get_link_flags()
    except AttributeError:
        # Fallback path for TensorFlow < 1.4.
        tf_include_dirs = get_tf_include_dirs()
        tf_lib_dirs = get_tf_lib_dirs()
        tf_libs = get_tf_libs(build_ext, tf_lib_dirs, cpp_flags)
        tf_abi = get_tf_abi(build_ext, tf_include_dirs,
                            tf_lib_dirs, tf_libs, cpp_flags)
        compile_flags = []
        for include_dir in tf_include_dirs:
            compile_flags.append('-I%s' % include_dir)
        if tf_abi:
            compile_flags.append('-D%s=%s' % tf_abi)
        link_flags = []
        for lib_dir in tf_lib_dirs:
            link_flags.append('-L%s' % lib_dir)
        for lib in tf_libs:
            link_flags.append('-l%s' % lib)
        return compile_flags, link_flags
def get_mpi_flags():
    """Query the MPI compiler wrapper for its compile/link flags.

    Runs the command from HOROVOD_MPICXX_SHOW (default 'mpicxx -show'),
    drops the leading compiler token, and returns the remaining arguments
    as a single shell-quoted string.

    Raises:
        DistutilsPlatformError: if the show command cannot be executed.
    """
    show_command = os.environ.get('HOROVOD_MPICXX_SHOW', 'mpicxx -show')
    try:
        mpi_show_output = subprocess.check_output(
            shlex.split(show_command), universal_newlines=True).strip()
        mpi_show_args = shlex.split(mpi_show_output)
        if not mpi_show_args[0].startswith('-'):
            # Open MPI and MPICH wrappers echo the compiler binary first.
            mpi_show_args = mpi_show_args[1:]
        # Re-quote each argument so embedded double quotes survive a shell.
        return ' '.join(['"' + arg.replace('"', '"\'"\'"') + '"'
                         for arg in mpi_show_args])
    except Exception:
        raise DistutilsPlatformError(
            '%s failed (see error below), is MPI in $PATH?\n'
            'Note: If your version of MPI has a custom command to show compilation flags, '
            'please specify it with the HOROVOD_MPICXX_SHOW environment variable.\n\n'
            '%s' % (show_command, traceback.format_exc()))
def test_compile(build_ext, name, code, libraries=None, include_dirs=None, library_dirs=None, macros=None,
                 extra_preargs=None):
    """Compile and link *code* as a throwaway shared object probe.

    Writes the C++ snippet to the build temp directory, compiles it with
    the supplied flags/macros, and links it into a shared object.

    Returns:
        Path to the linked shared object (for optional dlopen checks).

    Raises:
        CompileError / LinkError: propagated from the compiler on failure.
    """
    test_compile_dir = os.path.join(build_ext.build_temp, 'test_compile')
    if not os.path.exists(test_compile_dir):
        os.makedirs(test_compile_dir)
    source_file = os.path.join(test_compile_dir, '%s.cc' % name)
    with open(source_file, 'w') as f:
        f.write(code)
    compiler = build_ext.compiler
    [object_file] = compiler.object_filenames([source_file])
    shared_object_file = compiler.shared_object_filename(
        name, output_dir=test_compile_dir)
    compiler.compile([source_file], extra_preargs=extra_preargs,
                     include_dirs=include_dirs, macros=macros)
    compiler.link_shared_object(
        [object_file], shared_object_file, libraries=libraries, library_dirs=library_dirs)
    return shared_object_file
def get_cuda_dirs(build_ext, cpp_flags):
    """Locate CUDA include/lib directories and verify cudart links.

    Search locations come from HOROVOD_CUDA_HOME, HOROVOD_CUDA_INCLUDE and
    HOROVOD_CUDA_LIB; when none are set, /usr/local/cuda is assumed.

    Returns:
        (cuda_include_dirs, cuda_lib_dirs) for Extension options.

    Raises:
        DistutilsPlatformError: if the cudart probe fails to compile/link.
    """
    cuda_include_dirs = []
    cuda_lib_dirs = []
    cuda_home = os.environ.get('HOROVOD_CUDA_HOME')
    if cuda_home:
        cuda_include_dirs += ['%s/include' % cuda_home]
        cuda_lib_dirs += ['%s/lib' % cuda_home, '%s/lib64' % cuda_home]
    cuda_include = os.environ.get('HOROVOD_CUDA_INCLUDE')
    if cuda_include:
        cuda_include_dirs += [cuda_include]
    cuda_lib = os.environ.get('HOROVOD_CUDA_LIB')
    if cuda_lib:
        cuda_lib_dirs += [cuda_lib]
    if not cuda_include_dirs and not cuda_lib_dirs:
        # default to /usr/local/cuda
        cuda_include_dirs += ['/usr/local/cuda/include']
        cuda_lib_dirs += ['/usr/local/cuda/lib', '/usr/local/cuda/lib64']
    try:
        test_compile(build_ext, 'test_cuda', libraries=['cudart'], include_dirs=cuda_include_dirs,
                     library_dirs=cuda_lib_dirs, extra_preargs=cpp_flags, code=textwrap.dedent('''\
            #include <cuda_runtime.h>
            void test() {
                cudaSetDevice(0);
            }
            '''))
    except (CompileError, LinkError):
        raise DistutilsPlatformError(
            'CUDA library was not found (see error above).\n'
            'Please specify correct CUDA location with the HOROVOD_CUDA_HOME '
            'environment variable or combination of HOROVOD_CUDA_INCLUDE and '
            'HOROVOD_CUDA_LIB environment variables.\n\n'
            'HOROVOD_CUDA_HOME - path where CUDA include and lib directories can be found\n'
            'HOROVOD_CUDA_INCLUDE - path to CUDA include directory\n'
            'HOROVOD_CUDA_LIB - path to CUDA lib directory')
    return cuda_include_dirs, cuda_lib_dirs
def get_nccl_vals(build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags):
nccl_include_dirs = []
nccl_lib_dirs = []
nccl_libs = []
nccl_home = os.environ.get('HOROVOD_NCCL_HOME')
if nccl_home:
nccl_include_dirs += ['%s/include' % nccl_home]
nccl_lib_dirs += ['%s/lib' % nccl_home, '%s/lib64' % nccl_home]
nccl_include_dir = os.environ.get('HOROVOD_NCCL_INCLUDE')
if nccl_include_dir:
nccl_include_dirs += [nccl_include_dir]
nccl_lib_dir = os.environ.get('HOROVOD_NCCL_LIB')
if nccl_lib_dir:
nccl_lib_dirs += [nccl_lib_dir]
nccl_link_mode = os.environ.get('HOROVOD_NCCL_LINK', 'STATIC')
if nccl_link_mode.upper() == 'SHARED':
nccl_libs += ['nccl']
else:
nccl_libs += ['nccl_static']
try:
test_compile(build_ext, 'test_nccl', libraries=nccl_libs, include_dirs=nccl_include_dirs + cuda_include_dirs,
library_dirs=nccl_lib_dirs + cuda_lib_dirs, extra_preargs=cpp_flags, code=textwrap.dedent('''\
#include <nccl.h>
#if NCCL_MAJOR < 2
#error Horovod requires NCCL 2.0 or later version, please upgrade.
#endif
void test() {
ncclUniqueId nccl_id;
ncclGetUniqueId(&nccl_id);
}
'''))
except (CompileError, LinkError):
raise DistutilsPlatformError(
'NCCL 2.0 library or its later version was not found (see error above).\n'
'Please specify correct NCCL location with the HOROVOD_NCCL_HOME '
'environment variable or combination of HOROVOD_NCCL_INCLUDE and '
'HOROVOD_NCCL_LIB environment variables.\n\n'
'HOROVOD_NCCL_HOME - path where NCCL include and lib directories can be found\n'
'HOROVOD_NCCL_INCLUDE - path to NCCL include directory\n'
'HOROVOD_NCCL_LIB - path to NCCL lib directory')
return nccl_include_dirs, nccl_lib_dirs, nccl_libs
def get_ddl_dirs():
ddl_home = '/opt/DL/ddl'
ddl_include_dir = '%s/include' % ddl_home
ddl_lib_dir = '%s/lib' % ddl_home
if not os.path.exists(ddl_lib_dir):
raise DistutilsPlatformError('DDL lib was not found. Please, make sure \'ddl\' package is installed.')
if not os.path.exists(ddl_include_dir):
raise DistutilsPlatformError('DDL include was not found. Please, make sure \'ddl-dev\' package is installed.')
return [ddl_include_dir], [ddl_lib_dir]
def get_common_options(build_ext):
cpp_flags = get_cpp_flags(build_ext)
mpi_flags = get_mpi_flags()
gpu_allreduce = os.environ.get('HOROVOD_GPU_ALLREDUCE')
if gpu_allreduce and gpu_allreduce != 'MPI' and gpu_allreduce != 'NCCL' and \
gpu_allreduce != 'DDL':
raise DistutilsError('HOROVOD_GPU_ALLREDUCE=%s is invalid, supported '
'values are "", "MPI", "NCCL", "DDL".' % gpu_allreduce)
gpu_allgather = os.environ.get('HOROVOD_GPU_ALLGATHER')
if gpu_allgather and gpu_allgather != 'MPI':
raise DistutilsError('HOROVOD_GPU_ALLGATHER=%s is invalid, supported '
'values are "", "MPI".' % gpu_allgather)
gpu_broadcast = os.environ.get('HOROVOD_GPU_BROADCAST')
if gpu_broadcast and gpu_broadcast != 'MPI':
raise DistutilsError('HOROVOD_GPU_BROADCAST=%s is invalid, supported '
'values are "", "MPI".' % gpu_broadcast)
if gpu_allreduce or gpu_allgather or gpu_broadcast:
have_cuda = True
cuda_include_dirs, cuda_lib_dirs = get_cuda_dirs(build_ext, cpp_flags)
else:
have_cuda = False
cuda_include_dirs = cuda_lib_dirs = []
if gpu_allreduce == 'NCCL':
have_nccl = True
nccl_include_dirs, nccl_lib_dirs, nccl_libs = get_nccl_vals(
build_ext, cuda_include_dirs, cuda_lib_dirs, cpp_flags)
else:
have_nccl = False
nccl_include_dirs = nccl_lib_dirs = nccl_libs = []
if gpu_allreduce == 'DDL':
have_ddl = True
ddl_include_dirs, ddl_lib_dirs = get_ddl_dirs()
else:
have_ddl = False
ddl_include_dirs = ddl_lib_dirs = []
MACROS = []
INCLUDES = []
SOURCES = []
COMPILE_FLAGS = cpp_flags + shlex.split(mpi_flags)
LINK_FLAGS = shlex.split(mpi_flags)
LIBRARY_DIRS = []
LIBRARIES = []
if have_cuda:
MACROS += [('HAVE_CUDA', '1')]
INCLUDES += cuda_include_dirs
LIBRARY_DIRS += cuda_lib_dirs
LIBRARIES += ['cudart']
if have_nccl:
MACROS += [('HAVE_NCCL', '1')]
INCLUDES += nccl_include_dirs
LINK_FLAGS += ['-Wl,--version-script=hide_nccl.lds']
LIBRARY_DIRS += nccl_lib_dirs
LIBRARIES += nccl_libs
if have_ddl:
MACROS += [('HAVE_DDL', '1')]
INCLUDES += ddl_include_dirs
LIBRARY_DIRS += ddl_lib_dirs
LIBRARIES += ['ddl', 'ddl_pack']
if gpu_allreduce:
MACROS += [('HOROVOD_GPU_ALLREDUCE', "'%s'" % gpu_allreduce[0])]
if gpu_allgather:
MACROS += [('HOROVOD_GPU_ALLGATHER', "'%s'" % gpu_allgather[0])]
if gpu_broadcast:
MACROS += [('HOROVOD_GPU_BROADCAST', "'%s'" % gpu_broadcast[0])]
return dict(MACROS=MACROS,
INCLUDES=INCLUDES,
SOURCES=SOURCES,
COMPILE_FLAGS=COMPILE_FLAGS,
LINK_FLAGS=LINK_FLAGS,
LIBRARY_DIRS=LIBRARY_DIRS,
LIBRARIES=LIBRARIES)
def build_common_extension(build_ext, options, abi_compile_flags):
common_mpi_lib.define_macros = options['MACROS']
common_mpi_lib.include_dirs = options['INCLUDES']
common_mpi_lib.sources = options['SOURCES'] + ['horovod/common/common.cc',
'horovod/common/mpi_message.cc',
'horovod/common/operations.cc',
'horovod/common/timeline.cc']
common_mpi_lib.extra_compile_args = options['COMPILE_FLAGS'] + \
abi_compile_flags
common_mpi_lib.extra_link_args = options['LINK_FLAGS']
common_mpi_lib.library_dirs = options['LIBRARY_DIRS']
common_mpi_lib.libraries = options['LIBRARIES']
build_ext.build_extension(common_mpi_lib)
def build_tf_extension(build_ext, options):
check_tf_version()
tf_compile_flags, tf_link_flags = get_tf_flags(
build_ext, options['COMPILE_FLAGS'])
tensorflow_mpi_lib.define_macros = options['MACROS']
tensorflow_mpi_lib.include_dirs = options['INCLUDES']
tensorflow_mpi_lib.sources = options['SOURCES'] + \
['horovod/tensorflow/mpi_ops.cc']
tensorflow_mpi_lib.extra_compile_args = options['COMPILE_FLAGS'] + \
tf_compile_flags
tensorflow_mpi_lib.extra_link_args = options['LINK_FLAGS'] + tf_link_flags
tensorflow_mpi_lib.library_dirs = options['LIBRARY_DIRS']
tensorflow_mpi_lib.libraries = options['LIBRARIES']
build_ext.build_extension(tensorflow_mpi_lib)
return [flag for flag in tf_compile_flags if '_GLIBCXX_USE_CXX11_ABI' in flag]
def dummy_import_torch():
try:
import torch
except:
pass
def check_torch_import():
try:
import torch
except ImportError:
raise DistutilsPlatformError(
'import torch failed, is it installed?\n\n%s' % traceback.format_exc())
def is_torch_cuda():
try:
from torch.utils.ffi import create_extension
cuda_test_ext = create_extension(
name='horovod.torch.test_cuda',
headers=['horovod/torch/dummy.h'],
sources=[],
with_cuda=True,
extra_compile_args=['-std=c11', '-fPIC', '-O2']
)
cuda_test_ext.build()
return True
except:
print('INFO: Above error indicates that this PyTorch installation does not support CUDA.')
return False
def check_macro(macros, key):
return any(k == key and v for k, v in macros)
def set_macro(macros, key, new_value):
if any(k == key for k, _ in macros):
return [(k, new_value if k == key else v) for k, v in macros]
else:
return macros + [(key, new_value)]
class protect_files(object):
def __init__(self, *files):
self.files = files
def __enter__(self):
for file in self.files:
os.rename(file, file + '.protected')
def __exit__(self, type, value, traceback):
for file in self.files:
os.rename(file + '.protected', file)
def build_torch_extension(build_ext, options, abi_compile_flags):
check_torch_import()
have_cuda = is_torch_cuda()
if not have_cuda and check_macro(options['MACROS'], 'HAVE_CUDA'):
raise DistutilsPlatformError(
'Horovod build with GPU support was requested, but this PyTorch '
'installation does not support CUDA.')
updated_macros = set_macro(
options['MACROS'], 'HAVE_CUDA', str(int(have_cuda)))
with protect_files('horovod/torch/mpi_lib/__init__.py',
'horovod/torch/mpi_lib_impl/__init__.py'):
from torch.utils.ffi import create_extension
ffi_iface = create_extension(
name='horovod.torch.mpi_lib',
headers=['horovod/torch/interface.h'] +
(['horovod/torch/interface_cuda.h'] if have_cuda else []),
with_cuda=have_cuda,
language='c',
package=True,
sources=[],
extra_compile_args=['-std=c11', '-fPIC', '-O2']
)
ffi_impl = create_extension(
name='horovod.torch.mpi_lib_impl',
headers=[],
with_cuda=have_cuda,
language='c++',
package=True,
source_extension='.cc',
define_macros=updated_macros,
include_dirs=options['INCLUDES'],
sources=options['SOURCES'] + ['horovod/torch/mpi_ops.cc',
'horovod/torch/handle_manager.cc',
'horovod/torch/ready_event.cc',
'horovod/torch/tensor_util.cc',
'horovod/torch/cuda_util.cc',
'horovod/torch/adapter.cc'],
extra_compile_args=options['COMPILE_FLAGS'] + abi_compile_flags,
extra_link_args=options['LINK_FLAGS'],
library_dirs=options['LIBRARY_DIRS'],
libraries=options['LIBRARIES']
)
for ffi, setuptools_ext in [(ffi_iface, torch_mpi_lib),
(ffi_impl, torch_mpi_lib_impl)]:
ffi_ext = ffi.distutils_extension()
for k, v in ffi_ext.__dict__.items():
setuptools_ext.__dict__[k] = v
build_ext.build_extension(setuptools_ext)
class custom_build_ext(build_ext):
def build_extensions(self):
options = get_common_options(self)
abi_compile_flags = []
built_plugins = []
dummy_import_torch()
if not os.environ.get('HOROVOD_WITHOUT_TENSORFLOW'):
try:
abi_compile_flags = build_tf_extension(self, options)
built_plugins.append(True)
except:
if not os.environ.get('HOROVOD_WITH_TENSORFLOW'):
print('INFO: Unable to build TensorFlow plugin, will skip it.\n\n'
'%s' % traceback.format_exc(), file=sys.stderr)
built_plugins.append(False)
else:
raise
if not os.environ.get('HOROVOD_WITHOUT_PYTORCH'):
try:
build_torch_extension(self, options, abi_compile_flags)
built_plugins.append(True)
except:
if not os.environ.get('HOROVOD_WITH_PYTORCH'):
print('INFO: Unable to build PyTorch plugin, will skip it.\n\n'
'%s' % traceback.format_exc(), file=sys.stderr)
built_plugins.append(False)
else:
raise
if not built_plugins:
raise DistutilsError(
'Both TensorFlow and PyTorch plugins were excluded from build. Aborting.')
if not any(built_plugins):
raise DistutilsError(
'Neither TensorFlow nor PyTorch plugins were built. See errors above.')
build_common_extension(self, options, abi_compile_flags)
setup(name='horovod',
version=__version__,
packages=find_packages(),
description='Distributed training framework for TensorFlow, Keras, and PyTorch.',
author='Uber Technologies, Inc.',
long_description=textwrap.dedent('''\
Horovod is a distributed training framework for TensorFlow, Keras, and PyTorch.
The goal of Horovod is to make distributed Deep Learning fast and easy to use.'''),
url='https://github.com/uber/horovod',
classifiers=[
'License :: OSI Approved :: Apache Software License'
],
ext_modules=[common_mpi_lib, tensorflow_mpi_lib,
torch_mpi_lib, torch_mpi_lib_impl],
cmdclass={'build_ext': custom_build_ext},
setup_requires=['cffi>=1.4.0'] if is_build_action() else [],
install_requires=['cffi>=1.4.0'],
zip_safe=False)
| true | true |
f73d10c8ba87f317b58df20c51d2dd5afc1f1151 | 220 | py | Python | niyopolymers/patches/import_price_list.py | venku31/niyopolymers | f150ee591d2ea10720d8e98c5f6abf7c6e2edb2d | [
"MIT"
] | null | null | null | niyopolymers/patches/import_price_list.py | venku31/niyopolymers | f150ee591d2ea10720d8e98c5f6abf7c6e2edb2d | [
"MIT"
] | null | null | null | niyopolymers/patches/import_price_list.py | venku31/niyopolymers | f150ee591d2ea10720d8e98c5f6abf7c6e2edb2d | [
"MIT"
] | null | null | null | import frappe
def execute():
    """Import the bundled price_list.csv fixture as Price List records."""
    csv_path = frappe.get_app_path("niyopolymers", "patches", "imports", "price_list.csv")
    importer = frappe.core.doctype.data_import.data_import.import_file
    importer("Price List", csv_path, "Insert", console=True)
def execute():
path = frappe.get_app_path("niyopolymers", "patches", "imports", "price_list.csv")
frappe.core.doctype.data_import.data_import.import_file("Price List", path, "Insert", console=True) | true | true |
f73d10fe1ebd52dc14f7892bc7bc987383890531 | 3,140 | py | Python | chj/app/ObjectSize.py | kestreltechnology/CodeHawk-Java | 7c218f67161bbe1a8aa2cc70f16d28e032957464 | [
"MIT"
] | null | null | null | chj/app/ObjectSize.py | kestreltechnology/CodeHawk-Java | 7c218f67161bbe1a8aa2cc70f16d28e032957464 | [
"MIT"
] | 1 | 2020-08-01T04:20:13.000Z | 2020-08-01T04:20:13.000Z | chj/app/ObjectSize.py | kestreltechnology/CodeHawk-Java | 7c218f67161bbe1a8aa2cc70f16d28e032957464 | [
"MIT"
] | 3 | 2020-07-25T22:55:03.000Z | 2021-10-20T08:18:39.000Z | # ------------------------------------------------------------------------------
# CodeHawk Java Analyzer
# Author: Henny Sipma
# ------------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2016-2020 Kestrel Technology LLC
# Copyright (c) 2021 Andrew McGraw
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ------------------------------------------------------------------------------
from typing import List, TYPE_CHECKING
if TYPE_CHECKING:
from chj.app.JavaClass import JavaClass
from chj.index.FieldSignature import FieldSignature
from chj.index.FieldSignature import ClassFieldSignature
from chj.index.JObjectTypes import JObjectTypeBase
class ObjectSize(object):
    """Accumulates the size contributions of a Java class's fields.

    Tracks the running scalar byte count together with the object-typed
    and array-typed fields encountered, so composite sizes can be merged
    and rendered as a small report.
    """

    def __init__(self, jclass: "JavaClass"):
        self.jclass = jclass                        # owning JavaClass
        self.jd = jclass.jd                         # its DataDictionary
        self.scalar = 0                             # running scalar byte count
        self.objects: List["JObjectTypeBase"] = []  # object-typed fields seen
        self.arrays: List["FieldSignature"] = []    # array-typed fields seen

    def add_scalar(self, s: int) -> None:
        """Add *s* bytes of scalar data to the running total."""
        self.scalar = self.scalar + s

    def add_object(self, cnix: "JObjectTypeBase") -> None:
        """Record one object-typed field."""
        self.objects.append(cnix)

    def add_array(self, arr: "FieldSignature") -> None:
        """Record one array-typed field."""
        self.arrays.append(arr)

    def add_field(self, fsig: "FieldSignature") -> None:
        """Fold a field signature into the totals according to its kind."""
        self.add_scalar(fsig.get_scalar_size())
        if fsig.is_object():
            self.add_object(fsig.get_object_type())
        if fsig.is_array():
            self.add_array(fsig)

    def add_object_size(self, other: "ObjectSize") -> None:
        """Merge another ObjectSize's totals into this one."""
        self.scalar = self.scalar + other.scalar
        self.objects.extend(other.objects)
        self.arrays.extend(other.arrays)

    def to_string(self) -> str:
        """Render a human-readable summary of the accumulated sizes."""
        report = ['  scalar size: ' + str(self.scalar)]
        if self.objects:
            report.append('  objects    : ' + ', '.join(str(o) for o in self.objects))
        if self.arrays:
            report.append('  arrays     : ' + str(len(self.arrays)))
        return '\n'.join(report)
| 42.432432 | 80 | 0.626433 |
from typing import List, TYPE_CHECKING
if TYPE_CHECKING:
from chj.app.JavaClass import JavaClass
from chj.index.FieldSignature import FieldSignature
from chj.index.FieldSignature import ClassFieldSignature
from chj.index.JObjectTypes import JObjectTypeBase
class ObjectSize(object):
    """Accumulates a size estimate for a Java class.

    NOTE(review): duplicate copy of the ObjectSize class defined earlier in
    this file (comment-stripped dataset column); code left byte-identical.
    """
    def __init__(self, jclass: "JavaClass"):
        self.jclass = jclass  # JavaClass being measured
        self.jd = jclass.jd  # DataDictionary of that class
        self.scalar = 0  # accumulated scalar byte count
        self.objects: List["JObjectTypeBase"] = []  # object types of object fields
        self.arrays: List["FieldSignature"] = []  # signatures of array fields
    def add_scalar(self, s: int) -> None:
        """Add *s* bytes of scalar storage to the running total."""
        self.scalar += s
    def add_object(self, cnix: "JObjectTypeBase") -> None:
        """Record one object-typed field's object type."""
        self.objects.append(cnix)
    def add_array(self, arr: "FieldSignature") -> None:
        """Record one array-typed field's signature."""
        self.arrays.append(arr)
    def add_field(self, fsig: "FieldSignature") -> None:
        """Fold a single field's contribution into the totals."""
        self.add_scalar(fsig.get_scalar_size())
        if fsig.is_object(): self.add_object(fsig.get_object_type())
        if fsig.is_array(): self.add_array(fsig)
    def add_object_size(self, other: "ObjectSize") -> None:
        """Merge another ObjectSize into this one."""
        self.scalar += other.scalar
        self.objects.extend(other.objects)
        self.arrays.extend(other.arrays)
    def to_string(self) -> str:
        """Render the accumulated totals, one line per non-empty category."""
        lines = []
        lines.append('  scalar size: ' + str(self.scalar))
        if len(self.objects) > 0:
            pObjs = ', '.join([str(x) for x in self.objects])
            lines.append('  objects    : ' + pObjs)
        if len(self.arrays) > 0:
            lines.append('  arrays     : ' + str(len(self.arrays)))
        return '\n'.join(lines)
| true | true |
f73d11124216d1c078e19899f86739d1c526bec5 | 2,307 | py | Python | Protheus_WebApp/Modules/SIGAJURI/JURA108TestCase.py | HelenaAdrignoli/tir-script-samples | bb4f4ab3a49f723216c93f66a4395e5aa328b846 | [
"MIT"
] | 1 | 2021-12-07T18:16:27.000Z | 2021-12-07T18:16:27.000Z | Modules/SIGAJURI/JURA108TestCase.py | ccpn1988/TIR | fede6db280b35564b73e14a4588451dd88288bab | [
"MIT"
] | null | null | null | Modules/SIGAJURI/JURA108TestCase.py | ccpn1988/TIR | fede6db280b35564b73e14a4588451dd88288bab | [
"MIT"
] | null | null | null | from tir import Webapp
import unittest
class JURA108(unittest.TestCase):
	"""TIR UI scenario JURA108 for Protheus SIGAJURI.

	Drives program JURA106 through a legal follow-up search and exercises
	the custom export dialog ('Exportação Personalizada') end to end:
	add/remove columns, reorder, rename, filter/group, export, and save the
	configuration.  The scenario passes when `AssertTrue` confirms success.
	"""
	@classmethod
	def setUpClass(inst):
		# One-time login and navigation to program JURA106 for the whole case.
		inst.oHelper = Webapp()
		inst.oHelper.SetTIRConfig(config_name="user", value="daniel.frodrigues")
		inst.oHelper.SetTIRConfig(config_name="password", value="1")
		inst.oHelper.Setup('SIGAJURI','','T1','D MG 01 ','76')
		inst.oHelper.Program('JURA106')
		inst.oHelper.AddParameter("MV_JHBPESF", "", "1", "", "")
		inst.oHelper.SetParameters()
	def test_JURA108_CT001(self):
		"""Search a follow-up record and run a full custom-export round trip.

		NOTE(review): step order mirrors the recorded UI flow and is
		order-dependent; do not reorder the helper calls.
		"""
		# Locate the record to export.
		self.oHelper.SetValue("cValor","Contencioso - Fup",name_attr=True)
		self.oHelper.WaitFieldValue("NTA_CTIPO","")
		self.oHelper.SetValue('NTA_CTIPO', '00001')
		self.oHelper.ClickLabel('Pesquisar')
		self.oHelper.ClickGridCell("Código Assunto Jurídico",row=1)
		self.oHelper.ClickLabel('Exportação Personalizada')
		# Add-all / remove-all on the Acordos table.
		self.oHelper.SetValue("cCmbTabela","000011 - Acordos (NYP001)",name_attr=True)
		self.oHelper.SetButton("Add. Todos >>")
		self.oHelper.SetButton("Sim")
		self.oHelper.SetButton("<< Rem. Todos")
		self.oHelper.SetButton("Sim")
		# Pick individual columns from the Follow-ups table.
		self.oHelper.SetValue("cCmbTabela","000005 - Follow-ups (NTA001)",name_attr=True)
		self.oHelper.SetValue("cGetSearch","HORA",name_attr=True)
		self.oHelper.SetButton("Pesquisar")
		self.oHelper.SetButton("Adicionar >>")
		self.oHelper.SetValue("cGetSearch","DT FOLLOW-UP",name_attr=True)
		self.oHelper.SetButton("Pesquisar")
		self.oHelper.SetButton("Adicionar >>")
		self.oHelper.SetValue("cGetSearch","NOME DO PARTICI",name_attr=True)
		self.oHelper.SetButton("Pesquisar")
		self.oHelper.SetButton("Adicionar >>")
		# Reorder, rename, and drop one column.
		self.oHelper.SetButton("Mover para Baixo")
		self.oHelper.SetButton("Mover para Cima")
		self.oHelper.SetValue("cGetRename","Hora F-Up",name_attr=True)
		self.oHelper.SetButton("Renomear")
		self.oHelper.SetButton("<< Remove")
		# Filter/group, export, and persist the configuration.
		self.oHelper.SetButton("Filt. Agrup.")
		self.oHelper.SetButton("Ok")
		self.oHelper.SetButton("Exportar")
		self.oHelper.SetValue("cGetNewConfig","FOLLOW UP - MULTIPLOS RESPONSAVEIS",name_attr=True)
		self.oHelper.SetButton("Salvar Como")
		self.oHelper.SetButton("Confirmar")
		self.oHelper.SetButton("Fechar")
		self.oHelper.SetButton("Sair")
		self.oHelper.ClickLabel("Sair")
		self.oHelper.AssertTrue()
	@classmethod
	def tearDownClass(inst):
		# Close the browser session opened in setUpClass.
		inst.oHelper.TearDown()
# Run this scenario directly: python JURA108TestCase.py
if __name__ == '__main__':
	unittest.main()
| 38.45 | 92 | 0.739922 | from tir import Webapp
import unittest
class JURA108(unittest.TestCase):
	"""TIR UI scenario JURA108 (duplicate copy of the class earlier in this
	file, from the comment-stripped dataset column; code byte-identical).

	Exercises the custom export dialog of program JURA106 in SIGAJURI.
	"""
	@classmethod
	def setUpClass(inst):
		# One-time login and navigation to program JURA106.
		inst.oHelper = Webapp()
		inst.oHelper.SetTIRConfig(config_name="user", value="daniel.frodrigues")
		inst.oHelper.SetTIRConfig(config_name="password", value="1")
		inst.oHelper.Setup('SIGAJURI','','T1','D MG 01 ','76')
		inst.oHelper.Program('JURA106')
		inst.oHelper.AddParameter("MV_JHBPESF", "", "1", "", "")
		inst.oHelper.SetParameters()
	def test_JURA108_CT001(self):
		"""Search a follow-up record and run a full custom-export round trip."""
		self.oHelper.SetValue("cValor","Contencioso - Fup",name_attr=True)
		self.oHelper.WaitFieldValue("NTA_CTIPO","")
		self.oHelper.SetValue('NTA_CTIPO', '00001')
		self.oHelper.ClickLabel('Pesquisar')
		self.oHelper.ClickGridCell("Código Assunto Jurídico",row=1)
		self.oHelper.ClickLabel('Exportação Personalizada')
		self.oHelper.SetValue("cCmbTabela","000011 - Acordos (NYP001)",name_attr=True)
		self.oHelper.SetButton("Add. Todos >>")
		self.oHelper.SetButton("Sim")
		self.oHelper.SetButton("<< Rem. Todos")
		self.oHelper.SetButton("Sim")
		self.oHelper.SetValue("cCmbTabela","000005 - Follow-ups (NTA001)",name_attr=True)
		self.oHelper.SetValue("cGetSearch","HORA",name_attr=True)
		self.oHelper.SetButton("Pesquisar")
		self.oHelper.SetButton("Adicionar >>")
		self.oHelper.SetValue("cGetSearch","DT FOLLOW-UP",name_attr=True)
		self.oHelper.SetButton("Pesquisar")
		self.oHelper.SetButton("Adicionar >>")
		self.oHelper.SetValue("cGetSearch","NOME DO PARTICI",name_attr=True)
		self.oHelper.SetButton("Pesquisar")
		self.oHelper.SetButton("Adicionar >>")
		self.oHelper.SetButton("Mover para Baixo")
		self.oHelper.SetButton("Mover para Cima")
		self.oHelper.SetValue("cGetRename","Hora F-Up",name_attr=True)
		self.oHelper.SetButton("Renomear")
		self.oHelper.SetButton("<< Remove")
		self.oHelper.SetButton("Filt. Agrup.")
		self.oHelper.SetButton("Ok")
		self.oHelper.SetButton("Exportar")
		self.oHelper.SetValue("cGetNewConfig","FOLLOW UP - MULTIPLOS RESPONSAVEIS",name_attr=True)
		self.oHelper.SetButton("Salvar Como")
		self.oHelper.SetButton("Confirmar")
		self.oHelper.SetButton("Fechar")
		self.oHelper.SetButton("Sair")
		self.oHelper.ClickLabel("Sair")
		self.oHelper.AssertTrue()
	@classmethod
	def tearDownClass(inst):
		# Close the browser session opened in setUpClass.
		inst.oHelper.TearDown()
# Run this scenario directly.
if __name__ == '__main__':
	unittest.main()
| true | true |
f73d121b6b27b06f03eb59a172064c7c0937ce64 | 6,791 | py | Python | lib/moerdergraphall.py | orithena/moerderspiel | 7a11598cd80f26824376207805d3a937b9c1d831 | [
"MIT"
] | 16 | 2015-09-30T13:45:07.000Z | 2022-01-31T16:45:50.000Z | lib/moerdergraphall.py | orithena/moerderspiel | 7a11598cd80f26824376207805d3a937b9c1d831 | [
"MIT"
] | 5 | 2015-09-16T13:44:08.000Z | 2018-02-06T11:24:59.000Z | lib/moerdergraphall.py | orithena/moerderspiel | 7a11598cd80f26824376207805d3a937b9c1d831 | [
"MIT"
] | 4 | 2015-05-28T17:39:55.000Z | 2022-01-29T01:27:42.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os.path
import yapgvb as graph
from yapgvb import RenderingContext, CLIRenderError
import textwrap
import math
import colorsys
import pickle
from moerderklassen import *
from utils import colorgen
import utils
class MyRenderingContext(RenderingContext):
	""" Used only if yapgvb does not use libboost.
		W/o libboost, this class is used to modify the parameters passed to graphviz/dot.
	"""
	def render(self, graph, output_type, destfile):
		"""Render *graph* to *destfile* by shelling out to the dot CLI.

		destfile may be an open file object (closed and replaced by its
		name) or a path string.  Adds -Gsize=100,50 to cap output size.

		NOTE(review): `file` below is the Python 2 built-in file type --
		this module targets Python 2 (see also dict.has_key elsewhere
		in this file).
		"""
		from yapgvb import tempfile
		if isinstance(destfile,file):
			filename = destfile.name
			destfile.close()
		elif isinstance(destfile,str):
			filename = destfile
		else:
			raise Exception
		# Write the graph to a temporary .dot file and hand it to the CLI.
		temp = tempfile('.dot')
		graph._write_dot(temp)
		cmd = "%s -Gsize=100,50 -T%s -o%s %s" % (self._engine_executable, output_type, filename, temp)
		ier = os.system(cmd)
		if ier:
			#check_graphviz_working()
			raise CLIRenderError("Error code %s rendering %s" % (ier, temp))
		#os.remove(temp)  NOTE(review): temp .dot file deliberately left behind -- confirm intent
def moerdergraphall(game, filename, alledges=False, nodefontsize=8.0, edgefontsize=8.0, rounds=None):
	"""Render the murder graph of *game* across the requested rounds to *filename*.

	game         -- the game object (players, rounds, kill records)
	filename     -- output image path handed to graphviz
	alledges     -- True: also draw the planned kill order for every
	                participant (dashed edges); False: only actual kills
	nodefontsize -- font size of participant nodes
	edgefontsize -- font size of kill-description edge labels
	rounds       -- None for all rounds, a single round, or a list of rounds

	NOTE(review): this module targets Python 2 (`dict.has_key`, str.encode
	to bytes for labels).
	"""
	if rounds is None:
		rounds = game.rounds.values()
	elif type(rounds) is not list:
		rounds = [rounds]
	# G is the main Graph object
	G = graph.Digraph("Moerder")
	G.model = 'subset'
	G.overlap = 'compress'
	G.splines = True
	G.normalize = True
	G.packmode = 'graph'
	G.rankdir = 'LR'
	# a dict for indexing all nodes
	nodes = {}
	# we need to keep some of the nodes in mind
	prev_node = first_node = node = None
	# make a copy of the participant list so we don't jumble up the original list
	participants = sorted(rounds, key=lambda x: len(x.participants))[-1].participants[:]
	# Game-master node: target of "kicked by game master" edges below.
	gmnode = G.add_node('Game Master')
	gmnode.label = 'Game Master'
	gmnode.fontsize = nodefontsize
	gmnode.fontname = 'arial'
	gmnode.color = 'gray'
	gmnode.fontcolor = 'gray'
	gmnode.style = 'rounded'
	# Invisible anchor node; zero-weight participants get tied to it below.
	hnode = inode = G.add_node('invisible')
	inode.style = 'invisible'
	inode.pos = (0.0, 0.0)
	# Bail out with a placeholder graph when there are too many nodes to lay out.
	if len(participants) > 120:
		sorrynode = G.add_node(u'Sorry, zu viele Nodes in diesem Graph...')
		sorrynode.label = u'Sorry, zu viele Nodes in diesem Graph...'
		sorrynode.style = 'rounded,filled'
		sorrynode.fontsize = nodefontsize
		sorrynode.style = 'rounded,filled'
		sorrynode.penwidth = 2
		sorrynode.color = '#00003380'
		sorrynode.fillcolor = '#FFFFFF00'
		sorrynode.margin = 0.01
		# do the layout math and save to file
		if graph.__dict__.has_key('_yapgvb_py'):
			# if yapgvb works in python-only mode
			rc = MyRenderingContext()
			G.layout(graph.engines.dot, rendering_context=rc)
			G.render(filename, rendering_context=rc)
		else:
			# if yapgvb has libboost support compiled in
			G.layout(graph.engines.dot)
			G.render(filename)
		return
	massmurderers = game.getMassMurderer()
	massmurdererlist = [ player.public_id for player in massmurderers['killers'] ] if len(massmurderers) > 0 else []
	if not alledges:
		# if not admin/gameover view: sort nodes prior to adding them to the graph
		participants.sort(key = lambda p: p.player.name + p.player.info)
	nodecount = len(participants)
	nodesperline = math.trunc(math.sqrt(nodecount))
	# for each participant, add a node to the graph bearing his name
	nodenumber = 0
	for participant in participants:
		nodenumber += 1
		name = participant.player.name
		if len(participant.player.info) > 0:
			name += "\\n" + participant.player.info
		name = utils.dotescape(name)
		node = G.add_node(participant.player.public_id)
		node.label = name.encode('utf-8')
		node.fontsize = nodefontsize
		node.style = 'rounded,filled'
		node.penwidth = 2
		node.color = '#00003380'
		node.fillcolor = '#FFFFFF00'
		node.margin = 0.01
		# Node weight = total involvement (deaths + kills) across the game.
		nodeweight = game.getDeathsCount(participant) + game.getKillsCount(participant)
		#node.group = str(nodeweight)
		node.pos = ( nodenumber % nodesperline, nodenumber / nodesperline)
		if nodeweight == 0:
			# Tie uninvolved participants loosely to the invisible anchor.
			iedge = G.add_edge(inode, node)
			iedge.style = 'invisible'
			iedge.arrowhead = 'none'
			iedge.weight = 0.1
			node.pos = (0.0, 0.0)
			#iedge.constraint = False
		if not prev_node:
			first_node = node
		# put all the nodes into a dict so we could find them fast by the player's id (needed later)
		nodes[participant.player.public_id] = node
		prev_node = node
		node.fontname = 'arial'
		# kicked participants are gray
		if participant.killed() and participant.killedby.killer is None:
			#node.color = '#FF6666FF'
			#node.fontcolor = '#33333388'
			#node.fillcolor = '#66666622'
			node.style += ',dashed'
		# mass murderers are black
		if participant.player.public_id in massmurdererlist:
			node.color = 'black'
			node.fillcolor = 'black'
			node.fontcolor = 'white'
		# dead participants are red
		if (game.getDeathsCount(participant) >= len(game.rounds)):
			node.color = '#FF0000FF'
			node.penwidth = 2
			#node.fontcolor = '#FFFFFFFF'
			#node.fillcolor = '#FF0000FF'
	# One distinct edge color per round.
	colorgenerator = colorgen(0.86)
	for round in game.rounds.values():
		edgecolor = next(colorgenerator)
		if round not in rounds:
			continue
		for participant in round.participants:
			if alledges or participant.killed():
				# Dashed edge: the planned kill order (initial killer -> victim).
				edge = G.add_edge(nodes[participant.getInitialKiller().player.public_id], nodes[participant.player.public_id])
				edge.color = edgecolor
				edge.style = 'dashed'
				edge.penwidth = 2
				edge.weight = 6.0
				#edge.constraint = False
			if participant.killed():
				if not participant.killedby.killer is None:
					# normal case
					edge = G.add_edge(nodes[participant.killedby.killer.player.public_id], nodes[participant.player.public_id])
				else:
					# special case of a game master kill
					edge = G.add_edge(gmnode, nodes[participant.player.public_id])
				edge.color = edgecolor
				edge.fontcolor = 'red'
				edge.style = 'solid'
				edge.penwidth = 4
				edge.weight = 10.0
				# set edge label to kill description
				label = utils.dateformat(participant.killedby.date) + ":\\n"
				maxlinelen = max(24, math.trunc(math.ceil(math.sqrt(6 * len(participant.killedby.reason)))))
				label += "\\n".join(textwrap.wrap(participant.killedby.reason, maxlinelen)).replace('"', "'")
				edge.label = ''.join([ c for c in label.encode('utf-8') if ord(c) < 2048])
				edge.fontsize = edgefontsize
				edge.fontname = 'arial'
	# do the layout math and save to file
	if graph.__dict__.has_key('_yapgvb_py'):
		# if yapgvb works in python-only mode
		rc = MyRenderingContext()
		G.layout(graph.engines.dot, rendering_context=rc)
		G.render(filename, rendering_context=rc)
	else:
		# if yapgvb has libboost support compiled in
		G.layout(graph.engines.dot)
		G.render(filename)
def _loadgame(gamefile):
	"""Load and return a pickled game object from *gamefile*.

	Fix: pickle streams are binary, but the original opened the file with
	the bogus mode string 'rd' (rejected outright by Python 3 and never a
	valid binary mode).  Open with 'rb' and use a context manager so the
	handle is closed even if unpickling raises.
	"""
	with open(gamefile, 'rb') as infile:
		return pickle.load(infile)
# CLI entry point: python moerdergraphall.py <pickled-savegame> <output-image>
# Renders the full graph with all planned-kill edges shown.
if __name__ == "__main__":
	import sys
	game = _loadgame(sys.argv[1])
	moerdergraphall(game, sys.argv[2], alledges=True)
| 33.453202 | 114 | 0.709616 |
import sys
import os.path
import yapgvb as graph
from yapgvb import RenderingContext, CLIRenderError
import textwrap
import math
import colorsys
import pickle
from moerderklassen import *
from utils import colorgen
import utils
class MyRenderingContext(RenderingContext):
	"""CLI-based rendering context (duplicate copy; see the class earlier in
	this file).  Shells out to the dot executable with a forced -Gsize.
	"""
	def render(self, graph, output_type, destfile):
		from yapgvb import tempfile
		# NOTE(review): `file` is the Python 2 built-in file type.
		if isinstance(destfile,file):
			filename = destfile.name
			destfile.close()
		elif isinstance(destfile,str):
			filename = destfile
		else:
			raise Exception
		# Write the graph to a temporary .dot file and hand it to the CLI.
		temp = tempfile('.dot')
		graph._write_dot(temp)
		cmd = "%s -Gsize=100,50 -T%s -o%s %s" % (self._engine_executable, output_type, filename, temp)
		ier = os.system(cmd)
		if ier:
			raise CLIRenderError("Error code %s rendering %s" % (ier, temp))
def moerdergraphall(game, filename, alledges=False, nodefontsize=8.0, edgefontsize=8.0, rounds=None):
	"""Render the murder graph of *game* across the requested rounds to *filename*.

	Duplicate copy of the renderer defined earlier in this file.  Fix: a
	comment-stripping pass had truncated four color string literals at the
	'#' character (e.g. ``sorrynode.color = '``), leaving unterminated
	strings and therefore a syntax error; the literals are restored here
	from the intact copy above.
	"""
	if rounds is None:
		rounds = game.rounds.values()
	elif type(rounds) is not list:
		rounds = [rounds]
	G = graph.Digraph("Moerder")
	G.model = 'subset'
	G.overlap = 'compress'
	G.splines = True
	G.normalize = True
	G.packmode = 'graph'
	G.rankdir = 'LR'
	nodes = {}
	prev_node = first_node = node = None
	participants = sorted(rounds, key=lambda x: len(x.participants))[-1].participants[:]
	# Game-master node: target of "kicked by game master" edges below.
	gmnode = G.add_node('Game Master')
	gmnode.label = 'Game Master'
	gmnode.fontsize = nodefontsize
	gmnode.fontname = 'arial'
	gmnode.color = 'gray'
	gmnode.fontcolor = 'gray'
	gmnode.style = 'rounded'
	hnode = inode = G.add_node('invisible')
	inode.style = 'invisible'
	inode.pos = (0.0, 0.0)
	# Too many nodes: render a placeholder graph instead.
	if len(participants) > 120:
		sorrynode = G.add_node(u'Sorry, zu viele Nodes in diesem Graph...')
		sorrynode.label = u'Sorry, zu viele Nodes in diesem Graph...'
		sorrynode.style = 'rounded,filled'
		sorrynode.fontsize = nodefontsize
		sorrynode.style = 'rounded,filled'
		sorrynode.penwidth = 2
		sorrynode.color = '#00003380'  # restored truncated literal
		sorrynode.fillcolor = '#FFFFFF00'  # restored truncated literal
		sorrynode.margin = 0.01
		# do the layout math and save to file
		if graph.__dict__.has_key('_yapgvb_py'):
			# if yapgvb works in python-only mode
			rc = MyRenderingContext()
			G.layout(graph.engines.dot, rendering_context=rc)
			G.render(filename, rendering_context=rc)
		else:
			# if yapgvb has libboost support compiled in
			G.layout(graph.engines.dot)
			G.render(filename)
		return
	massmurderers = game.getMassMurderer()
	massmurdererlist = [ player.public_id for player in massmurderers['killers'] ] if len(massmurderers) > 0 else []
	if not alledges:
		# if not admin/gameover view: sort nodes prior to adding them to the graph
		participants.sort(key = lambda p: p.player.name + p.player.info)
	nodecount = len(participants)
	nodesperline = math.trunc(math.sqrt(nodecount))
	# for each participant, add a node to the graph bearing his name
	nodenumber = 0
	for participant in participants:
		nodenumber += 1
		name = participant.player.name
		if len(participant.player.info) > 0:
			name += "\\n" + participant.player.info
		name = utils.dotescape(name)
		node = G.add_node(participant.player.public_id)
		node.label = name.encode('utf-8')
		node.fontsize = nodefontsize
		node.style = 'rounded,filled'
		node.penwidth = 2
		node.color = '#00003380'  # restored truncated literal
		node.fillcolor = '#FFFFFF00'  # restored truncated literal
		node.margin = 0.01
		nodeweight = game.getDeathsCount(participant) + game.getKillsCount(participant)
		#node.group = str(nodeweight)
		node.pos = ( nodenumber % nodesperline, nodenumber / nodesperline)
		if nodeweight == 0:
			iedge = G.add_edge(inode, node)
			iedge.style = 'invisible'
			iedge.arrowhead = 'none'
			iedge.weight = 0.1
			node.pos = (0.0, 0.0)
			#iedge.constraint = False
		if not prev_node:
			first_node = node
		# put all the nodes into a dict so we could find them fast by the player's id (needed later)
		nodes[participant.player.public_id] = node
		prev_node = node
		node.fontname = 'arial'
		# kicked participants are dashed
		if participant.killed() and participant.killedby.killer is None:
			node.style += ',dashed'
		# mass murderers are black
		if participant.player.public_id in massmurdererlist:
			node.color = 'black'
			node.fillcolor = 'black'
			node.fontcolor = 'white'
		# dead participants are red
		if (game.getDeathsCount(participant) >= len(game.rounds)):
			node.color = '#FF0000FF'
			node.penwidth = 2
	colorgenerator = colorgen(0.86)
	for round in game.rounds.values():
		edgecolor = next(colorgenerator)
		if round not in rounds:
			continue
		for participant in round.participants:
			if alledges or participant.killed():
				edge = G.add_edge(nodes[participant.getInitialKiller().player.public_id], nodes[participant.player.public_id])
				edge.color = edgecolor
				edge.style = 'dashed'
				edge.penwidth = 2
				edge.weight = 6.0
			if participant.killed():
				if not participant.killedby.killer is None:
					edge = G.add_edge(nodes[participant.killedby.killer.player.public_id], nodes[participant.player.public_id])
				else:
					# special case of a game master kill
					edge = G.add_edge(gmnode, nodes[participant.player.public_id])
				edge.color = edgecolor
				edge.fontcolor = 'red'
				edge.style = 'solid'
				edge.penwidth = 4
				edge.weight = 10.0
				# set edge label to kill description
				label = utils.dateformat(participant.killedby.date) + ":\\n"
				maxlinelen = max(24, math.trunc(math.ceil(math.sqrt(6 * len(participant.killedby.reason)))))
				label += "\\n".join(textwrap.wrap(participant.killedby.reason, maxlinelen)).replace('"', "'")
				edge.label = ''.join([ c for c in label.encode('utf-8') if ord(c) < 2048])
				edge.fontsize = edgefontsize
				edge.fontname = 'arial'
	# do the layout math and save to file
	if graph.__dict__.has_key('_yapgvb_py'):
		# if yapgvb works in python-only mode
		rc = MyRenderingContext()
		G.layout(graph.engines.dot, rendering_context=rc)
		G.render(filename, rendering_context=rc)
	else:
		# if yapgvb has libboost support compiled in
		G.layout(graph.engines.dot)
		G.render(filename)
def _loadgame(gamefile):
	"""Load and return a pickled game object from *gamefile*.

	Duplicate copy of the loader earlier in this file; same fix applied:
	pickle streams are binary, so open with 'rb' (the original used the
	invalid text mode 'rd') and close the handle via a context manager.
	"""
	with open(gamefile, 'rb') as infile:
		return pickle.load(infile)
# CLI entry point (duplicate copy): python <script> <pickled-savegame> <output-image>
if __name__ == "__main__":
	import sys
	game = _loadgame(sys.argv[1])
	moerdergraphall(game, sys.argv[2], alledges=True)
| true | true |
f73d133f1804d0833d771530b775e1da1e558e30 | 853 | py | Python | src.py | duldiev/Assignment-2-Scrapping | a9dbb4bb14b7fe0a1c5ec6eba73491008ff8da52 | [
"MIT"
] | null | null | null | src.py | duldiev/Assignment-2-Scrapping | a9dbb4bb14b7fe0a1c5ec6eba73491008ff8da52 | [
"MIT"
] | null | null | null | src.py | duldiev/Assignment-2-Scrapping | a9dbb4bb14b7fe0a1c5ec6eba73491008ff8da52 | [
"MIT"
] | null | null | null | from bs4 import BeautifulSoup as soup
from selenium import webdriver
class Scrapper:
    """Scrapes the latest news articles for a cryptocurrency from coinmarketcap.com."""

    def getArticles(self, cryptoName):
        """Print headline and summary of the latest news for *cryptoName*.

        cryptoName -- coinmarketcap currency slug, e.g. 'bitcoin'.
        """
        url = 'https://coinmarketcap.com/currencies/' + cryptoName + '/news/'
        driver = webdriver.Firefox()
        try:
            driver.get(url)
            page = driver.page_source
        finally:
            # Fix: the original never released the browser session (leak).
            driver.quit()
        page_soup = soup(page, 'html.parser')
        # Fix: the original passed dict literals with duplicate "class" keys;
        # Python silently keeps only the last one, so these single-key
        # filters preserve the effective behavior while removing the
        # misleading literals.
        headers = page_soup.findAll("h3", {"class": "gEZmSc"})
        paragraphs = page_soup.findAll("p", {"class": "ddtKCV"})
        print('Latest news about', cryptoName.capitalize(), end=':')
        print()
        for i in range(0, min(len(headers), len(paragraphs))):
            print('Article', (i + 1), end=':')
            print()
            print(headers[i].text.strip(), '\n', 'More:', paragraphs[i].text.strip(), '\n')
from selenium import webdriver
class Scrapper:
    """Crypto news scraper (duplicate copy; see the class earlier in this file)."""

    def getArticles(self, cryptoName):
        """Print headline and summary of the latest news for *cryptoName*."""
        url = 'https://coinmarketcap.com/currencies/' + cryptoName + '/news/'
        driver = webdriver.Firefox()
        try:
            driver.get(url)
            page = driver.page_source
        finally:
            # Fix: release the browser session (original leaked it).
            driver.quit()
        page_soup = soup(page, 'html.parser')
        # Fix: duplicate "class" keys collapsed to the surviving value
        # (Python dict literals keep only the last duplicate key).
        headers = page_soup.findAll("h3", {"class": "gEZmSc"})
        paragraphs = page_soup.findAll("p", {"class": "ddtKCV"})
        print('Latest news about', cryptoName.capitalize(), end=':')
        print()
        for i in range(0, min(len(headers), len(paragraphs))):
            print('Article', (i + 1), end=':')
            print()
            print(headers[i].text.strip(), '\n', 'More:', paragraphs[i].text.strip(), '\n')
f73d13f885dbff2583adeceb1df3697205245259 | 367 | py | Python | main_app/forms.py | Blankphrase/social_reach | 2dd49799c4f659d823c4d50ac6b79fe58f0bbddc | [
"MIT"
] | null | null | null | main_app/forms.py | Blankphrase/social_reach | 2dd49799c4f659d823c4d50ac6b79fe58f0bbddc | [
"MIT"
] | 3 | 2020-06-05T18:56:01.000Z | 2021-06-10T20:48:50.000Z | main_app/forms.py | Blankphrase/social_reach | 2dd49799c4f659d823c4d50ac6b79fe58f0bbddc | [
"MIT"
] | 1 | 2018-09-11T08:48:43.000Z | 2018-09-11T08:48:43.000Z | from django import forms
class UserSignupForm(forms.Form):
    """Signup form collecting a first and last name.

    `signup(request, user)` copies the cleaned name fields onto the newly
    created user and saves it.
    """

    first_name = forms.CharField(max_length=30, label='First Name')
    last_name = forms.CharField(max_length=30, label='Last Name')

    def signup(self, request, user):
        """Persist the cleaned name fields onto *user* (request is unused)."""
        for field_name in ('first_name', 'last_name'):
            setattr(user, field_name, self.cleaned_data[field_name])
        user.save()
| 30.583333 | 67 | 0.700272 | from django import forms
class UserSignupForm(forms.Form):
    """Signup form with first/last name fields (duplicate copy; see the
    class earlier in this file)."""
    first_name = forms.CharField(max_length=30, label='First Name')
    last_name = forms.CharField(max_length=30, label='Last Name')
    def signup(self, request, user):
        # Copy the cleaned name fields onto the new user and persist it.
        user.first_name = self.cleaned_data['first_name']
        user.last_name = self.cleaned_data['last_name']
        user.save()
| true | true |
f73d14dc5923df77021909a76535feb2c4b6de4b | 4,238 | py | Python | src/fhir_helpers/utils.py | anthem-ai/fhir-helpers | 803653f7ef64dbb41bdc24b06cb16425964058aa | [
"Apache-2.0"
] | null | null | null | src/fhir_helpers/utils.py | anthem-ai/fhir-helpers | 803653f7ef64dbb41bdc24b06cb16425964058aa | [
"Apache-2.0"
] | null | null | null | src/fhir_helpers/utils.py | anthem-ai/fhir-helpers | 803653f7ef64dbb41bdc24b06cb16425964058aa | [
"Apache-2.0"
] | null | null | null | import operator
from datetime import date, datetime
from typing import Any, Literal, Optional
from dateutil import tz
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from google.protobuf import message
from proto.google.fhir.proto.r4.core import datatypes_pb2
ComparatorType = Literal["=", ">", ">=", "<", "<="]
ComparatorFunc = {
"=": operator.eq,
">": operator.gt,
">=": operator.ge,
"<": operator.lt,
"<=": operator.le,
}
def tz_conv(s: str) -> int:
    """Convert a timezone offset string such as '-05:00' into seconds.

    Anything unparseable (empty string, 'Z', garbage) yields 0.
    """
    try:
        sign = 1
        text = s
        if text.startswith("-"):
            sign = -1
            text = text[1:]
        fields = [int(part) for part in text.split(":")]
        return sign * (fields[0] * 3600 + fields[1] * 60)
    except Exception:
        return 0
# https://stackoverflow.com/questions/5802108/how-to-check-if-a-datetime-object-is-localized-with-pytz
def _is_tz_aware(dt: datetime) -> bool:
    # Aware means tzinfo is present *and* yields a concrete UTC offset.
    return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None
def _ensure_tz_aware(dt: datetime) -> datetime:
    # Default naive datetimes to UTC so later comparisons never raise TypeError.
    if not _is_tz_aware(dt):
        return dt.replace(tzinfo=tz.tzutc())
    return dt
def get_years_since_datetime(resource_date: datetime) -> int:
    """Whole years elapsed between *resource_date* and now (UTC)."""
    return relativedelta(
        datetime.now(tz=tz.tzutc()), _ensure_tz_aware(resource_date)
    ).years
def get_years_since_date(resource_date: date) -> int:
    """Whole years elapsed between *resource_date* and today."""
    return relativedelta(date.today(), resource_date).years
def parse_iso_datetime(str_datetime: str) -> Optional[datetime]:
    """Parse a datetime string into an aware datetime; empty string -> None."""
    return _ensure_tz_aware(parse(str_datetime)) if str_datetime else None
def parse_iso_date(str_datetime: str) -> Optional[date]:
    """Parse a date string into a date; empty string -> None."""
    return parse(str_datetime).date() if str_datetime else None
def convert_proto_date_time(
    proto_datetime: datatypes_pb2.DateTime,
) -> Optional[datetime]:
    """Convert a FHIR proto DateTime into an aware datetime; unset -> None."""
    if not proto_datetime.ByteSize():
        # ByteSize() == 0 means the message was never populated.
        return None
    # value_us is microseconds since the epoch; attach the message's own
    # timezone offset (parsed by tz_conv into seconds).
    return datetime.fromtimestamp(
        proto_datetime.value_us / 1_000_000,
        tz=tz.tzoffset(None, tz_conv(proto_datetime.timezone)),
    )
def convert_proto_date(
    proto_date: datatypes_pb2.Date,
) -> Optional[date]:
    """Convert a FHIR proto Date into a date; unset -> None."""
    if not proto_date.ByteSize():
        return None
    # NOTE(review): no tz applied here, so the date is taken in local time
    # of the interpreter -- confirm that is intended.
    return datetime.fromtimestamp(
        proto_date.value_us / 1_000_000,
    ).date()
def check_after_date(
    after_date: datetime,
    resource_date: Optional[datetime] = None,
) -> bool:
    """True if *resource_date* is strictly later than *after_date*.

    A missing resource_date yields False; naive datetimes are treated as UTC.
    """
    return (
        _ensure_tz_aware(after_date) < _ensure_tz_aware(resource_date)
        if resource_date
        else False
    )
def check_before_date(
    before_date: datetime,
    resource_date: Optional[datetime] = None,
) -> bool:
    """True if *resource_date* is strictly earlier than *before_date*.

    A missing resource_date yields False; naive datetimes are treated as UTC.
    """
    return (
        _ensure_tz_aware(before_date) > _ensure_tz_aware(resource_date)
        if resource_date
        else False
    )
def ensure_non_null_date(dt: Optional[datetime]) -> datetime:
    """Substitute a far-past sentinel (1900-01-01 UTC) for a missing datetime."""
    if not dt:
        return datetime(1900, 1, 1, tzinfo=tz.tzutc())
    return dt
# Rough calculation, could be improved, but reasonably close
def calc_time_ago(years: int = 0, months: int = 0, days: int = 0) -> datetime:
    """Aware UTC datetime that lies the given span before now."""
    return datetime.now(tz=tz.tzutc()) - relativedelta(
        years=years, months=months, days=days
    )
def search(val: Any, search_str: str) -> bool:
    """Case-insensitively test whether *search_str* occurs in any string
    nested inside *val*.

    Dicts are searched by value, lists element-wise, both recursively.
    Values of any other type (ints, None, ...) never match.
    """
    kind = type(val)
    if kind is dict:
        return any(search(item, search_str) for item in val.values())
    if kind is list:
        return any(search(item, search_str) for item in val)
    if kind is str:
        return search_str.lower() in val.lower()
    return False
# https://stackoverflow.com/questions/29148391/looping-over-protocol-buffers-attributes-in-python
def search_proto(proto: message.Message, search_str: str) -> bool:
    """Case-insensitively search every populated string field of *proto*,
    recursing into nested and repeated sub-messages.

    Only fields that are set appear in ListFields(), so unset fields are
    never examined.
    """
    for descriptor, value in proto.ListFields():
        if descriptor.type == descriptor.TYPE_MESSAGE:
            # Is List
            if descriptor.label == descriptor.LABEL_REPEATED:
                for val in value:
                    if search_proto(val, search_str):
                        return True
            # Is Object
            elif search_proto(value, search_str):
                return True
        elif descriptor.type == descriptor.TYPE_STRING and type(value) is str:
            if search_str.lower() in value.lower():
                return True
    return False
| 26.993631 | 102 | 0.646531 | import operator
from datetime import date, datetime
from typing import Any, Literal, Optional
from dateutil import tz
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from google.protobuf import message
from proto.google.fhir.proto.r4.core import datatypes_pb2
# NOTE(review): from here down this region duplicates the definitions
# earlier in this file (comment-stripped dataset column); code unchanged.
# Comparator tokens and their rich-comparison functions: ComparatorFunc[op](a, b).
ComparatorType = Literal["=", ">", ">=", "<", "<="]
ComparatorFunc = {
    "=": operator.eq,
    ">": operator.gt,
    ">=": operator.ge,
    "<": operator.lt,
    "<=": operator.le,
}
def tz_conv(s: str) -> int:
    """Convert a timezone offset string like '-05:00' to seconds; junk -> 0."""
    try:
        base = 1
        if s[0] == "-":
            base = -1
            s = s[1:]
        parts = [int(x) for x in s.split(":")]
        return base * (parts[0] * 3600 + parts[1] * 60)
    except Exception:
        return 0
def _is_tz_aware(dt: datetime) -> bool:
    # Aware means tzinfo is present *and* yields a concrete UTC offset.
    return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None
def _ensure_tz_aware(dt: datetime) -> datetime:
    # Default naive datetimes to UTC.
    if not _is_tz_aware(dt):
        return dt.replace(tzinfo=tz.tzutc())
    return dt
def get_years_since_datetime(resource_date: datetime) -> int:
    """Whole years elapsed between *resource_date* and now (UTC)."""
    return relativedelta(
        datetime.now(tz=tz.tzutc()), _ensure_tz_aware(resource_date)
    ).years
def get_years_since_date(resource_date: date) -> int:
    """Whole years elapsed between *resource_date* and today."""
    return relativedelta(date.today(), resource_date).years
def parse_iso_datetime(str_datetime: str) -> Optional[datetime]:
    """Parse a datetime string into an aware datetime; empty string -> None."""
    return _ensure_tz_aware(parse(str_datetime)) if str_datetime else None
def parse_iso_date(str_datetime: str) -> Optional[date]:
    """Parse a date string into a date; empty string -> None."""
    return parse(str_datetime).date() if str_datetime else None
def convert_proto_date_time(
    proto_datetime: datatypes_pb2.DateTime,
) -> Optional[datetime]:
    """Convert a FHIR proto DateTime to an aware datetime; unset -> None.
    (Duplicate copy; see the documented version earlier in this file.)"""
    if not proto_datetime.ByteSize():
        return None
    return datetime.fromtimestamp(
        proto_datetime.value_us / 1_000_000,
        tz=tz.tzoffset(None, tz_conv(proto_datetime.timezone)),
    )
def convert_proto_date(
    proto_date: datatypes_pb2.Date,
) -> Optional[date]:
    """Convert a FHIR proto Date to a date; unset -> None."""
    if not proto_date.ByteSize():
        return None
    return datetime.fromtimestamp(
        proto_date.value_us / 1_000_000,
    ).date()
def check_after_date(
    after_date: datetime,
    resource_date: Optional[datetime] = None,
) -> bool:
    """True if *resource_date* is strictly later than *after_date*; missing -> False."""
    return (
        _ensure_tz_aware(after_date) < _ensure_tz_aware(resource_date)
        if resource_date
        else False
    )
def check_before_date(
    before_date: datetime,
    resource_date: Optional[datetime] = None,
) -> bool:
    """True if *resource_date* is strictly earlier than *before_date*; missing -> False."""
    return (
        _ensure_tz_aware(before_date) > _ensure_tz_aware(resource_date)
        if resource_date
        else False
    )
def ensure_non_null_date(dt: Optional[datetime]) -> datetime:
    """Substitute a far-past sentinel (1900-01-01 UTC) for a missing datetime."""
    if not dt:
        return datetime(1900, 1, 1, tzinfo=tz.tzutc())
    return dt
def calc_time_ago(years: int = 0, months: int = 0, days: int = 0) -> datetime:
    """Aware UTC datetime that lies the given span before now."""
    return datetime.now(tz=tz.tzutc()) - relativedelta(
        years=years, months=months, days=days
    )
def search(val: Any, search_str: str) -> bool:
    """Case-insensitive recursive substring search through dicts/lists/strings.
    (Duplicate copy; see the documented version earlier in this file.)"""
    if type(val) is dict:
        for k, v in val.items():
            if search(v, search_str):
                return True
    if type(val) is list:
        for v in val:
            if search(v, search_str):
                return True
    if type(val) is str:
        if search_str.lower() in val.lower():
            return True
    return False
def search_proto(proto: message.Message, search_str: str) -> bool:
    """Case-insensitive search over every set string field of *proto*,
    recursing into nested and repeated sub-messages."""
    for descriptor, value in proto.ListFields():
        if descriptor.type == descriptor.TYPE_MESSAGE:
            # Repeated sub-messages.
            if descriptor.label == descriptor.LABEL_REPEATED:
                for val in value:
                    if search_proto(val, search_str):
                        return True
            # Single nested message.
            elif search_proto(value, search_str):
                return True
        elif descriptor.type == descriptor.TYPE_STRING and type(value) is str:
            if search_str.lower() in value.lower():
                return True
    return False
| true | true |
f73d14e380b950b8366f085b4d0a7302ccc13da9 | 1,435 | py | Python | utils/test.py | Heedeok/OT_deepsort_kalman | 174e3d618a2340c435ff945edffd00ab3d0a7a97 | [
"MIT"
] | 3 | 2021-03-02T05:55:46.000Z | 2021-09-09T05:43:00.000Z | utils/test.py | Heedeok/OT_deepsort_kalman | 174e3d618a2340c435ff945edffd00ab3d0a7a97 | [
"MIT"
] | null | null | null | utils/test.py | Heedeok/OT_deepsort_kalman | 174e3d618a2340c435ff945edffd00ab3d0a7a97 | [
"MIT"
] | null | null | null | '''
Date of modification : 2021.01.22
Code Summary : realsense camera python code 최종 ë²„ì „
Input option 0 : D435i (default)
1 : L515
2 : D445i
'''
#####################################################
## Import ##
#####################################################
import numpy as np
import cv2
from new_utils_RS import Realsense
#####################################################
## etc ##
#####################################################
'''
clear the interpreter console.
Method 1,2
'''
# import os
# os.system('cls||clear')
# print ("\033c")
#####################################################
## Stream ##
#####################################################
def main(device_option: int = 1) -> None:
    """Stream frames from a RealSense camera and print the 3-D point at
    the image centre until 'q' or ESC is pressed.

    Args:
        device_option: Camera selector forwarded to ``Realsense``
            (per the module header: 0 = D435i, 1 = L515, 2 = D445i).
            Defaults to 1 (L515), matching the original behavior.
    """
    sensor = Realsense(device_option)
    # NOTE(review): return value is discarded here, but the call may
    # initialise internal state -- kept for safety; confirm in new_utils_RS.
    sensor.get_Intrinsics()
    while True:
        color_img, depth_img = sensor.output_image()
        print(depth_img.shape)
        # Depth point at (640, 360) -- presumably the centre of a
        # 1280x720 stream; TODO confirm the configured resolution.
        x, y, z = sensor.get_Depth_Point(640, 360)
        cv2.imshow('test', color_img)
        print('x : {}, y : {}, z : {}'.format(x, y, z))
        key = cv2.waitKey(1)
        # Quit on 'q' or ESC (27).
        if key & 0xFF == ord('q') or key == 27:
            cv2.destroyAllWindows()
            break


if __name__ == "__main__":
    main()
f73d169f0c4f6941d6d154dba79e8ea19c3c2e6a | 43,484 | py | Python | uhd_restpy/testplatform/sessions/ixnetwork/traffic/trafficitem/trafficitem.py | Vibaswan/ixnetwork_restpy | 239fedc7050890746cbabd71ea1e91c68d9e5cad | [
"MIT"
] | null | null | null | uhd_restpy/testplatform/sessions/ixnetwork/traffic/trafficitem/trafficitem.py | Vibaswan/ixnetwork_restpy | 239fedc7050890746cbabd71ea1e91c68d9e5cad | [
"MIT"
] | null | null | null | uhd_restpy/testplatform/sessions/ixnetwork/traffic/trafficitem/trafficitem.py | Vibaswan/ixnetwork_restpy | 239fedc7050890746cbabd71ea1e91c68d9e5cad | [
"MIT"
] | null | null | null | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class TrafficItem(Base):
"""This object specifies the particular traffic item related properties.
The TrafficItem class encapsulates a list of trafficItem resources that are managed by the user.
A list of resources can be retrieved from the server using the TrafficItem.find() method.
The list can be managed by using the TrafficItem.add() and TrafficItem.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'trafficItem'
_SDM_ATT_MAP = {
'AllowSelfDestined': 'allowSelfDestined',
'BiDirectional': 'biDirectional',
'EgressEnabled': 'egressEnabled',
'EnableDynamicMplsLabelValues': 'enableDynamicMplsLabelValues',
'EnableMacsecEgressOnlyAutoConfig': 'enableMacsecEgressOnlyAutoConfig',
'Enabled': 'enabled',
'Errors': 'errors',
'FlowGroupCount': 'flowGroupCount',
'FrerDuplicateElimination': 'frerDuplicateElimination',
'HasOpenFlow': 'hasOpenFlow',
'HostsPerNetwork': 'hostsPerNetwork',
'InterAsBgpPreference': 'interAsBgpPreference',
'InterAsLdpPreference': 'interAsLdpPreference',
'LabelPreferences': 'labelPreferences',
'MaxNumberOfVpnLabelStack': 'maxNumberOfVpnLabelStack',
'MergeDestinations': 'mergeDestinations',
'MulticastForwardingMode': 'multicastForwardingMode',
'Name': 'name',
'NumVlansForMulticastReplication': 'numVlansForMulticastReplication',
'OrdinalNo': 'ordinalNo',
'OriginatorType': 'originatorType',
'RegenerateCount': 'regenerateCount',
'RoundRobinPacketOrdering': 'roundRobinPacketOrdering',
'RouteMesh': 'routeMesh',
'SrcDestMesh': 'srcDestMesh',
'State': 'state',
'Suspend': 'suspend',
'TrafficItemType': 'trafficItemType',
'TrafficType': 'trafficType',
'TransmitMode': 'transmitMode',
'TransportLdpPreference': 'transportLdpPreference',
'TransportRsvpTePreference': 'transportRsvpTePreference',
'UseControlPlaneFrameSize': 'useControlPlaneFrameSize',
'UseControlPlaneRate': 'useControlPlaneRate',
'Warnings': 'warnings',
}
def __init__(self, parent):
super(TrafficItem, self).__init__(parent)
@property
def ConfigElement(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.configelement.configelement.ConfigElement): An instance of the ConfigElement class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.configelement.configelement import ConfigElement
return ConfigElement(self)
@property
def EgressTracking(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.egresstracking.egresstracking.EgressTracking): An instance of the EgressTracking class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.egresstracking.egresstracking import EgressTracking
return EgressTracking(self)
@property
def EndpointSet(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.endpointset.endpointset.EndpointSet): An instance of the EndpointSet class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.endpointset.endpointset import EndpointSet
return EndpointSet(self)
@property
def HighLevelStream(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.highlevelstream.highlevelstream.HighLevelStream): An instance of the HighLevelStream class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.highlevelstream.highlevelstream import HighLevelStream
return HighLevelStream(self)
@property
def Tracking(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.tracking.tracking.Tracking): An instance of the Tracking class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.tracking.tracking import Tracking
return Tracking(self)
@property
def AllowSelfDestined(self):
"""
Returns
-------
- bool: If true, this helps to send traffic from routes on an Ixia port to other routes on the same Ixia port.
"""
return self._get_attribute(self._SDM_ATT_MAP['AllowSelfDestined'])
@AllowSelfDestined.setter
def AllowSelfDestined(self, value):
self._set_attribute(self._SDM_ATT_MAP['AllowSelfDestined'], value)
@property
def BiDirectional(self):
"""
Returns
-------
- bool: If true, this enables traffic to be sent in forward and reverse destination.
"""
return self._get_attribute(self._SDM_ATT_MAP['BiDirectional'])
@BiDirectional.setter
def BiDirectional(self, value):
self._set_attribute(self._SDM_ATT_MAP['BiDirectional'], value)
@property
def EgressEnabled(self):
"""
Returns
-------
- bool: Enables the egress.
"""
return self._get_attribute(self._SDM_ATT_MAP['EgressEnabled'])
@EgressEnabled.setter
def EgressEnabled(self, value):
self._set_attribute(self._SDM_ATT_MAP['EgressEnabled'], value)
@property
def EnableDynamicMplsLabelValues(self):
"""
Returns
-------
- bool: Enables the dynamic MPLS label values.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableDynamicMplsLabelValues'])
@EnableDynamicMplsLabelValues.setter
def EnableDynamicMplsLabelValues(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableDynamicMplsLabelValues'], value)
@property
def EnableMacsecEgressOnlyAutoConfig(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableMacsecEgressOnlyAutoConfig'])
@EnableMacsecEgressOnlyAutoConfig.setter
def EnableMacsecEgressOnlyAutoConfig(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableMacsecEgressOnlyAutoConfig'], value)
@property
def Enabled(self):
"""
Returns
-------
- bool: If true, this enables the selected traffic item.
"""
return self._get_attribute(self._SDM_ATT_MAP['Enabled'])
@Enabled.setter
def Enabled(self, value):
self._set_attribute(self._SDM_ATT_MAP['Enabled'], value)
@property
def Errors(self):
"""
Returns
-------
- list(str): Displays the errors.
"""
return self._get_attribute(self._SDM_ATT_MAP['Errors'])
@property
def FlowGroupCount(self):
"""
Returns
-------
- number: Indicates the number of flow groups.
"""
return self._get_attribute(self._SDM_ATT_MAP['FlowGroupCount'])
@property
def FrerDuplicateElimination(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['FrerDuplicateElimination'])
@FrerDuplicateElimination.setter
def FrerDuplicateElimination(self, value):
self._set_attribute(self._SDM_ATT_MAP['FrerDuplicateElimination'], value)
@property
def HasOpenFlow(self):
"""
Returns
-------
- bool: Indicates whether or not this trafficItem has openflow.
"""
return self._get_attribute(self._SDM_ATT_MAP['HasOpenFlow'])
@HasOpenFlow.setter
def HasOpenFlow(self, value):
self._set_attribute(self._SDM_ATT_MAP['HasOpenFlow'], value)
@property
def HostsPerNetwork(self):
"""
Returns
-------
- number: The number of emulated hosts for the traffic stream.
"""
return self._get_attribute(self._SDM_ATT_MAP['HostsPerNetwork'])
@HostsPerNetwork.setter
def HostsPerNetwork(self, value):
self._set_attribute(self._SDM_ATT_MAP['HostsPerNetwork'], value)
@property
def InterAsBgpPreference(self):
"""DEPRECATED
Returns
-------
- str(one | two): This attribute is deprecated. Use labelPreferences attribute instead.
"""
return self._get_attribute(self._SDM_ATT_MAP['InterAsBgpPreference'])
@InterAsBgpPreference.setter
def InterAsBgpPreference(self, value):
self._set_attribute(self._SDM_ATT_MAP['InterAsBgpPreference'], value)
@property
def InterAsLdpPreference(self):
"""DEPRECATED
Returns
-------
- str(one | two): This attribute is deprecated. Use labelPreferences attribute instead.
"""
return self._get_attribute(self._SDM_ATT_MAP['InterAsLdpPreference'])
@InterAsLdpPreference.setter
def InterAsLdpPreference(self, value):
self._set_attribute(self._SDM_ATT_MAP['InterAsLdpPreference'], value)
@property
def LabelPreferences(self):
"""
Returns
-------
- list(dict(labelCategory:str[interAsRegionLsp | interAsRegionLspClassic | ipTransportLsp | transportLspClassic | vpnTransportLsp],labelPreferenceInput:str[auto | custom | none],labelProviderList:list[str[bgpLuSr | bgpLuSrInterAs | bgpv6LuSr | isisSr | ldp | ospfSr | ospfv3Sr | rsvp | targetedLdpInterAs]])): List of label preferences per Label Category defined as List[Label Category, Label Category input type, List of Label Providers in the preferred order]
"""
return self._get_attribute(self._SDM_ATT_MAP['LabelPreferences'])
@LabelPreferences.setter
def LabelPreferences(self, value):
self._set_attribute(self._SDM_ATT_MAP['LabelPreferences'], value)
@property
def MaxNumberOfVpnLabelStack(self):
"""
Returns
-------
- number: Signifies the maximum number of VPN label stack
"""
return self._get_attribute(self._SDM_ATT_MAP['MaxNumberOfVpnLabelStack'])
@MaxNumberOfVpnLabelStack.setter
def MaxNumberOfVpnLabelStack(self, value):
self._set_attribute(self._SDM_ATT_MAP['MaxNumberOfVpnLabelStack'], value)
@property
def MergeDestinations(self):
"""
Returns
-------
- bool: If true, merges the traffic flow in the destination ranges.
"""
return self._get_attribute(self._SDM_ATT_MAP['MergeDestinations'])
@MergeDestinations.setter
def MergeDestinations(self, value):
self._set_attribute(self._SDM_ATT_MAP['MergeDestinations'], value)
@property
def MulticastForwardingMode(self):
"""
Returns
-------
- str(loadBalancing | replication):
"""
return self._get_attribute(self._SDM_ATT_MAP['MulticastForwardingMode'])
@MulticastForwardingMode.setter
def MulticastForwardingMode(self, value):
self._set_attribute(self._SDM_ATT_MAP['MulticastForwardingMode'], value)
@property
def Name(self):
"""
Returns
-------
- str: The name of the traffic item.
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def NumVlansForMulticastReplication(self):
"""
Returns
-------
- number: Set the number of vlans for multicast replication
"""
return self._get_attribute(self._SDM_ATT_MAP['NumVlansForMulticastReplication'])
@NumVlansForMulticastReplication.setter
def NumVlansForMulticastReplication(self, value):
self._set_attribute(self._SDM_ATT_MAP['NumVlansForMulticastReplication'], value)
@property
def OrdinalNo(self):
"""
Returns
-------
- number: Signifies the ordinal number
"""
return self._get_attribute(self._SDM_ATT_MAP['OrdinalNo'])
@OrdinalNo.setter
def OrdinalNo(self, value):
self._set_attribute(self._SDM_ATT_MAP['OrdinalNo'], value)
@property
def OriginatorType(self):
"""
Returns
-------
- str(endUser | quickTest): Indicates who created this trafficItem.
"""
return self._get_attribute(self._SDM_ATT_MAP['OriginatorType'])
@OriginatorType.setter
def OriginatorType(self, value):
self._set_attribute(self._SDM_ATT_MAP['OriginatorType'], value)
@property
def RegenerateCount(self):
"""
Returns
-------
- number:
"""
return self._get_attribute(self._SDM_ATT_MAP['RegenerateCount'])
@property
def RoundRobinPacketOrdering(self):
"""
Returns
-------
- bool: This option enables Round Robin Packet Ordering within endpoints across Rx ports.
"""
return self._get_attribute(self._SDM_ATT_MAP['RoundRobinPacketOrdering'])
@RoundRobinPacketOrdering.setter
def RoundRobinPacketOrdering(self, value):
self._set_attribute(self._SDM_ATT_MAP['RoundRobinPacketOrdering'], value)
@property
def RouteMesh(self):
"""
Returns
-------
- str(fullMesh | oneToOne): The traffic flow type between each pair of source route endpoint and destination route endpoint.
"""
return self._get_attribute(self._SDM_ATT_MAP['RouteMesh'])
@RouteMesh.setter
def RouteMesh(self, value):
self._set_attribute(self._SDM_ATT_MAP['RouteMesh'], value)
@property
def SrcDestMesh(self):
"""
Returns
-------
- str(fullMesh | manyToMany | none | oneToOne): Select the options to set the traffic mesh type between the Source Endpoint and Destination endpoint.
"""
return self._get_attribute(self._SDM_ATT_MAP['SrcDestMesh'])
@SrcDestMesh.setter
def SrcDestMesh(self, value):
self._set_attribute(self._SDM_ATT_MAP['SrcDestMesh'], value)
@property
def State(self):
"""
Returns
-------
- str: (Read only) A read-only field which indicates the current state of the traffic item.
"""
return self._get_attribute(self._SDM_ATT_MAP['State'])
@property
def Suspend(self):
"""
Returns
-------
- bool: Suspends all traffic on this stream.
"""
return self._get_attribute(self._SDM_ATT_MAP['Suspend'])
@Suspend.setter
def Suspend(self, value):
self._set_attribute(self._SDM_ATT_MAP['Suspend'], value)
@property
def TrafficItemType(self):
"""
Returns
-------
- str(application | applicationLibrary | l2L3 | quick): Helps to configure and edit a traffic item that is sent across Ixia ports.
"""
return self._get_attribute(self._SDM_ATT_MAP['TrafficItemType'])
@TrafficItemType.setter
def TrafficItemType(self, value):
self._set_attribute(self._SDM_ATT_MAP['TrafficItemType'], value)
@property
def TrafficType(self):
"""
Returns
-------
- str(atm | avb1722 | avbRaw | ethernetVlan | fc | fcoe | frameRelay | hdlc | ipv4 | ipv4ApplicationTraffic | ipv6 | ipv6ApplicationTraffic | ppp | raw): Helps to select the type of traffic endpoint to be configured.
"""
return self._get_attribute(self._SDM_ATT_MAP['TrafficType'])
@TrafficType.setter
def TrafficType(self, value):
self._set_attribute(self._SDM_ATT_MAP['TrafficType'], value)
@property
def TransmitMode(self):
"""
Returns
-------
- str(interleaved | sequential): The transmit mode for this traffic item
"""
return self._get_attribute(self._SDM_ATT_MAP['TransmitMode'])
@TransmitMode.setter
def TransmitMode(self, value):
self._set_attribute(self._SDM_ATT_MAP['TransmitMode'], value)
@property
def TransportLdpPreference(self):
"""DEPRECATED
Returns
-------
- str(one | two): This attribute is deprecated. Use labelPreferences attribute instead.
"""
return self._get_attribute(self._SDM_ATT_MAP['TransportLdpPreference'])
@TransportLdpPreference.setter
def TransportLdpPreference(self, value):
self._set_attribute(self._SDM_ATT_MAP['TransportLdpPreference'], value)
@property
def TransportRsvpTePreference(self):
"""DEPRECATED
Returns
-------
- str(one | two): This attribute is deprecated. Use labelPreferences attribute instead.
"""
return self._get_attribute(self._SDM_ATT_MAP['TransportRsvpTePreference'])
@TransportRsvpTePreference.setter
def TransportRsvpTePreference(self, value):
self._set_attribute(self._SDM_ATT_MAP['TransportRsvpTePreference'], value)
@property
def UseControlPlaneFrameSize(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['UseControlPlaneFrameSize'])
@UseControlPlaneFrameSize.setter
def UseControlPlaneFrameSize(self, value):
self._set_attribute(self._SDM_ATT_MAP['UseControlPlaneFrameSize'], value)
@property
def UseControlPlaneRate(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['UseControlPlaneRate'])
@UseControlPlaneRate.setter
def UseControlPlaneRate(self, value):
self._set_attribute(self._SDM_ATT_MAP['UseControlPlaneRate'], value)
@property
def Warnings(self):
"""
Returns
-------
- list(str): Displays the warnings.
"""
return self._get_attribute(self._SDM_ATT_MAP['Warnings'])
def update(self, AllowSelfDestined=None, BiDirectional=None, EgressEnabled=None, EnableDynamicMplsLabelValues=None, EnableMacsecEgressOnlyAutoConfig=None, Enabled=None, FrerDuplicateElimination=None, HasOpenFlow=None, HostsPerNetwork=None, InterAsBgpPreference=None, InterAsLdpPreference=None, LabelPreferences=None, MaxNumberOfVpnLabelStack=None, MergeDestinations=None, MulticastForwardingMode=None, Name=None, NumVlansForMulticastReplication=None, OrdinalNo=None, OriginatorType=None, RoundRobinPacketOrdering=None, RouteMesh=None, SrcDestMesh=None, Suspend=None, TrafficItemType=None, TrafficType=None, TransmitMode=None, TransportLdpPreference=None, TransportRsvpTePreference=None, UseControlPlaneFrameSize=None, UseControlPlaneRate=None):
"""Updates trafficItem resource on the server.
Args
----
- AllowSelfDestined (bool): If true, this helps to send traffic from routes on an Ixia port to other routes on the same Ixia port.
- BiDirectional (bool): If true, this enables traffic to be sent in forward and reverse destination.
- EgressEnabled (bool): Enables the egress.
- EnableDynamicMplsLabelValues (bool): Enables the dynamic MPLS label values.
- EnableMacsecEgressOnlyAutoConfig (bool):
- Enabled (bool): If true, this enables the selected traffic item.
- FrerDuplicateElimination (bool):
- HasOpenFlow (bool): Indicates whether or not this trafficItem has openflow.
- HostsPerNetwork (number): The number of emulated hosts for the traffic stream.
- InterAsBgpPreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- InterAsLdpPreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- LabelPreferences (list(dict(labelCategory:str[interAsRegionLsp | interAsRegionLspClassic | ipTransportLsp | transportLspClassic | vpnTransportLsp],labelPreferenceInput:str[auto | custom | none],labelProviderList:list[str[bgpLuSr | bgpLuSrInterAs | bgpv6LuSr | isisSr | ldp | ospfSr | ospfv3Sr | rsvp | targetedLdpInterAs]]))): List of label preferences per Label Category defined as List[Label Category, Label Category input type, List of Label Providers in the preferred order]
- MaxNumberOfVpnLabelStack (number): Signifies the maximum number of VPN label stack
- MergeDestinations (bool): If true, merges the traffic flow in the destination ranges.
- MulticastForwardingMode (str(loadBalancing | replication)):
- Name (str): The name of the traffic item.
- NumVlansForMulticastReplication (number): Set the number of vlans for multicast replication
- OrdinalNo (number): Signifies the ordinal number
- OriginatorType (str(endUser | quickTest)): Indicates who created this trafficItem.
- RoundRobinPacketOrdering (bool): This option enables Round Robin Packet Ordering within endpoints across Rx ports.
- RouteMesh (str(fullMesh | oneToOne)): The traffic flow type between each pair of source route endpoint and destination route endpoint.
- SrcDestMesh (str(fullMesh | manyToMany | none | oneToOne)): Select the options to set the traffic mesh type between the Source Endpoint and Destination endpoint.
- Suspend (bool): Suspends all traffic on this stream.
- TrafficItemType (str(application | applicationLibrary | l2L3 | quick)): Helps to configure and edit a traffic item that is sent across Ixia ports.
- TrafficType (str(atm | avb1722 | avbRaw | ethernetVlan | fc | fcoe | frameRelay | hdlc | ipv4 | ipv4ApplicationTraffic | ipv6 | ipv6ApplicationTraffic | ppp | raw)): Helps to select the type of traffic endpoint to be configured.
- TransmitMode (str(interleaved | sequential)): The transmit mode for this traffic item
- TransportLdpPreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- TransportRsvpTePreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- UseControlPlaneFrameSize (bool):
- UseControlPlaneRate (bool):
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, AllowSelfDestined=None, BiDirectional=None, EgressEnabled=None, EnableDynamicMplsLabelValues=None, EnableMacsecEgressOnlyAutoConfig=None, Enabled=None, FrerDuplicateElimination=None, HasOpenFlow=None, HostsPerNetwork=None, InterAsBgpPreference=None, InterAsLdpPreference=None, LabelPreferences=None, MaxNumberOfVpnLabelStack=None, MergeDestinations=None, MulticastForwardingMode=None, Name=None, NumVlansForMulticastReplication=None, OrdinalNo=None, OriginatorType=None, RoundRobinPacketOrdering=None, RouteMesh=None, SrcDestMesh=None, Suspend=None, TrafficItemType=None, TrafficType=None, TransmitMode=None, TransportLdpPreference=None, TransportRsvpTePreference=None, UseControlPlaneFrameSize=None, UseControlPlaneRate=None):
"""Adds a new trafficItem resource on the server and adds it to the container.
Args
----
- AllowSelfDestined (bool): If true, this helps to send traffic from routes on an Ixia port to other routes on the same Ixia port.
- BiDirectional (bool): If true, this enables traffic to be sent in forward and reverse destination.
- EgressEnabled (bool): Enables the egress.
- EnableDynamicMplsLabelValues (bool): Enables the dynamic MPLS label values.
- EnableMacsecEgressOnlyAutoConfig (bool):
- Enabled (bool): If true, this enables the selected traffic item.
- FrerDuplicateElimination (bool):
- HasOpenFlow (bool): Indicates whether or not this trafficItem has openflow.
- HostsPerNetwork (number): The number of emulated hosts for the traffic stream.
- InterAsBgpPreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- InterAsLdpPreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- LabelPreferences (list(dict(labelCategory:str[interAsRegionLsp | interAsRegionLspClassic | ipTransportLsp | transportLspClassic | vpnTransportLsp],labelPreferenceInput:str[auto | custom | none],labelProviderList:list[str[bgpLuSr | bgpLuSrInterAs | bgpv6LuSr | isisSr | ldp | ospfSr | ospfv3Sr | rsvp | targetedLdpInterAs]]))): List of label preferences per Label Category defined as List[Label Category, Label Category input type, List of Label Providers in the preferred order]
- MaxNumberOfVpnLabelStack (number): Signifies the maximum number of VPN label stack
- MergeDestinations (bool): If true, merges the traffic flow in the destination ranges.
- MulticastForwardingMode (str(loadBalancing | replication)):
- Name (str): The name of the traffic item.
- NumVlansForMulticastReplication (number): Set the number of vlans for multicast replication
- OrdinalNo (number): Signifies the ordinal number
- OriginatorType (str(endUser | quickTest)): Indicates who created this trafficItem.
- RoundRobinPacketOrdering (bool): This option enables Round Robin Packet Ordering within endpoints across Rx ports.
- RouteMesh (str(fullMesh | oneToOne)): The traffic flow type between each pair of source route endpoint and destination route endpoint.
- SrcDestMesh (str(fullMesh | manyToMany | none | oneToOne)): Select the options to set the traffic mesh type between the Source Endpoint and Destination endpoint.
- Suspend (bool): Suspends all traffic on this stream.
- TrafficItemType (str(application | applicationLibrary | l2L3 | quick)): Helps to configure and edit a traffic item that is sent across Ixia ports.
- TrafficType (str(atm | avb1722 | avbRaw | ethernetVlan | fc | fcoe | frameRelay | hdlc | ipv4 | ipv4ApplicationTraffic | ipv6 | ipv6ApplicationTraffic | ppp | raw)): Helps to select the type of traffic endpoint to be configured.
- TransmitMode (str(interleaved | sequential)): The transmit mode for this traffic item
- TransportLdpPreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- TransportRsvpTePreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- UseControlPlaneFrameSize (bool):
- UseControlPlaneRate (bool):
Returns
-------
- self: This instance with all currently retrieved trafficItem resources using find and the newly added trafficItem resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained trafficItem resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, AllowSelfDestined=None, BiDirectional=None, EgressEnabled=None, EnableDynamicMplsLabelValues=None, EnableMacsecEgressOnlyAutoConfig=None, Enabled=None, Errors=None, FlowGroupCount=None, FrerDuplicateElimination=None, HasOpenFlow=None, HostsPerNetwork=None, InterAsBgpPreference=None, InterAsLdpPreference=None, LabelPreferences=None, MaxNumberOfVpnLabelStack=None, MergeDestinations=None, MulticastForwardingMode=None, Name=None, NumVlansForMulticastReplication=None, OrdinalNo=None, OriginatorType=None, RegenerateCount=None, RoundRobinPacketOrdering=None, RouteMesh=None, SrcDestMesh=None, State=None, Suspend=None, TrafficItemType=None, TrafficType=None, TransmitMode=None, TransportLdpPreference=None, TransportRsvpTePreference=None, UseControlPlaneFrameSize=None, UseControlPlaneRate=None, Warnings=None):
"""Finds and retrieves trafficItem resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve trafficItem resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all trafficItem resources from the server.
Args
----
- AllowSelfDestined (bool): If true, this helps to send traffic from routes on an Ixia port to other routes on the same Ixia port.
- BiDirectional (bool): If true, this enables traffic to be sent in forward and reverse destination.
- EgressEnabled (bool): Enables the egress.
- EnableDynamicMplsLabelValues (bool): Enables the dynamic MPLS label values.
- EnableMacsecEgressOnlyAutoConfig (bool):
- Enabled (bool): If true, this enables the selected traffic item.
- Errors (list(str)): Displays the errors.
- FlowGroupCount (number): Indicates the number of flow groups.
- FrerDuplicateElimination (bool):
- HasOpenFlow (bool): Indicates whether or not this trafficItem has openflow.
- HostsPerNetwork (number): The number of emulated hosts for the traffic stream.
- InterAsBgpPreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- InterAsLdpPreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- LabelPreferences (list(dict(labelCategory:str[interAsRegionLsp | interAsRegionLspClassic | ipTransportLsp | transportLspClassic | vpnTransportLsp],labelPreferenceInput:str[auto | custom | none],labelProviderList:list[str[bgpLuSr | bgpLuSrInterAs | bgpv6LuSr | isisSr | ldp | ospfSr | ospfv3Sr | rsvp | targetedLdpInterAs]]))): List of label preferences per Label Category defined as List[Label Category, Label Category input type, List of Label Providers in the preferred order]
- MaxNumberOfVpnLabelStack (number): Signifies the maximum number of VPN label stack
- MergeDestinations (bool): If true, merges the traffic flow in the destination ranges.
- MulticastForwardingMode (str(loadBalancing | replication)):
- Name (str): The name of the traffic item.
- NumVlansForMulticastReplication (number): Set the number of vlans for multicast replication
- OrdinalNo (number): Signifies the ordinal number
- OriginatorType (str(endUser | quickTest)): Indicates who created this trafficItem.
- RegenerateCount (number):
- RoundRobinPacketOrdering (bool): This option enables Round Robin Packet Ordering within endpoints across Rx ports.
- RouteMesh (str(fullMesh | oneToOne)): The traffic flow type between each pair of source route endpoint and destination route endpoint.
- SrcDestMesh (str(fullMesh | manyToMany | none | oneToOne)): Select the options to set the traffic mesh type between the Source Endpoint and Destination endpoint.
- State (str): (Read only) A read-only field which indicates the current state of the traffic item.
- Suspend (bool): Suspends all traffic on this stream.
- TrafficItemType (str(application | applicationLibrary | l2L3 | quick)): Helps to configure and edit a traffic item that is sent across Ixia ports.
- TrafficType (str(atm | avb1722 | avbRaw | ethernetVlan | fc | fcoe | frameRelay | hdlc | ipv4 | ipv4ApplicationTraffic | ipv6 | ipv6ApplicationTraffic | ppp | raw)): Helps to select the type of traffic endpoint to be configured.
- TransmitMode (str(interleaved | sequential)): The transmit mode for this traffic item
- TransportLdpPreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- TransportRsvpTePreference (str(one | two)): This attribute is deprecated. Use labelPreferences attribute instead.
- UseControlPlaneFrameSize (bool):
- UseControlPlaneRate (bool):
- Warnings (list(str)): Displays the warnings.
Returns
-------
- self: This instance with matching trafficItem resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of trafficItem data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the trafficItem resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def ConvertToRaw(self):
"""Executes the convertToRaw operation on the server.
Converts a non-raw traffic item to a raw traffic item.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('convertToRaw', payload=payload, response_object=None)
def Duplicate(self, *args, **kwargs):
"""Executes the duplicate operation on the server.
Duplicates a specific traffic item.
duplicate(Arg2=number)
----------------------
- Arg2 (number): The number of times to duplicate the traffic item.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('duplicate', payload=payload, response_object=None)
def DuplicateItems(self):
"""Executes the duplicateItems operation on the server.
Duplicates a list of traffic items.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('duplicateItems', payload=payload, response_object=None)
def Generate(self):
"""Executes the generate operation on the server.
Generate traffic for specific traffic items.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('generate', payload=payload, response_object=None)
def PauseStatelessTraffic(self, *args, **kwargs):
"""Executes the pauseStatelessTraffic operation on the server.
Pause or Resume stateless traffic.
pauseStatelessTraffic(Arg2=bool)
--------------------------------
- Arg2 (bool): If true, it will pause running traffic. If false, it will resume previously paused traffic.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('pauseStatelessTraffic', payload=payload, response_object=None)
def ResolveAptixiaEndpoints(self):
"""Executes the resolveAptixiaEndpoints operation on the server.
Resolves /vport/protocolStack/. endpoints being used by a specific traffic item.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('resolveAptixiaEndpoints', payload=payload, response_object=None)
def StartDefaultLearning(self):
"""Executes the startDefaultLearning operation on the server.
Starts default learning for a list of traffic items.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('startDefaultLearning', payload=payload, response_object=None)
def StartLearning(self, *args, **kwargs):
"""Executes the startLearning operation on the server.
Sends learning frames.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
startLearning(Arg2=number, Arg3=number, Arg4=number)
----------------------------------------------------
- Arg2 (number): The framesize of the learning frame.
- Arg3 (number): The framecount of the learning frames.
- Arg4 (number): The frames per second of the learning frames.
startLearning(Arg2=number, Arg3=number, Arg4=number, Arg5=bool, Arg6=bool, Arg7=bool)
-------------------------------------------------------------------------------------
- Arg2 (number): The framesize of the learning frame.
- Arg3 (number): The framecount of the learning frames.
- Arg4 (number): The frames per second of the learning frames.
- Arg5 (bool): Send gratuitous ARP frames.
- Arg6 (bool): Send MAC frames.
- Arg7 (bool): Send Fast Path frames.
startLearning(Arg2=number, Arg3=number, Arg4=number, Arg5=bool, Arg6=bool, Arg7=bool, Arg8=bool)
------------------------------------------------------------------------------------------------
- Arg2 (number): The framesize of the learning frame.
- Arg3 (number): The framecount of the learning frames.
- Arg4 (number): The frames per second of the learning frames.
- Arg5 (bool): Send gratuitous ARP frames.
- Arg6 (bool): Send MAC frames.
- Arg7 (bool): Send Fast Path frames.
- Arg8 (bool): Send full mesh.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('startLearning', payload=payload, response_object=None)
def StartStatelessTraffic(self):
"""Executes the startStatelessTraffic operation on the server.
Start the traffic configuration for stateless traffic items only.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('startStatelessTraffic', payload=payload, response_object=None)
def StartStatelessTrafficBlocking(self):
"""Executes the startStatelessTrafficBlocking operation on the server.
Start the traffic configuration for stateless traffic items only. This will block until traffic is fully started.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('startStatelessTrafficBlocking', payload=payload, response_object=None)
def StopStatelessTraffic(self):
"""Executes the stopStatelessTraffic operation on the server.
Stop the stateless traffic items.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('stopStatelessTraffic', payload=payload, response_object=None)
def StopStatelessTrafficBlocking(self):
"""Executes the stopStatelessTrafficBlocking operation on the server.
Stop the traffic configuration for stateless traffic items only. This will block until traffic is fully stopped.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('stopStatelessTrafficBlocking', payload=payload, response_object=None)
| 47.16269 | 829 | 0.680365 |
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class TrafficItem(Base):
__slots__ = ()
_SDM_NAME = 'trafficItem'
_SDM_ATT_MAP = {
'AllowSelfDestined': 'allowSelfDestined',
'BiDirectional': 'biDirectional',
'EgressEnabled': 'egressEnabled',
'EnableDynamicMplsLabelValues': 'enableDynamicMplsLabelValues',
'EnableMacsecEgressOnlyAutoConfig': 'enableMacsecEgressOnlyAutoConfig',
'Enabled': 'enabled',
'Errors': 'errors',
'FlowGroupCount': 'flowGroupCount',
'FrerDuplicateElimination': 'frerDuplicateElimination',
'HasOpenFlow': 'hasOpenFlow',
'HostsPerNetwork': 'hostsPerNetwork',
'InterAsBgpPreference': 'interAsBgpPreference',
'InterAsLdpPreference': 'interAsLdpPreference',
'LabelPreferences': 'labelPreferences',
'MaxNumberOfVpnLabelStack': 'maxNumberOfVpnLabelStack',
'MergeDestinations': 'mergeDestinations',
'MulticastForwardingMode': 'multicastForwardingMode',
'Name': 'name',
'NumVlansForMulticastReplication': 'numVlansForMulticastReplication',
'OrdinalNo': 'ordinalNo',
'OriginatorType': 'originatorType',
'RegenerateCount': 'regenerateCount',
'RoundRobinPacketOrdering': 'roundRobinPacketOrdering',
'RouteMesh': 'routeMesh',
'SrcDestMesh': 'srcDestMesh',
'State': 'state',
'Suspend': 'suspend',
'TrafficItemType': 'trafficItemType',
'TrafficType': 'trafficType',
'TransmitMode': 'transmitMode',
'TransportLdpPreference': 'transportLdpPreference',
'TransportRsvpTePreference': 'transportRsvpTePreference',
'UseControlPlaneFrameSize': 'useControlPlaneFrameSize',
'UseControlPlaneRate': 'useControlPlaneRate',
'Warnings': 'warnings',
}
def __init__(self, parent):
super(TrafficItem, self).__init__(parent)
@property
def ConfigElement(self):
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.configelement.configelement import ConfigElement
return ConfigElement(self)
@property
def EgressTracking(self):
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.egresstracking.egresstracking import EgressTracking
return EgressTracking(self)
@property
def EndpointSet(self):
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.endpointset.endpointset import EndpointSet
return EndpointSet(self)
@property
def HighLevelStream(self):
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.highlevelstream.highlevelstream import HighLevelStream
return HighLevelStream(self)
@property
def Tracking(self):
from uhd_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.tracking.tracking import Tracking
return Tracking(self)
@property
def AllowSelfDestined(self):
return self._get_attribute(self._SDM_ATT_MAP['AllowSelfDestined'])
@AllowSelfDestined.setter
def AllowSelfDestined(self, value):
self._set_attribute(self._SDM_ATT_MAP['AllowSelfDestined'], value)
@property
def BiDirectional(self):
return self._get_attribute(self._SDM_ATT_MAP['BiDirectional'])
@BiDirectional.setter
def BiDirectional(self, value):
self._set_attribute(self._SDM_ATT_MAP['BiDirectional'], value)
@property
def EgressEnabled(self):
return self._get_attribute(self._SDM_ATT_MAP['EgressEnabled'])
@EgressEnabled.setter
def EgressEnabled(self, value):
self._set_attribute(self._SDM_ATT_MAP['EgressEnabled'], value)
@property
def EnableDynamicMplsLabelValues(self):
return self._get_attribute(self._SDM_ATT_MAP['EnableDynamicMplsLabelValues'])
@EnableDynamicMplsLabelValues.setter
def EnableDynamicMplsLabelValues(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableDynamicMplsLabelValues'], value)
@property
def EnableMacsecEgressOnlyAutoConfig(self):
return self._get_attribute(self._SDM_ATT_MAP['EnableMacsecEgressOnlyAutoConfig'])
@EnableMacsecEgressOnlyAutoConfig.setter
def EnableMacsecEgressOnlyAutoConfig(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableMacsecEgressOnlyAutoConfig'], value)
@property
def Enabled(self):
return self._get_attribute(self._SDM_ATT_MAP['Enabled'])
@Enabled.setter
def Enabled(self, value):
self._set_attribute(self._SDM_ATT_MAP['Enabled'], value)
@property
def Errors(self):
return self._get_attribute(self._SDM_ATT_MAP['Errors'])
@property
def FlowGroupCount(self):
return self._get_attribute(self._SDM_ATT_MAP['FlowGroupCount'])
@property
def FrerDuplicateElimination(self):
return self._get_attribute(self._SDM_ATT_MAP['FrerDuplicateElimination'])
@FrerDuplicateElimination.setter
def FrerDuplicateElimination(self, value):
self._set_attribute(self._SDM_ATT_MAP['FrerDuplicateElimination'], value)
@property
def HasOpenFlow(self):
return self._get_attribute(self._SDM_ATT_MAP['HasOpenFlow'])
@HasOpenFlow.setter
def HasOpenFlow(self, value):
self._set_attribute(self._SDM_ATT_MAP['HasOpenFlow'], value)
@property
def HostsPerNetwork(self):
return self._get_attribute(self._SDM_ATT_MAP['HostsPerNetwork'])
@HostsPerNetwork.setter
def HostsPerNetwork(self, value):
self._set_attribute(self._SDM_ATT_MAP['HostsPerNetwork'], value)
@property
def InterAsBgpPreference(self):
return self._get_attribute(self._SDM_ATT_MAP['InterAsBgpPreference'])
@InterAsBgpPreference.setter
def InterAsBgpPreference(self, value):
self._set_attribute(self._SDM_ATT_MAP['InterAsBgpPreference'], value)
@property
def InterAsLdpPreference(self):
return self._get_attribute(self._SDM_ATT_MAP['InterAsLdpPreference'])
@InterAsLdpPreference.setter
def InterAsLdpPreference(self, value):
self._set_attribute(self._SDM_ATT_MAP['InterAsLdpPreference'], value)
@property
def LabelPreferences(self):
return self._get_attribute(self._SDM_ATT_MAP['LabelPreferences'])
@LabelPreferences.setter
def LabelPreferences(self, value):
self._set_attribute(self._SDM_ATT_MAP['LabelPreferences'], value)
@property
def MaxNumberOfVpnLabelStack(self):
return self._get_attribute(self._SDM_ATT_MAP['MaxNumberOfVpnLabelStack'])
@MaxNumberOfVpnLabelStack.setter
def MaxNumberOfVpnLabelStack(self, value):
self._set_attribute(self._SDM_ATT_MAP['MaxNumberOfVpnLabelStack'], value)
@property
def MergeDestinations(self):
return self._get_attribute(self._SDM_ATT_MAP['MergeDestinations'])
@MergeDestinations.setter
def MergeDestinations(self, value):
self._set_attribute(self._SDM_ATT_MAP['MergeDestinations'], value)
@property
def MulticastForwardingMode(self):
return self._get_attribute(self._SDM_ATT_MAP['MulticastForwardingMode'])
@MulticastForwardingMode.setter
def MulticastForwardingMode(self, value):
self._set_attribute(self._SDM_ATT_MAP['MulticastForwardingMode'], value)
@property
def Name(self):
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def NumVlansForMulticastReplication(self):
return self._get_attribute(self._SDM_ATT_MAP['NumVlansForMulticastReplication'])
@NumVlansForMulticastReplication.setter
def NumVlansForMulticastReplication(self, value):
self._set_attribute(self._SDM_ATT_MAP['NumVlansForMulticastReplication'], value)
@property
def OrdinalNo(self):
return self._get_attribute(self._SDM_ATT_MAP['OrdinalNo'])
@OrdinalNo.setter
def OrdinalNo(self, value):
self._set_attribute(self._SDM_ATT_MAP['OrdinalNo'], value)
@property
def OriginatorType(self):
return self._get_attribute(self._SDM_ATT_MAP['OriginatorType'])
@OriginatorType.setter
def OriginatorType(self, value):
self._set_attribute(self._SDM_ATT_MAP['OriginatorType'], value)
@property
def RegenerateCount(self):
return self._get_attribute(self._SDM_ATT_MAP['RegenerateCount'])
@property
def RoundRobinPacketOrdering(self):
return self._get_attribute(self._SDM_ATT_MAP['RoundRobinPacketOrdering'])
@RoundRobinPacketOrdering.setter
def RoundRobinPacketOrdering(self, value):
self._set_attribute(self._SDM_ATT_MAP['RoundRobinPacketOrdering'], value)
@property
def RouteMesh(self):
return self._get_attribute(self._SDM_ATT_MAP['RouteMesh'])
@RouteMesh.setter
def RouteMesh(self, value):
self._set_attribute(self._SDM_ATT_MAP['RouteMesh'], value)
@property
def SrcDestMesh(self):
return self._get_attribute(self._SDM_ATT_MAP['SrcDestMesh'])
@SrcDestMesh.setter
def SrcDestMesh(self, value):
self._set_attribute(self._SDM_ATT_MAP['SrcDestMesh'], value)
@property
def State(self):
return self._get_attribute(self._SDM_ATT_MAP['State'])
@property
def Suspend(self):
return self._get_attribute(self._SDM_ATT_MAP['Suspend'])
@Suspend.setter
def Suspend(self, value):
self._set_attribute(self._SDM_ATT_MAP['Suspend'], value)
@property
def TrafficItemType(self):
return self._get_attribute(self._SDM_ATT_MAP['TrafficItemType'])
@TrafficItemType.setter
def TrafficItemType(self, value):
self._set_attribute(self._SDM_ATT_MAP['TrafficItemType'], value)
@property
def TrafficType(self):
return self._get_attribute(self._SDM_ATT_MAP['TrafficType'])
@TrafficType.setter
def TrafficType(self, value):
self._set_attribute(self._SDM_ATT_MAP['TrafficType'], value)
@property
def TransmitMode(self):
return self._get_attribute(self._SDM_ATT_MAP['TransmitMode'])
@TransmitMode.setter
def TransmitMode(self, value):
self._set_attribute(self._SDM_ATT_MAP['TransmitMode'], value)
@property
def TransportLdpPreference(self):
return self._get_attribute(self._SDM_ATT_MAP['TransportLdpPreference'])
@TransportLdpPreference.setter
def TransportLdpPreference(self, value):
self._set_attribute(self._SDM_ATT_MAP['TransportLdpPreference'], value)
@property
def TransportRsvpTePreference(self):
return self._get_attribute(self._SDM_ATT_MAP['TransportRsvpTePreference'])
@TransportRsvpTePreference.setter
def TransportRsvpTePreference(self, value):
self._set_attribute(self._SDM_ATT_MAP['TransportRsvpTePreference'], value)
@property
def UseControlPlaneFrameSize(self):
return self._get_attribute(self._SDM_ATT_MAP['UseControlPlaneFrameSize'])
@UseControlPlaneFrameSize.setter
def UseControlPlaneFrameSize(self, value):
self._set_attribute(self._SDM_ATT_MAP['UseControlPlaneFrameSize'], value)
@property
def UseControlPlaneRate(self):
return self._get_attribute(self._SDM_ATT_MAP['UseControlPlaneRate'])
@UseControlPlaneRate.setter
def UseControlPlaneRate(self, value):
self._set_attribute(self._SDM_ATT_MAP['UseControlPlaneRate'], value)
@property
def Warnings(self):
return self._get_attribute(self._SDM_ATT_MAP['Warnings'])
def update(self, AllowSelfDestined=None, BiDirectional=None, EgressEnabled=None, EnableDynamicMplsLabelValues=None, EnableMacsecEgressOnlyAutoConfig=None, Enabled=None, FrerDuplicateElimination=None, HasOpenFlow=None, HostsPerNetwork=None, InterAsBgpPreference=None, InterAsLdpPreference=None, LabelPreferences=None, MaxNumberOfVpnLabelStack=None, MergeDestinations=None, MulticastForwardingMode=None, Name=None, NumVlansForMulticastReplication=None, OrdinalNo=None, OriginatorType=None, RoundRobinPacketOrdering=None, RouteMesh=None, SrcDestMesh=None, Suspend=None, TrafficItemType=None, TrafficType=None, TransmitMode=None, TransportLdpPreference=None, TransportRsvpTePreference=None, UseControlPlaneFrameSize=None, UseControlPlaneRate=None):
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, AllowSelfDestined=None, BiDirectional=None, EgressEnabled=None, EnableDynamicMplsLabelValues=None, EnableMacsecEgressOnlyAutoConfig=None, Enabled=None, FrerDuplicateElimination=None, HasOpenFlow=None, HostsPerNetwork=None, InterAsBgpPreference=None, InterAsLdpPreference=None, LabelPreferences=None, MaxNumberOfVpnLabelStack=None, MergeDestinations=None, MulticastForwardingMode=None, Name=None, NumVlansForMulticastReplication=None, OrdinalNo=None, OriginatorType=None, RoundRobinPacketOrdering=None, RouteMesh=None, SrcDestMesh=None, Suspend=None, TrafficItemType=None, TrafficType=None, TransmitMode=None, TransportLdpPreference=None, TransportRsvpTePreference=None, UseControlPlaneFrameSize=None, UseControlPlaneRate=None):
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
self._delete()
def find(self, AllowSelfDestined=None, BiDirectional=None, EgressEnabled=None, EnableDynamicMplsLabelValues=None, EnableMacsecEgressOnlyAutoConfig=None, Enabled=None, Errors=None, FlowGroupCount=None, FrerDuplicateElimination=None, HasOpenFlow=None, HostsPerNetwork=None, InterAsBgpPreference=None, InterAsLdpPreference=None, LabelPreferences=None, MaxNumberOfVpnLabelStack=None, MergeDestinations=None, MulticastForwardingMode=None, Name=None, NumVlansForMulticastReplication=None, OrdinalNo=None, OriginatorType=None, RegenerateCount=None, RoundRobinPacketOrdering=None, RouteMesh=None, SrcDestMesh=None, State=None, Suspend=None, TrafficItemType=None, TrafficType=None, TransmitMode=None, TransportLdpPreference=None, TransportRsvpTePreference=None, UseControlPlaneFrameSize=None, UseControlPlaneRate=None, Warnings=None):
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
return self._read(href)
def ConvertToRaw(self):
payload = { "Arg1": self.href }
return self._execute('convertToRaw', payload=payload, response_object=None)
def Duplicate(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('duplicate', payload=payload, response_object=None)
def DuplicateItems(self):
payload = { "Arg1": self }
return self._execute('duplicateItems', payload=payload, response_object=None)
def Generate(self):
payload = { "Arg1": self }
return self._execute('generate', payload=payload, response_object=None)
def PauseStatelessTraffic(self, *args, **kwargs):
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('pauseStatelessTraffic', payload=payload, response_object=None)
def ResolveAptixiaEndpoints(self):
payload = { "Arg1": self }
return self._execute('resolveAptixiaEndpoints', payload=payload, response_object=None)
def StartDefaultLearning(self):
payload = { "Arg1": self }
return self._execute('startDefaultLearning', payload=payload, response_object=None)
def StartLearning(self, *args, **kwargs):
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('startLearning', payload=payload, response_object=None)
def StartStatelessTraffic(self):
payload = { "Arg1": self }
return self._execute('startStatelessTraffic', payload=payload, response_object=None)
def StartStatelessTrafficBlocking(self):
payload = { "Arg1": self }
return self._execute('startStatelessTrafficBlocking', payload=payload, response_object=None)
def StopStatelessTraffic(self):
payload = { "Arg1": self }
return self._execute('stopStatelessTraffic', payload=payload, response_object=None)
def StopStatelessTrafficBlocking(self):
payload = { "Arg1": self }
return self._execute('stopStatelessTrafficBlocking', payload=payload, response_object=None)
| true | true |
f73d1836dc7e1efb37ef8462febf2641fff639d7 | 353 | py | Python | src/adafruit-circuitpython-bundle-4.x-mpy-20190713/examples/circuitplayground_light.py | mbaaba/solar_panel | 42059d8c61320494ad1298065dbc50cd9b3bd51e | [
"MIT"
] | null | null | null | src/adafruit-circuitpython-bundle-4.x-mpy-20190713/examples/circuitplayground_light.py | mbaaba/solar_panel | 42059d8c61320494ad1298065dbc50cd9b3bd51e | [
"MIT"
] | null | null | null | src/adafruit-circuitpython-bundle-4.x-mpy-20190713/examples/circuitplayground_light.py | mbaaba/solar_panel | 42059d8c61320494ad1298065dbc50cd9b3bd51e | [
"MIT"
] | null | null | null | """This example uses the light sensor on your CPX, located next to the picture of the eye. Try
shining a flashlight on your CPX, or covering the light sensor with your finger to see the values
increase and decrease."""
import time
from adafruit_circuitplayground.express import cpx
while True:
print("Light:", cpx.light)
time.sleep(1)
| 35.3 | 98 | 0.745042 | import time
from adafruit_circuitplayground.express import cpx
while True:
print("Light:", cpx.light)
time.sleep(1)
| true | true |
f73d18f5dae6d8719eac2d33c9bce7865b8eb90e | 401 | py | Python | openslides/users/migrations/0012_user_auth_type.py | swilde/OpenSlides | 23ae32a75892005632784652d108836d1ba09da9 | [
"MIT"
] | 3 | 2021-02-11T20:45:58.000Z | 2022-02-09T21:59:42.000Z | openslides/users/migrations/0012_user_auth_type.py | swilde/OpenSlides | 23ae32a75892005632784652d108836d1ba09da9 | [
"MIT"
] | 2 | 2021-11-02T15:48:16.000Z | 2022-03-02T08:38:19.000Z | server/openslides/users/migrations/0012_user_auth_type.py | DLRG-Jugend-NDS/OpenSlides | 03704e4852821ccd67fe23adb6e2c38b67d93732 | [
"MIT"
] | 3 | 2021-01-18T11:44:05.000Z | 2022-01-19T16:00:23.000Z | # Generated by Django 2.2.4 on 2019-08-21 12:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("users", "0011_postgresql_auth_group_id_sequence")]
operations = [
migrations.AddField(
model_name="user",
name="auth_type",
field=models.CharField(default="default", max_length=64),
)
]
| 23.588235 | 72 | 0.640898 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("users", "0011_postgresql_auth_group_id_sequence")]
operations = [
migrations.AddField(
model_name="user",
name="auth_type",
field=models.CharField(default="default", max_length=64),
)
]
| true | true |
f73d1972d37beccba0b567c5ef01ea3d09253d7f | 10,866 | py | Python | services/ui_backend_service/api/ws.py | runsascoded/metaflow-service | ac7770dfeae17fd060129d408fa3bb472fc00b86 | [
"Apache-2.0"
] | null | null | null | services/ui_backend_service/api/ws.py | runsascoded/metaflow-service | ac7770dfeae17fd060129d408fa3bb472fc00b86 | [
"Apache-2.0"
] | 1 | 2022-01-13T23:46:05.000Z | 2022-01-13T23:46:05.000Z | services/ui_backend_service/api/ws.py | valayDave/metaflow-service | 65e19aef268e9e707522ee0695fd4ebaee42aa69 | [
"Apache-2.0"
] | null | null | null | import os
import json
import time
import asyncio
import collections
from aiohttp import web, WSMsgType
from typing import List, Dict, Any, Callable
from .utils import resource_conditions, TTLQueue
from services.utils import logging
from pyee import AsyncIOEventEmitter
from ..data.refiner import TaskRefiner, ArtifactRefiner
from throttler import throttle_simultaneous
# TTL (seconds) for the in-memory event replay queue; overridable via env var.
# Cast to int: os.environ values are always strings, and this constant is later
# compared against float timestamps (a str would raise TypeError there). The
# sibling constant below already did this cast; this one now matches.
WS_QUEUE_TTL_SECONDS = int(os.environ.get("WS_QUEUE_TTL_SECONDS", 60 * 5))  # 5 minute TTL by default
WS_POSTPROCESS_CONCURRENCY_LIMIT = int(os.environ.get("WS_POSTPROCESS_CONCURRENCY_LIMIT", 8))

# Message types accepted from websocket clients.
SUBSCRIBE = 'SUBSCRIBE'
UNSUBSCRIBE = 'UNSUBSCRIBE'

# One active (or recently disconnected) client subscription:
# ws: the aiohttp websocket, disconnected_ts: when the client dropped (None if
# connected), fullpath/resource/query: parsed subscription target, uuid: the
# client-chosen subscription id, filter: optional predicate applied to events.
WSSubscription = collections.namedtuple(
    "WSSubscription", "ws disconnected_ts fullpath resource query uuid filter")
class Websocket(object):
    '''
    Adds a '/ws' endpoint and support for broadcasting realtime resource events to subscribed frontend clients.
    Subscribe to runs created by user dipper:
    /runs?_tags=user:dipper
    'uuid' can be used to identify specific subscription.
    Subscribe to future events:
    {"type": "SUBSCRIBE", "uuid": "myst3rySh4ck", "resource": "/runs"}
    Subscribing to future events and return past data since unix time (seconds):
    {"type": "SUBSCRIBE", "uuid": "myst3rySh4ck", "resource": "/runs", "since": 1602752197}
    Unsubscribe:
    {"type": "UNSUBSCRIBE", "uuid": "myst3rySh4ck"}
    Example event:
    {"type": "UPDATE", "uuid": "myst3rySh4ck", "resource": "/runs", "data": {"foo": "bar"}}
    '''
    # NOTE: class-level attribute — every Websocket instance in the process
    # shares this one subscription list.
    subscriptions: List[WSSubscription] = []
    def __init__(self, app, db, event_emitter=None, queue_ttl: int = WS_QUEUE_TTL_SECONDS, cache=None):
        """Wire the '/ws' route into *app* and start listening for 'notify' events.

        Parameters
        ----------
        app : aiohttp.web.Application
            application the '/ws' GET route is registered on.
        db : database accessor
            used to resolve tables by name when loading broadcast data.
        event_emitter : AsyncIOEventEmitter (optional)
            emitter whose 'notify' events trigger broadcasts; a fresh one is
            created when not provided.
        queue_ttl : int
            TTL in seconds for the replay queue of recent events.
        cache : cache client (optional)
            supplies the artifact cache backing the task/artifact refiners.
        """
        self.event_emitter = event_emitter or AsyncIOEventEmitter()
        self.db = db
        # Recent events are retained here so reconnecting clients can replay them.
        self.queue = TTLQueue(queue_ttl)
        # Refiners postprocess task/artifact records before broadcast; absent without a cache.
        self.task_refiner = TaskRefiner(cache=cache.artifact_cache) if cache else None
        self.artifact_refiner = ArtifactRefiner(cache=cache.artifact_cache) if cache else None
        self.logger = logging.getLogger("Websocket")
        # NOTE(review): registers on the *argument* event_emitter, not
        # self.event_emitter — raises AttributeError if event_emitter is None
        # and the fallback emitter above was used. Confirm intent.
        event_emitter.on('notify', self.event_handler)
        app.router.add_route('GET', '/ws', self.websocket_handler)
        self.loop = asyncio.get_event_loop()
    # NOTE(review): `filter_dict={}` is a mutable default; safe here only because
    # it is never mutated — prefer a None sentinel.
    async def event_handler(self, operation: str, resources: List[str], data: Dict, table_name: str = None, filter_dict: Dict = {}):
        """
        Event handler for websocket events on 'notify'.
        Either receives raw data from table triggers listener and either performs a database load
        before broadcasting from the provided table, or receives predefined data and broadcasts it as-is.
        Parameters
        ----------
        operation : str
            name of the operation related to the DB event, either 'INSERT' or 'UPDATE'
        resources : List[str]
            List of resource paths that this event is related to. Used strictly for broadcasting to
            websocket subscriptions
        data : Dict
            The data of the record to be broadcast. Can either be complete, or partial.
            In case of partial data (and a provided table name) this is only used for the DB query.
        table_name : str (optional)
            name of the table that the complete data should be queried from.
        filter_dict : Dict (optional)
            a dictionary of filters used in the query when fetching complete data.
        """
        # Check if event needs to be broadcast (if anyone is subscribed to the resource)
        if any(subscription.resource in resources for subscription in self.subscriptions):
            # load the data and postprocessor for broadcasting if table
            # is provided (otherwise data has already been loaded in advance)
            if table_name:
                table = self.db.get_table_by_name(table_name)
                _postprocess = await self.get_table_postprocessor(table_name)
                _data = await load_data_from_db(table, data, filter_dict, postprocess=_postprocess)
            else:
                _data = data
            if not _data:
                # Skip sending this event to subscriptions in case data is None or empty.
                # This could be caused by insufficient/broken data and can break the UI.
                return
            # Append event to the queue so that we can later dispatch them in case of disconnections
            #
            # NOTE: server instance specific ws queue will not work when scaling across multiple instances.
            # but on the other hand loading data and pushing everything into the queue for every server instance is also
            # a suboptimal solution.
            await self.queue.append({
                'operation': operation,
                'resources': resources,
                'data': _data
            })
            for subscription in self.subscriptions:
                try:
                    # Garbage-collect subscriptions whose client has been gone
                    # longer than the replay TTL; otherwise deliver the event.
                    if subscription.disconnected_ts and time.time() - subscription.disconnected_ts > WS_QUEUE_TTL_SECONDS:
                        await self.unsubscribe_from(subscription.ws, subscription.uuid)
                    else:
                        await self._event_subscription(subscription, operation, resources, _data)
                except ConnectionResetError:
                    self.logger.debug("Trying to broadcast to a stale subscription. Unsubscribing")
                    await self.unsubscribe_from(subscription.ws, subscription.uuid)
                except Exception:
                    # One broken subscription must not stop delivery to the rest.
                    self.logger.exception("Broadcasting to subscription failed")
    async def _event_subscription(self, subscription: WSSubscription, operation: str, resources: List[str], data: Dict):
        """Send *data* to one subscription when its resource (and filter) match."""
        for resource in resources:
            if subscription.resource == resource:
                # Check if possible filters match this event
                # only if the subscription actually provided conditions.
                if subscription.filter:
                    filters_match_request = subscription.filter(data)
                else:
                    filters_match_request = True
                if filters_match_request:
                    payload = {'type': operation, 'uuid': subscription.uuid,
                               'resource': resource, 'data': data}
                    await subscription.ws.send_str(json.dumps(payload))
    async def subscribe_to(self, ws, uuid: str, resource: str, since: int):
        """Register *ws* for events on *resource*, optionally replaying since a unix timestamp."""
        # Always unsubscribe existing duplicate identifiers
        await self.unsubscribe_from(ws, uuid)
        # Create new subscription
        _resource, query, filter_fn = resource_conditions(resource)
        subscription = WSSubscription(
            ws=ws, fullpath=resource, resource=_resource, query=query, uuid=uuid,
            filter=filter_fn, disconnected_ts=None)
        self.subscriptions.append(subscription)
        # Send previous events that client might have missed due to disconnection
        if since:
            # NOTE(review): `since` is passed to values_since() unchanged — an
            # earlier comment claimed a 1-second subtraction that the code does
            # not perform. Confirm TTLQueue.values_since boundary semantics.
            event_queue = await self.queue.values_since(since)
            for _, event in event_queue:
                self.loop.create_task(
                    self._event_subscription(subscription, event['operation'], event['resources'], event['data'])
                )
    async def unsubscribe_from(self, ws, uuid: str = None):
        """Drop subscriptions for *ws*; with *uuid*, only that one subscription."""
        if uuid:
            self.subscriptions = list(
                filter(lambda s: uuid != s.uuid or ws != s.ws, self.subscriptions))
        else:
            self.subscriptions = list(
                filter(lambda s: ws != s.ws, self.subscriptions))
    async def handle_disconnect(self, ws):
        """
        Sets disconnected timestamp on websocket subscription without removing it from the list.
        Removing is handled by event_handler that checks for expired subscriptions before emitting
        """
        self.subscriptions = list(
            map(
                lambda sub: sub._replace(disconnected_ts=time.time()) if sub.ws == ws else sub,
                self.subscriptions)
        )
    async def websocket_handler(self, request):
        "Handler for received messages from the open Web Socket connection."
        # TODO: Consider using options autoping=True and heartbeat=20 if supported by clients.
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        while not ws.closed:
            async for msg in ws:
                if msg.type == WSMsgType.TEXT:
                    try:
                        # Custom ping message handling.
                        # If someone is pinging, lets answer with pong rightaway.
                        if msg.data == "__ping__":
                            await ws.send_str("__pong__")
                        else:
                            payload = json.loads(msg.data)
                            op_type = payload.get("type")
                            resource = payload.get("resource")
                            uuid = payload.get("uuid")
                            since = payload.get("since")
                            # Non-numeric 'since' values are ignored rather than rejected.
                            if since is not None and str(since).isnumeric():
                                since = int(since)
                            else:
                                since = None
                            if op_type == SUBSCRIBE and uuid and resource:
                                await self.subscribe_to(ws, uuid, resource, since)
                            elif op_type == UNSUBSCRIBE and uuid:
                                await self.unsubscribe_from(ws, uuid)
                    except Exception:
                        # Malformed client messages are logged and dropped; the socket stays open.
                        self.logger.exception("Exception occurred.")
        # Always remove clients from listeners
        await self.handle_disconnect(ws)
        return ws
    # NOTE(review): hard-coded count=8 duplicates the default of
    # WS_POSTPROCESS_CONCURRENCY_LIMIT defined above but ignores the env
    # override — consider using the constant.
    @throttle_simultaneous(count=8)
    async def get_table_postprocessor(self, table_name):
        """Return the refiner postprocess callable for *table_name*, or None."""
        if table_name == self.db.task_table_postgres.table_name:
            return self.task_refiner.postprocess
        elif table_name == self.db.artifact_table_postgres.table_name:
            return self.artifact_refiner.postprocess
        else:
            return None
async def load_data_from_db(table, data: Dict[str, Any],
filter_dict: Dict = {},
postprocess: Callable = None):
# filter the data for loading based on available primary keys
conditions_dict = {
key: data[key] for key in table.primary_keys
if key in data
}
filter_dict = {**conditions_dict, **filter_dict}
conditions, values = [], []
for k, v in filter_dict.items():
conditions.append("{} = %s".format(k))
values.append(v)
results, *_ = await table.find_records(
conditions=conditions, values=values, fetch_single=True,
enable_joins=True,
expanded=True,
postprocess=postprocess
)
return results.body
| 45.275 | 132 | 0.61697 | import os
import json
import time
import asyncio
import collections
from aiohttp import web, WSMsgType
from typing import List, Dict, Any, Callable
from .utils import resource_conditions, TTLQueue
from services.utils import logging
from pyee import AsyncIOEventEmitter
from ..data.refiner import TaskRefiner, ArtifactRefiner
from throttler import throttle_simultaneous
WS_QUEUE_TTL_SECONDS = os.environ.get("WS_QUEUE_TTL_SECONDS", 60 * 5)
WS_POSTPROCESS_CONCURRENCY_LIMIT = int(os.environ.get("WS_POSTPROCESS_CONCURRENCY_LIMIT", 8))
SUBSCRIBE = 'SUBSCRIBE'
UNSUBSCRIBE = 'UNSUBSCRIBE'
WSSubscription = collections.namedtuple(
"WSSubscription", "ws disconnected_ts fullpath resource query uuid filter")
class Websocket(object):
subscriptions: List[WSSubscription] = []
def __init__(self, app, db, event_emitter=None, queue_ttl: int = WS_QUEUE_TTL_SECONDS, cache=None):
self.event_emitter = event_emitter or AsyncIOEventEmitter()
self.db = db
self.queue = TTLQueue(queue_ttl)
self.task_refiner = TaskRefiner(cache=cache.artifact_cache) if cache else None
self.artifact_refiner = ArtifactRefiner(cache=cache.artifact_cache) if cache else None
self.logger = logging.getLogger("Websocket")
event_emitter.on('notify', self.event_handler)
app.router.add_route('GET', '/ws', self.websocket_handler)
self.loop = asyncio.get_event_loop()
async def event_handler(self, operation: str, resources: List[str], data: Dict, table_name: str = None, filter_dict: Dict = {}):
if any(subscription.resource in resources for subscription in self.subscriptions):
if table_name:
table = self.db.get_table_by_name(table_name)
_postprocess = await self.get_table_postprocessor(table_name)
_data = await load_data_from_db(table, data, filter_dict, postprocess=_postprocess)
else:
_data = data
if not _data:
return
await self.queue.append({
'operation': operation,
'resources': resources,
'data': _data
})
for subscription in self.subscriptions:
try:
if subscription.disconnected_ts and time.time() - subscription.disconnected_ts > WS_QUEUE_TTL_SECONDS:
await self.unsubscribe_from(subscription.ws, subscription.uuid)
else:
await self._event_subscription(subscription, operation, resources, _data)
except ConnectionResetError:
self.logger.debug("Trying to broadcast to a stale subscription. Unsubscribing")
await self.unsubscribe_from(subscription.ws, subscription.uuid)
except Exception:
self.logger.exception("Broadcasting to subscription failed")
async def _event_subscription(self, subscription: WSSubscription, operation: str, resources: List[str], data: Dict):
for resource in resources:
if subscription.resource == resource:
if subscription.filter:
filters_match_request = subscription.filter(data)
else:
filters_match_request = True
if filters_match_request:
payload = {'type': operation, 'uuid': subscription.uuid,
'resource': resource, 'data': data}
await subscription.ws.send_str(json.dumps(payload))
async def subscribe_to(self, ws, uuid: str, resource: str, since: int):
await self.unsubscribe_from(ws, uuid)
_resource, query, filter_fn = resource_conditions(resource)
subscription = WSSubscription(
ws=ws, fullpath=resource, resource=_resource, query=query, uuid=uuid,
filter=filter_fn, disconnected_ts=None)
self.subscriptions.append(subscription)
if since:
event_queue = await self.queue.values_since(since)
for _, event in event_queue:
self.loop.create_task(
self._event_subscription(subscription, event['operation'], event['resources'], event['data'])
)
async def unsubscribe_from(self, ws, uuid: str = None):
if uuid:
self.subscriptions = list(
filter(lambda s: uuid != s.uuid or ws != s.ws, self.subscriptions))
else:
self.subscriptions = list(
filter(lambda s: ws != s.ws, self.subscriptions))
async def handle_disconnect(self, ws):
self.subscriptions = list(
map(
lambda sub: sub._replace(disconnected_ts=time.time()) if sub.ws == ws else sub,
self.subscriptions)
)
async def websocket_handler(self, request):
ws = web.WebSocketResponse()
await ws.prepare(request)
while not ws.closed:
async for msg in ws:
if msg.type == WSMsgType.TEXT:
try:
if msg.data == "__ping__":
await ws.send_str("__pong__")
else:
payload = json.loads(msg.data)
op_type = payload.get("type")
resource = payload.get("resource")
uuid = payload.get("uuid")
since = payload.get("since")
if since is not None and str(since).isnumeric():
since = int(since)
else:
since = None
if op_type == SUBSCRIBE and uuid and resource:
await self.subscribe_to(ws, uuid, resource, since)
elif op_type == UNSUBSCRIBE and uuid:
await self.unsubscribe_from(ws, uuid)
except Exception:
self.logger.exception("Exception occurred.")
await self.handle_disconnect(ws)
return ws
@throttle_simultaneous(count=8)
async def get_table_postprocessor(self, table_name):
if table_name == self.db.task_table_postgres.table_name:
return self.task_refiner.postprocess
elif table_name == self.db.artifact_table_postgres.table_name:
return self.artifact_refiner.postprocess
else:
return None
async def load_data_from_db(table, data: Dict[str, Any],
filter_dict: Dict = {},
postprocess: Callable = None):
conditions_dict = {
key: data[key] for key in table.primary_keys
if key in data
}
filter_dict = {**conditions_dict, **filter_dict}
conditions, values = [], []
for k, v in filter_dict.items():
conditions.append("{} = %s".format(k))
values.append(v)
results, *_ = await table.find_records(
conditions=conditions, values=values, fetch_single=True,
enable_joins=True,
expanded=True,
postprocess=postprocess
)
return results.body
| true | true |
f73d1b6f0a863cbf844b200b6f9bfbc5c66038a3 | 1,845 | py | Python | ooobuild/dyn/script/finish_reason.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/dyn/script/finish_reason.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/dyn/script/finish_reason.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Enum Class
# this is a auto generated file generated by Cheetah
# Namespace: com.sun.star.script
# Libre Office Version: 7.3
from typing import TYPE_CHECKING
from ooo.oenv.env_const import UNO_ENVIRONMENT, UNO_RUNTIME
_DYNAMIC = False
if (not TYPE_CHECKING) and UNO_RUNTIME and UNO_ENVIRONMENT:
_DYNAMIC = True
if not TYPE_CHECKING and _DYNAMIC:
from ooo.helper.enum_helper import uno_enum_class_new
from com.sun.star.script.FinishReason import (Cancel, Error, OK)
def _get_enum():
# Dynamically create class that actually contains UNO enum instances
_dict = {
"__doc__": "Dynamically created class that represents com.sun.star.script.FinishReason Enum. Class loosly mimics Enum",
"__new__": uno_enum_class_new,
"__ooo_ns__": "com.sun.star.script",
"__ooo_full_ns__": "com.sun.star.script.FinishReason",
"__ooo_type_name__": "enum",
"Cancel": Cancel,
"Error": Error,
"OK": OK,
}
result = type('FinishReason', (object,), _dict)
return result
FinishReason = _get_enum()
else:
from ...lo.script.finish_reason import FinishReason as FinishReason
__all__ = ['FinishReason']
| 35.480769 | 131 | 0.702981 |
from typing import TYPE_CHECKING
from ooo.oenv.env_const import UNO_ENVIRONMENT, UNO_RUNTIME
_DYNAMIC = False
if (not TYPE_CHECKING) and UNO_RUNTIME and UNO_ENVIRONMENT:
_DYNAMIC = True
if not TYPE_CHECKING and _DYNAMIC:
from ooo.helper.enum_helper import uno_enum_class_new
from com.sun.star.script.FinishReason import (Cancel, Error, OK)
def _get_enum():
_dict = {
"__doc__": "Dynamically created class that represents com.sun.star.script.FinishReason Enum. Class loosly mimics Enum",
"__new__": uno_enum_class_new,
"__ooo_ns__": "com.sun.star.script",
"__ooo_full_ns__": "com.sun.star.script.FinishReason",
"__ooo_type_name__": "enum",
"Cancel": Cancel,
"Error": Error,
"OK": OK,
}
result = type('FinishReason', (object,), _dict)
return result
FinishReason = _get_enum()
else:
from ...lo.script.finish_reason import FinishReason as FinishReason
__all__ = ['FinishReason']
| true | true |
f73d1ce0c34183a433022f4bce2769a6211ba93a | 133,829 | py | Python | google_appengine/google/appengine/datastore/entity_pb.py | katoakira/python_study | 7ee7f3658129a918f97067be5c087ef340cc2b0b | [
"MIT"
] | null | null | null | google_appengine/google/appengine/datastore/entity_pb.py | katoakira/python_study | 7ee7f3658129a918f97067be5c087ef340cc2b0b | [
"MIT"
] | null | null | null | google_appengine/google/appengine/datastore/entity_pb.py | katoakira/python_study | 7ee7f3658129a918f97067be5c087ef340cc2b0b | [
"MIT"
] | 2 | 2020-07-25T05:03:06.000Z | 2020-11-04T04:55:57.000Z | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
class PropertyValue_ReferenceValuePathElement(ProtocolBuffer.ProtocolMessage):
has_type_ = 0
type_ = ""
has_id_ = 0
id_ = 0
has_name_ = 0
name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def type(self): return self.type_
def set_type(self, x):
self.has_type_ = 1
self.type_ = x
def clear_type(self):
if self.has_type_:
self.has_type_ = 0
self.type_ = ""
def has_type(self): return self.has_type_
def id(self): return self.id_
def set_id(self, x):
self.has_id_ = 1
self.id_ = x
def clear_id(self):
if self.has_id_:
self.has_id_ = 0
self.id_ = 0
def has_id(self): return self.has_id_
def name(self): return self.name_
def set_name(self, x):
self.has_name_ = 1
self.name_ = x
def clear_name(self):
if self.has_name_:
self.has_name_ = 0
self.name_ = ""
def has_name(self): return self.has_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_type()): self.set_type(x.type())
if (x.has_id()): self.set_id(x.id())
if (x.has_name()): self.set_name(x.name())
def Equals(self, x):
if x is self: return 1
if self.has_type_ != x.has_type_: return 0
if self.has_type_ and self.type_ != x.type_: return 0
if self.has_id_ != x.has_id_: return 0
if self.has_id_ and self.id_ != x.id_: return 0
if self.has_name_ != x.has_name_: return 0
if self.has_name_ and self.name_ != x.name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_type_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: type not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.type_))
if (self.has_id_): n += 2 + self.lengthVarInt64(self.id_)
if (self.has_name_): n += 2 + self.lengthString(len(self.name_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_type_):
n += 1
n += self.lengthString(len(self.type_))
if (self.has_id_): n += 2 + self.lengthVarInt64(self.id_)
if (self.has_name_): n += 2 + self.lengthString(len(self.name_))
return n
def Clear(self):
self.clear_type()
self.clear_id()
self.clear_name()
def OutputUnchecked(self, out):
out.putVarInt32(122)
out.putPrefixedString(self.type_)
if (self.has_id_):
out.putVarInt32(128)
out.putVarInt64(self.id_)
if (self.has_name_):
out.putVarInt32(138)
out.putPrefixedString(self.name_)
def OutputPartial(self, out):
if (self.has_type_):
out.putVarInt32(122)
out.putPrefixedString(self.type_)
if (self.has_id_):
out.putVarInt32(128)
out.putVarInt64(self.id_)
if (self.has_name_):
out.putVarInt32(138)
out.putPrefixedString(self.name_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 116: break
if tt == 122:
self.set_type(d.getPrefixedString())
continue
if tt == 128:
self.set_id(d.getVarInt64())
continue
if tt == 138:
self.set_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
return res
class PropertyValue_PointValue(ProtocolBuffer.ProtocolMessage):
has_x_ = 0
x_ = 0.0
has_y_ = 0
y_ = 0.0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def x(self): return self.x_
def set_x(self, x):
self.has_x_ = 1
self.x_ = x
def clear_x(self):
if self.has_x_:
self.has_x_ = 0
self.x_ = 0.0
def has_x(self): return self.has_x_
def y(self): return self.y_
def set_y(self, x):
self.has_y_ = 1
self.y_ = x
def clear_y(self):
if self.has_y_:
self.has_y_ = 0
self.y_ = 0.0
def has_y(self): return self.has_y_
def MergeFrom(self, x):
assert x is not self
if (x.has_x()): self.set_x(x.x())
if (x.has_y()): self.set_y(x.y())
def Equals(self, x):
if x is self: return 1
if self.has_x_ != x.has_x_: return 0
if self.has_x_ and self.x_ != x.x_: return 0
if self.has_y_ != x.has_y_: return 0
if self.has_y_ and self.y_ != x.y_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_x_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: x not set.')
if (not self.has_y_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: y not set.')
return initialized
def ByteSize(self):
n = 0
return n + 18
def ByteSizePartial(self):
n = 0
if (self.has_x_):
n += 9
if (self.has_y_):
n += 9
return n
def Clear(self):
self.clear_x()
self.clear_y()
def OutputUnchecked(self, out):
out.putVarInt32(49)
out.putDouble(self.x_)
out.putVarInt32(57)
out.putDouble(self.y_)
def OutputPartial(self, out):
if (self.has_x_):
out.putVarInt32(49)
out.putDouble(self.x_)
if (self.has_y_):
out.putVarInt32(57)
out.putDouble(self.y_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 44: break
if tt == 49:
self.set_x(d.getDouble())
continue
if tt == 57:
self.set_y(d.getDouble())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_x_: res+=prefix+("x: %s\n" % self.DebugFormat(self.x_))
if self.has_y_: res+=prefix+("y: %s\n" % self.DebugFormat(self.y_))
return res
class PropertyValue_UserValue(ProtocolBuffer.ProtocolMessage):
has_email_ = 0
email_ = ""
has_auth_domain_ = 0
auth_domain_ = ""
has_nickname_ = 0
nickname_ = ""
has_gaiaid_ = 0
gaiaid_ = 0
has_obfuscated_gaiaid_ = 0
obfuscated_gaiaid_ = ""
has_federated_identity_ = 0
federated_identity_ = ""
has_federated_provider_ = 0
federated_provider_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def email(self): return self.email_
def set_email(self, x):
self.has_email_ = 1
self.email_ = x
def clear_email(self):
if self.has_email_:
self.has_email_ = 0
self.email_ = ""
def has_email(self): return self.has_email_
def auth_domain(self): return self.auth_domain_
def set_auth_domain(self, x):
self.has_auth_domain_ = 1
self.auth_domain_ = x
def clear_auth_domain(self):
if self.has_auth_domain_:
self.has_auth_domain_ = 0
self.auth_domain_ = ""
def has_auth_domain(self): return self.has_auth_domain_
def nickname(self): return self.nickname_
def set_nickname(self, x):
self.has_nickname_ = 1
self.nickname_ = x
def clear_nickname(self):
if self.has_nickname_:
self.has_nickname_ = 0
self.nickname_ = ""
def has_nickname(self): return self.has_nickname_
def gaiaid(self): return self.gaiaid_
def set_gaiaid(self, x):
self.has_gaiaid_ = 1
self.gaiaid_ = x
def clear_gaiaid(self):
if self.has_gaiaid_:
self.has_gaiaid_ = 0
self.gaiaid_ = 0
def has_gaiaid(self): return self.has_gaiaid_
def obfuscated_gaiaid(self): return self.obfuscated_gaiaid_
def set_obfuscated_gaiaid(self, x):
self.has_obfuscated_gaiaid_ = 1
self.obfuscated_gaiaid_ = x
def clear_obfuscated_gaiaid(self):
if self.has_obfuscated_gaiaid_:
self.has_obfuscated_gaiaid_ = 0
self.obfuscated_gaiaid_ = ""
def has_obfuscated_gaiaid(self): return self.has_obfuscated_gaiaid_
def federated_identity(self): return self.federated_identity_
def set_federated_identity(self, x):
self.has_federated_identity_ = 1
self.federated_identity_ = x
def clear_federated_identity(self):
if self.has_federated_identity_:
self.has_federated_identity_ = 0
self.federated_identity_ = ""
def has_federated_identity(self): return self.has_federated_identity_
def federated_provider(self): return self.federated_provider_
def set_federated_provider(self, x):
self.has_federated_provider_ = 1
self.federated_provider_ = x
def clear_federated_provider(self):
if self.has_federated_provider_:
self.has_federated_provider_ = 0
self.federated_provider_ = ""
def has_federated_provider(self): return self.has_federated_provider_
def MergeFrom(self, x):
assert x is not self
if (x.has_email()): self.set_email(x.email())
if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
if (x.has_nickname()): self.set_nickname(x.nickname())
if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())
if (x.has_obfuscated_gaiaid()): self.set_obfuscated_gaiaid(x.obfuscated_gaiaid())
if (x.has_federated_identity()): self.set_federated_identity(x.federated_identity())
if (x.has_federated_provider()): self.set_federated_provider(x.federated_provider())
def Equals(self, x):
if x is self: return 1
if self.has_email_ != x.has_email_: return 0
if self.has_email_ and self.email_ != x.email_: return 0
if self.has_auth_domain_ != x.has_auth_domain_: return 0
if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
if self.has_nickname_ != x.has_nickname_: return 0
if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
if self.has_gaiaid_ != x.has_gaiaid_: return 0
if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
if self.has_obfuscated_gaiaid_ != x.has_obfuscated_gaiaid_: return 0
if self.has_obfuscated_gaiaid_ and self.obfuscated_gaiaid_ != x.obfuscated_gaiaid_: return 0
if self.has_federated_identity_ != x.has_federated_identity_: return 0
if self.has_federated_identity_ and self.federated_identity_ != x.federated_identity_: return 0
if self.has_federated_provider_ != x.has_federated_provider_: return 0
if self.has_federated_provider_ and self.federated_provider_ != x.federated_provider_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_email_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: email not set.')
if (not self.has_auth_domain_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: auth_domain not set.')
if (not self.has_gaiaid_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: gaiaid not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.email_))
n += self.lengthString(len(self.auth_domain_))
if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
n += self.lengthVarInt64(self.gaiaid_)
if (self.has_obfuscated_gaiaid_): n += 2 + self.lengthString(len(self.obfuscated_gaiaid_))
if (self.has_federated_identity_): n += 2 + self.lengthString(len(self.federated_identity_))
if (self.has_federated_provider_): n += 2 + self.lengthString(len(self.federated_provider_))
return n + 4
def ByteSizePartial(self):
n = 0
if (self.has_email_):
n += 1
n += self.lengthString(len(self.email_))
if (self.has_auth_domain_):
n += 1
n += self.lengthString(len(self.auth_domain_))
if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
if (self.has_gaiaid_):
n += 2
n += self.lengthVarInt64(self.gaiaid_)
if (self.has_obfuscated_gaiaid_): n += 2 + self.lengthString(len(self.obfuscated_gaiaid_))
if (self.has_federated_identity_): n += 2 + self.lengthString(len(self.federated_identity_))
if (self.has_federated_provider_): n += 2 + self.lengthString(len(self.federated_provider_))
return n
def Clear(self):
self.clear_email()
self.clear_auth_domain()
self.clear_nickname()
self.clear_gaiaid()
self.clear_obfuscated_gaiaid()
self.clear_federated_identity()
self.clear_federated_provider()
def OutputUnchecked(self, out):
out.putVarInt32(74)
out.putPrefixedString(self.email_)
out.putVarInt32(82)
out.putPrefixedString(self.auth_domain_)
if (self.has_nickname_):
out.putVarInt32(90)
out.putPrefixedString(self.nickname_)
out.putVarInt32(144)
out.putVarInt64(self.gaiaid_)
if (self.has_obfuscated_gaiaid_):
out.putVarInt32(154)
out.putPrefixedString(self.obfuscated_gaiaid_)
if (self.has_federated_identity_):
out.putVarInt32(170)
out.putPrefixedString(self.federated_identity_)
if (self.has_federated_provider_):
out.putVarInt32(178)
out.putPrefixedString(self.federated_provider_)
def OutputPartial(self, out):
if (self.has_email_):
out.putVarInt32(74)
out.putPrefixedString(self.email_)
if (self.has_auth_domain_):
out.putVarInt32(82)
out.putPrefixedString(self.auth_domain_)
if (self.has_nickname_):
out.putVarInt32(90)
out.putPrefixedString(self.nickname_)
if (self.has_gaiaid_):
out.putVarInt32(144)
out.putVarInt64(self.gaiaid_)
if (self.has_obfuscated_gaiaid_):
out.putVarInt32(154)
out.putPrefixedString(self.obfuscated_gaiaid_)
if (self.has_federated_identity_):
out.putVarInt32(170)
out.putPrefixedString(self.federated_identity_)
if (self.has_federated_provider_):
out.putVarInt32(178)
out.putPrefixedString(self.federated_provider_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 68: break
if tt == 74:
self.set_email(d.getPrefixedString())
continue
if tt == 82:
self.set_auth_domain(d.getPrefixedString())
continue
if tt == 90:
self.set_nickname(d.getPrefixedString())
continue
if tt == 144:
self.set_gaiaid(d.getVarInt64())
continue
if tt == 154:
self.set_obfuscated_gaiaid(d.getPrefixedString())
continue
if tt == 170:
self.set_federated_identity(d.getPrefixedString())
continue
if tt == 178:
self.set_federated_provider(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
if self.has_obfuscated_gaiaid_: res+=prefix+("obfuscated_gaiaid: %s\n" % self.DebugFormatString(self.obfuscated_gaiaid_))
if self.has_federated_identity_: res+=prefix+("federated_identity: %s\n" % self.DebugFormatString(self.federated_identity_))
if self.has_federated_provider_: res+=prefix+("federated_provider: %s\n" % self.DebugFormatString(self.federated_provider_))
return res
class PropertyValue_ReferenceValue(ProtocolBuffer.ProtocolMessage):
has_app_ = 0
app_ = ""
has_name_space_ = 0
name_space_ = ""
def __init__(self, contents=None):
self.pathelement_ = []
if contents is not None: self.MergeFromString(contents)
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def name_space(self): return self.name_space_
def set_name_space(self, x):
self.has_name_space_ = 1
self.name_space_ = x
def clear_name_space(self):
if self.has_name_space_:
self.has_name_space_ = 0
self.name_space_ = ""
def has_name_space(self): return self.has_name_space_
def pathelement_size(self): return len(self.pathelement_)
def pathelement_list(self): return self.pathelement_
def pathelement(self, i):
return self.pathelement_[i]
def mutable_pathelement(self, i):
return self.pathelement_[i]
def add_pathelement(self):
x = PropertyValue_ReferenceValuePathElement()
self.pathelement_.append(x)
return x
def clear_pathelement(self):
self.pathelement_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_app()): self.set_app(x.app())
if (x.has_name_space()): self.set_name_space(x.name_space())
for i in xrange(x.pathelement_size()): self.add_pathelement().CopyFrom(x.pathelement(i))
def Equals(self, x):
if x is self: return 1
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
if self.has_name_space_ != x.has_name_space_: return 0
if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
if len(self.pathelement_) != len(x.pathelement_): return 0
for e1, e2 in zip(self.pathelement_, x.pathelement_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_app_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app not set.')
for p in self.pathelement_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.app_))
if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
n += 2 * len(self.pathelement_)
for i in xrange(len(self.pathelement_)): n += self.pathelement_[i].ByteSize()
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_app_):
n += 1
n += self.lengthString(len(self.app_))
if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
n += 2 * len(self.pathelement_)
for i in xrange(len(self.pathelement_)): n += self.pathelement_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_app()
self.clear_name_space()
self.clear_pathelement()
def OutputUnchecked(self, out):
out.putVarInt32(106)
out.putPrefixedString(self.app_)
for i in xrange(len(self.pathelement_)):
out.putVarInt32(115)
self.pathelement_[i].OutputUnchecked(out)
out.putVarInt32(116)
if (self.has_name_space_):
out.putVarInt32(162)
out.putPrefixedString(self.name_space_)
def OutputPartial(self, out):
if (self.has_app_):
out.putVarInt32(106)
out.putPrefixedString(self.app_)
for i in xrange(len(self.pathelement_)):
out.putVarInt32(115)
self.pathelement_[i].OutputPartial(out)
out.putVarInt32(116)
if (self.has_name_space_):
out.putVarInt32(162)
out.putPrefixedString(self.name_space_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 100: break
if tt == 106:
self.set_app(d.getPrefixedString())
continue
if tt == 115:
self.add_pathelement().TryMerge(d)
continue
if tt == 162:
self.set_name_space(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
cnt=0
for e in self.pathelement_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("PathElement%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
class PropertyValue(ProtocolBuffer.ProtocolMessage):
has_int64value_ = 0
int64value_ = 0
has_booleanvalue_ = 0
booleanvalue_ = 0
has_stringvalue_ = 0
stringvalue_ = ""
has_doublevalue_ = 0
doublevalue_ = 0.0
has_pointvalue_ = 0
pointvalue_ = None
has_uservalue_ = 0
uservalue_ = None
has_referencevalue_ = 0
referencevalue_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def int64value(self): return self.int64value_
def set_int64value(self, x):
self.has_int64value_ = 1
self.int64value_ = x
def clear_int64value(self):
if self.has_int64value_:
self.has_int64value_ = 0
self.int64value_ = 0
def has_int64value(self): return self.has_int64value_
def booleanvalue(self): return self.booleanvalue_
def set_booleanvalue(self, x):
self.has_booleanvalue_ = 1
self.booleanvalue_ = x
def clear_booleanvalue(self):
if self.has_booleanvalue_:
self.has_booleanvalue_ = 0
self.booleanvalue_ = 0
def has_booleanvalue(self): return self.has_booleanvalue_
def stringvalue(self): return self.stringvalue_
def set_stringvalue(self, x):
self.has_stringvalue_ = 1
self.stringvalue_ = x
def clear_stringvalue(self):
if self.has_stringvalue_:
self.has_stringvalue_ = 0
self.stringvalue_ = ""
def has_stringvalue(self): return self.has_stringvalue_
def doublevalue(self): return self.doublevalue_
def set_doublevalue(self, x):
self.has_doublevalue_ = 1
self.doublevalue_ = x
def clear_doublevalue(self):
if self.has_doublevalue_:
self.has_doublevalue_ = 0
self.doublevalue_ = 0.0
def has_doublevalue(self): return self.has_doublevalue_
def pointvalue(self):
if self.pointvalue_ is None:
self.lazy_init_lock_.acquire()
try:
if self.pointvalue_ is None: self.pointvalue_ = PropertyValue_PointValue()
finally:
self.lazy_init_lock_.release()
return self.pointvalue_
def mutable_pointvalue(self): self.has_pointvalue_ = 1; return self.pointvalue()
def clear_pointvalue(self):
if self.has_pointvalue_:
self.has_pointvalue_ = 0;
if self.pointvalue_ is not None: self.pointvalue_.Clear()
def has_pointvalue(self): return self.has_pointvalue_
def uservalue(self):
if self.uservalue_ is None:
self.lazy_init_lock_.acquire()
try:
if self.uservalue_ is None: self.uservalue_ = PropertyValue_UserValue()
finally:
self.lazy_init_lock_.release()
return self.uservalue_
def mutable_uservalue(self): self.has_uservalue_ = 1; return self.uservalue()
def clear_uservalue(self):
if self.has_uservalue_:
self.has_uservalue_ = 0;
if self.uservalue_ is not None: self.uservalue_.Clear()
def has_uservalue(self): return self.has_uservalue_
def referencevalue(self):
if self.referencevalue_ is None:
self.lazy_init_lock_.acquire()
try:
if self.referencevalue_ is None: self.referencevalue_ = PropertyValue_ReferenceValue()
finally:
self.lazy_init_lock_.release()
return self.referencevalue_
def mutable_referencevalue(self): self.has_referencevalue_ = 1; return self.referencevalue()
def clear_referencevalue(self):
if self.has_referencevalue_:
self.has_referencevalue_ = 0;
if self.referencevalue_ is not None: self.referencevalue_.Clear()
def has_referencevalue(self): return self.has_referencevalue_
  # Copy every set field of x into self (scalars overwrite, sub-messages
  # merge recursively). x must be a distinct PropertyValue instance.
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_int64value()): self.set_int64value(x.int64value())
    if (x.has_booleanvalue()): self.set_booleanvalue(x.booleanvalue())
    if (x.has_stringvalue()): self.set_stringvalue(x.stringvalue())
    if (x.has_doublevalue()): self.set_doublevalue(x.doublevalue())
    if (x.has_pointvalue()): self.mutable_pointvalue().MergeFrom(x.pointvalue())
    if (x.has_uservalue()): self.mutable_uservalue().MergeFrom(x.uservalue())
    if (x.has_referencevalue()): self.mutable_referencevalue().MergeFrom(x.referencevalue())
  # Field-by-field equality: both the has-bit and (when set) the value must
  # match for every field. Returns 1/0 (Python 2 generated-code convention).
  def Equals(self, x):
    if x is self: return 1
    if self.has_int64value_ != x.has_int64value_: return 0
    if self.has_int64value_ and self.int64value_ != x.int64value_: return 0
    if self.has_booleanvalue_ != x.has_booleanvalue_: return 0
    if self.has_booleanvalue_ and self.booleanvalue_ != x.booleanvalue_: return 0
    if self.has_stringvalue_ != x.has_stringvalue_: return 0
    if self.has_stringvalue_ and self.stringvalue_ != x.stringvalue_: return 0
    if self.has_doublevalue_ != x.has_doublevalue_: return 0
    if self.has_doublevalue_ and self.doublevalue_ != x.doublevalue_: return 0
    if self.has_pointvalue_ != x.has_pointvalue_: return 0
    if self.has_pointvalue_ and self.pointvalue_ != x.pointvalue_: return 0
    if self.has_uservalue_ != x.has_uservalue_: return 0
    if self.has_uservalue_ and self.uservalue_ != x.uservalue_: return 0
    if self.has_referencevalue_ != x.has_referencevalue_: return 0
    if self.has_referencevalue_ and self.referencevalue_ != x.referencevalue_: return 0
    return 1
  # All PropertyValue fields are optional, so initialization only depends on
  # any set sub-messages being themselves initialized. Appends human-readable
  # reasons to debug_strs (if given) via the sub-message checks.
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (self.has_pointvalue_ and not self.pointvalue_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_uservalue_ and not self.uservalue_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_referencevalue_ and not self.referencevalue_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  # Serialized size in bytes. Per-field constants are tag bytes plus payload:
  # e.g. booleanValue = 1 tag + 1 bool, doubleValue = 1 tag + 8, and each
  # group field costs 2 (START/END group tags) plus its contents.
  def ByteSize(self):
    n = 0
    if (self.has_int64value_): n += 1 + self.lengthVarInt64(self.int64value_)
    if (self.has_booleanvalue_): n += 2
    if (self.has_stringvalue_): n += 1 + self.lengthString(len(self.stringvalue_))
    if (self.has_doublevalue_): n += 9
    if (self.has_pointvalue_): n += 2 + self.pointvalue_.ByteSize()
    if (self.has_uservalue_): n += 2 + self.uservalue_.ByteSize()
    if (self.has_referencevalue_): n += 2 + self.referencevalue_.ByteSize()
    return n
  # Same as ByteSize but tolerates missing required fields in sub-messages
  # (pairs with OutputPartial).
  def ByteSizePartial(self):
    n = 0
    if (self.has_int64value_): n += 1 + self.lengthVarInt64(self.int64value_)
    if (self.has_booleanvalue_): n += 2
    if (self.has_stringvalue_): n += 1 + self.lengthString(len(self.stringvalue_))
    if (self.has_doublevalue_): n += 9
    if (self.has_pointvalue_): n += 2 + self.pointvalue_.ByteSizePartial()
    if (self.has_uservalue_): n += 2 + self.uservalue_.ByteSizePartial()
    if (self.has_referencevalue_): n += 2 + self.referencevalue_.ByteSizePartial()
    return n
  # Reset every field to its default / unset state.
  def Clear(self):
    self.clear_int64value()
    self.clear_booleanvalue()
    self.clear_stringvalue()
    self.clear_doublevalue()
    self.clear_pointvalue()
    self.clear_uservalue()
    self.clear_referencevalue()
  # Serialize set fields to `out` without checking required-field presence.
  # The magic numbers are precomputed wire tags ((field_number << 3) | type):
  # 8=int64Value varint, 16=booleanValue, 26=stringValue length-delimited,
  # 33=doubleValue fixed64, 43/44=PointValue START/END group,
  # 67/68=UserValue group, 99/100=ReferenceValue group.
  def OutputUnchecked(self, out):
    if (self.has_int64value_):
      out.putVarInt32(8)
      out.putVarInt64(self.int64value_)
    if (self.has_booleanvalue_):
      out.putVarInt32(16)
      out.putBoolean(self.booleanvalue_)
    if (self.has_stringvalue_):
      out.putVarInt32(26)
      out.putPrefixedString(self.stringvalue_)
    if (self.has_doublevalue_):
      out.putVarInt32(33)
      out.putDouble(self.doublevalue_)
    if (self.has_pointvalue_):
      out.putVarInt32(43)
      self.pointvalue_.OutputUnchecked(out)
      out.putVarInt32(44)
    if (self.has_uservalue_):
      out.putVarInt32(67)
      self.uservalue_.OutputUnchecked(out)
      out.putVarInt32(68)
    if (self.has_referencevalue_):
      out.putVarInt32(99)
      self.referencevalue_.OutputUnchecked(out)
      out.putVarInt32(100)
  # Same wire layout, but sub-messages may themselves be partially set.
  def OutputPartial(self, out):
    if (self.has_int64value_):
      out.putVarInt32(8)
      out.putVarInt64(self.int64value_)
    if (self.has_booleanvalue_):
      out.putVarInt32(16)
      out.putBoolean(self.booleanvalue_)
    if (self.has_stringvalue_):
      out.putVarInt32(26)
      out.putPrefixedString(self.stringvalue_)
    if (self.has_doublevalue_):
      out.putVarInt32(33)
      out.putDouble(self.doublevalue_)
    if (self.has_pointvalue_):
      out.putVarInt32(43)
      self.pointvalue_.OutputPartial(out)
      out.putVarInt32(44)
    if (self.has_uservalue_):
      out.putVarInt32(67)
      self.uservalue_.OutputPartial(out)
      out.putVarInt32(68)
    if (self.has_referencevalue_):
      out.putVarInt32(99)
      self.referencevalue_.OutputPartial(out)
      out.putVarInt32(100)
  # Decode fields from decoder `d`, merging into self. Tag values mirror
  # OutputUnchecked; unknown tags are skipped, and tag 0 is malformed input.
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_int64value(d.getVarInt64())
        continue
      if tt == 16:
        self.set_booleanvalue(d.getBoolean())
        continue
      if tt == 26:
        self.set_stringvalue(d.getPrefixedString())
        continue
      if tt == 33:
        self.set_doublevalue(d.getDouble())
        continue
      if tt == 43:
        self.mutable_pointvalue().TryMerge(d)
        continue
      if tt == 67:
        self.mutable_uservalue().TryMerge(d)
        continue
      if tt == 99:
        self.mutable_referencevalue().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  # Human-readable text form of the set fields, indented by `prefix`.
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_int64value_: res+=prefix+("int64Value: %s\n" % self.DebugFormatInt64(self.int64value_))
    if self.has_booleanvalue_: res+=prefix+("booleanValue: %s\n" % self.DebugFormatBool(self.booleanvalue_))
    if self.has_stringvalue_: res+=prefix+("stringValue: %s\n" % self.DebugFormatString(self.stringvalue_))
    if self.has_doublevalue_: res+=prefix+("doubleValue: %s\n" % self.DebugFormat(self.doublevalue_))
    if self.has_pointvalue_:
      res+=prefix+"PointValue {\n"
      res+=self.pointvalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_uservalue_:
      res+=prefix+"UserValue {\n"
      res+=self.uservalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_referencevalue_:
      res+=prefix+"ReferenceValue {\n"
      res+=self.referencevalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    return res
  # Densify a sparse {tag: value} dict into a tuple indexed 0..maxtag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kint64Value = 1
kbooleanValue = 2
kstringValue = 3
kdoubleValue = 4
kPointValueGroup = 5
kPointValuex = 6
kPointValuey = 7
kUserValueGroup = 8
kUserValueemail = 9
kUserValueauth_domain = 10
kUserValuenickname = 11
kUserValuegaiaid = 18
kUserValueobfuscated_gaiaid = 19
kUserValuefederated_identity = 21
kUserValuefederated_provider = 22
kReferenceValueGroup = 12
kReferenceValueapp = 13
kReferenceValuename_space = 20
kReferenceValuePathElementGroup = 14
kReferenceValuePathElementtype = 15
kReferenceValuePathElementid = 16
kReferenceValuePathElementname = 17
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "int64Value",
2: "booleanValue",
3: "stringValue",
4: "doubleValue",
5: "PointValue",
6: "x",
7: "y",
8: "UserValue",
9: "email",
10: "auth_domain",
11: "nickname",
12: "ReferenceValue",
13: "app",
14: "PathElement",
15: "type",
16: "id",
17: "name",
18: "gaiaid",
19: "obfuscated_gaiaid",
20: "name_space",
21: "federated_identity",
22: "federated_provider",
}, 22)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.DOUBLE,
5: ProtocolBuffer.Encoder.STARTGROUP,
6: ProtocolBuffer.Encoder.DOUBLE,
7: ProtocolBuffer.Encoder.DOUBLE,
8: ProtocolBuffer.Encoder.STARTGROUP,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.STRING,
12: ProtocolBuffer.Encoder.STARTGROUP,
13: ProtocolBuffer.Encoder.STRING,
14: ProtocolBuffer.Encoder.STARTGROUP,
15: ProtocolBuffer.Encoder.STRING,
16: ProtocolBuffer.Encoder.NUMERIC,
17: ProtocolBuffer.Encoder.STRING,
18: ProtocolBuffer.Encoder.NUMERIC,
19: ProtocolBuffer.Encoder.STRING,
20: ProtocolBuffer.Encoder.STRING,
21: ProtocolBuffer.Encoder.STRING,
22: ProtocolBuffer.Encoder.STRING,
}, 22, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.PropertyValue'
class Property(ProtocolBuffer.ProtocolMessage):
  """Generated message: one named, typed property of a datastore entity.

  Required fields: name (tag 3), value (tag 5, a PropertyValue), and
  multiple (tag 4). Optional: meaning, meaning_uri, embedded.
  """

  # Meaning enum: interpretation hints for the raw value.
  NO_MEANING = 0
  BLOB = 14
  TEXT = 15
  BYTESTRING = 16
  ATOM_CATEGORY = 1
  ATOM_LINK = 2
  ATOM_TITLE = 3
  ATOM_CONTENT = 4
  ATOM_SUMMARY = 5
  ATOM_AUTHOR = 6
  GD_WHEN = 7
  GD_EMAIL = 8
  GEORSS_POINT = 9
  GD_IM = 10
  GD_PHONENUMBER = 11
  GD_POSTALADDRESS = 12
  GD_RATING = 13
  BLOBKEY = 17
  ENTITY_PROTO = 19
  INDEX_VALUE = 18

  _Meaning_NAMES = {
    0: "NO_MEANING",
    14: "BLOB",
    15: "TEXT",
    16: "BYTESTRING",
    1: "ATOM_CATEGORY",
    2: "ATOM_LINK",
    3: "ATOM_TITLE",
    4: "ATOM_CONTENT",
    5: "ATOM_SUMMARY",
    6: "ATOM_AUTHOR",
    7: "GD_WHEN",
    8: "GD_EMAIL",
    9: "GEORSS_POINT",
    10: "GD_IM",
    11: "GD_PHONENUMBER",
    12: "GD_POSTALADDRESS",
    13: "GD_RATING",
    17: "BLOBKEY",
    19: "ENTITY_PROTO",
    18: "INDEX_VALUE",
  }

  # Map a Meaning enum value to its symbolic name ("" if unknown).
  def Meaning_Name(cls, x): return cls._Meaning_NAMES.get(x, "")
  Meaning_Name = classmethod(Meaning_Name)

  # Per-field has-bits and default values (class-level defaults shared
  # until an instance sets its own).
  has_meaning_ = 0
  meaning_ = 0
  has_meaning_uri_ = 0
  meaning_uri_ = ""
  has_name_ = 0
  name_ = ""
  has_value_ = 0
  has_multiple_ = 0
  multiple_ = 0
  has_embedded_ = 0
  embedded_ = 0

  def __init__(self, contents=None):
    # value is a required sub-message, so it is constructed eagerly.
    self.value_ = PropertyValue()
    if contents is not None: self.MergeFromString(contents)

  def meaning(self): return self.meaning_

  def set_meaning(self, x):
    self.has_meaning_ = 1
    self.meaning_ = x

  def clear_meaning(self):
    if self.has_meaning_:
      self.has_meaning_ = 0
      self.meaning_ = 0

  def has_meaning(self): return self.has_meaning_

  def meaning_uri(self): return self.meaning_uri_

  def set_meaning_uri(self, x):
    self.has_meaning_uri_ = 1
    self.meaning_uri_ = x

  def clear_meaning_uri(self):
    if self.has_meaning_uri_:
      self.has_meaning_uri_ = 0
      self.meaning_uri_ = ""

  def has_meaning_uri(self): return self.has_meaning_uri_

  def name(self): return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self): return self.has_name_

  def value(self): return self.value_

  def mutable_value(self): self.has_value_ = 1; return self.value_

  def clear_value(self):self.has_value_ = 0; self.value_.Clear()

  def has_value(self): return self.has_value_

  def multiple(self): return self.multiple_

  def set_multiple(self, x):
    self.has_multiple_ = 1
    self.multiple_ = x

  def clear_multiple(self):
    if self.has_multiple_:
      self.has_multiple_ = 0
      self.multiple_ = 0

  def has_multiple(self): return self.has_multiple_

  def embedded(self): return self.embedded_

  def set_embedded(self, x):
    self.has_embedded_ = 1
    self.embedded_ = x

  def clear_embedded(self):
    if self.has_embedded_:
      self.has_embedded_ = 0
      self.embedded_ = 0

  def has_embedded(self): return self.has_embedded_

  # Copy every set field of x into self.
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_meaning()): self.set_meaning(x.meaning())
    if (x.has_meaning_uri()): self.set_meaning_uri(x.meaning_uri())
    if (x.has_name()): self.set_name(x.name())
    if (x.has_value()): self.mutable_value().MergeFrom(x.value())
    if (x.has_multiple()): self.set_multiple(x.multiple())
    if (x.has_embedded()): self.set_embedded(x.embedded())

  # Field-by-field equality over has-bits and values; returns 1/0.
  def Equals(self, x):
    if x is self: return 1
    if self.has_meaning_ != x.has_meaning_: return 0
    if self.has_meaning_ and self.meaning_ != x.meaning_: return 0
    if self.has_meaning_uri_ != x.has_meaning_uri_: return 0
    if self.has_meaning_uri_ and self.meaning_uri_ != x.meaning_uri_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    if self.has_multiple_ != x.has_multiple_: return 0
    if self.has_multiple_ and self.multiple_ != x.multiple_: return 0
    if self.has_embedded_ != x.has_embedded_: return 0
    if self.has_embedded_ and self.embedded_ != x.embedded_: return 0
    return 1

  # Required fields name, value and multiple must all be set (and value
  # itself initialized); reasons are appended to debug_strs when provided.
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    elif not self.value_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_multiple_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: multiple not set.')
    return initialized

  # Serialized size; the trailing +4 is the fixed cost of the required
  # fields' tags (name 1 + value 1 + multiple tag+bool 2).
  def ByteSize(self):
    n = 0
    if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
    if (self.has_meaning_uri_): n += 1 + self.lengthString(len(self.meaning_uri_))
    n += self.lengthString(len(self.name_))
    n += self.lengthString(self.value_.ByteSize())
    if (self.has_embedded_): n += 2
    return n + 4

  # Size variant that only counts fields actually set.
  def ByteSizePartial(self):
    n = 0
    if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
    if (self.has_meaning_uri_): n += 1 + self.lengthString(len(self.meaning_uri_))
    if (self.has_name_):
      n += 1
      n += self.lengthString(len(self.name_))
    if (self.has_value_):
      n += 1
      n += self.lengthString(self.value_.ByteSizePartial())
    if (self.has_multiple_):
      n += 2
    if (self.has_embedded_): n += 2
    return n

  def Clear(self):
    self.clear_meaning()
    self.clear_meaning_uri()
    self.clear_name()
    self.clear_value()
    self.clear_multiple()
    self.clear_embedded()

  # Serialize assuming required fields are present. Wire tags:
  # 8=meaning, 18=meaning_uri, 26=name, 32=multiple, 42=value, 48=embedded.
  def OutputUnchecked(self, out):
    if (self.has_meaning_):
      out.putVarInt32(8)
      out.putVarInt32(self.meaning_)
    if (self.has_meaning_uri_):
      out.putVarInt32(18)
      out.putPrefixedString(self.meaning_uri_)
    out.putVarInt32(26)
    out.putPrefixedString(self.name_)
    out.putVarInt32(32)
    out.putBoolean(self.multiple_)
    out.putVarInt32(42)
    out.putVarInt32(self.value_.ByteSize())
    self.value_.OutputUnchecked(out)
    if (self.has_embedded_):
      out.putVarInt32(48)
      out.putBoolean(self.embedded_)

  # Serialize only set fields (required fields may be absent).
  def OutputPartial(self, out):
    if (self.has_meaning_):
      out.putVarInt32(8)
      out.putVarInt32(self.meaning_)
    if (self.has_meaning_uri_):
      out.putVarInt32(18)
      out.putPrefixedString(self.meaning_uri_)
    if (self.has_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.name_)
    if (self.has_multiple_):
      out.putVarInt32(32)
      out.putBoolean(self.multiple_)
    if (self.has_value_):
      out.putVarInt32(42)
      out.putVarInt32(self.value_.ByteSizePartial())
      self.value_.OutputPartial(out)
    if (self.has_embedded_):
      out.putVarInt32(48)
      out.putBoolean(self.embedded_)

  # Decode from `d`, merging into self; unknown tags skipped, tag 0 invalid.
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_meaning(d.getVarInt32())
        continue
      if tt == 18:
        self.set_meaning_uri(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_multiple(d.getBoolean())
        continue
      if tt == 42:
        # Length-delimited sub-message: decode from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_value().TryMerge(tmp)
        continue
      if tt == 48:
        self.set_embedded(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  # Human-readable text form of the set fields.
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_meaning_: res+=prefix+("meaning: %s\n" % self.DebugFormatInt32(self.meaning_))
    if self.has_meaning_uri_: res+=prefix+("meaning_uri: %s\n" % self.DebugFormatString(self.meaning_uri_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_value_:
      res+=prefix+"value <\n"
      res+=self.value_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_multiple_: res+=prefix+("multiple: %s\n" % self.DebugFormatBool(self.multiple_))
    if self.has_embedded_: res+=prefix+("embedded: %s\n" % self.DebugFormatBool(self.embedded_))
    return res

  # Densify a sparse {tag: value} dict into a tuple indexed 0..maxtag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants and tag lookup tables for the text formatter.
  kmeaning = 1
  kmeaning_uri = 2
  kname = 3
  kvalue = 5
  kmultiple = 4
  kembedded = 6
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "meaning",
    2: "meaning_uri",
    3: "name",
    4: "multiple",
    5: "value",
    6: "embedded",
  }, 6)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.NUMERIC,
  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Property'
class Path_Element(ProtocolBuffer.ProtocolMessage):
  """Generated group message: one element of an entity key path.

  Required: type (tag 2). Exactly one of id (tag 3) or name (tag 4)
  identifies the element (the encoding does not enforce the choice).
  Encoded as a protobuf *group*, so its tag tables live on the enclosing
  Path class.
  """

  # Per-field has-bits and defaults.
  has_type_ = 0
  type_ = ""
  has_id_ = 0
  id_ = 0
  has_name_ = 0
  name_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def type(self): return self.type_

  def set_type(self, x):
    self.has_type_ = 1
    self.type_ = x

  def clear_type(self):
    if self.has_type_:
      self.has_type_ = 0
      self.type_ = ""

  def has_type(self): return self.has_type_

  def id(self): return self.id_

  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x

  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0

  def has_id(self): return self.has_id_

  def name(self): return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self): return self.has_name_

  # Copy every set field of x into self.
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_type()): self.set_type(x.type())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_name()): self.set_name(x.name())

  # Field-by-field equality; returns 1/0.
  def Equals(self, x):
    if x is self: return 1
    if self.has_type_ != x.has_type_: return 0
    if self.has_type_ and self.type_ != x.type_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    return 1

  # Only `type` is required.
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: type not set.')
    return initialized

  # Serialized size; the trailing +1 is the required type field's tag byte.
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
    return n + 1

  # Size variant that only counts fields actually set.
  def ByteSizePartial(self):
    n = 0
    if (self.has_type_):
      n += 1
      n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
    return n

  def Clear(self):
    self.clear_type()
    self.clear_id()
    self.clear_name()

  # Serialize assuming required fields are present. Wire tags:
  # 18=type, 24=id, 34=name. Group START/END tags are written by Path.
  def OutputUnchecked(self, out):
    out.putVarInt32(18)
    out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(24)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(34)
      out.putPrefixedString(self.name_)

  # Serialize only set fields.
  def OutputPartial(self, out):
    if (self.has_type_):
      out.putVarInt32(18)
      out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(24)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(34)
      out.putPrefixedString(self.name_)

  # Group decoding: loop until the END_GROUP tag (12) rather than until
  # the decoder is exhausted.
  def TryMerge(self, d):
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_type(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_id(d.getVarInt64())
        continue
      if tt == 34:
        self.set_name(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  # Human-readable text form of the set fields.
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    return res
class Path(ProtocolBuffer.ProtocolMessage):
  """Generated message: an entity key path, a repeated group of
  Path_Element (Element, tag 1)."""

  def __init__(self, contents=None):
    # element is a repeated field, held as a plain list.
    self.element_ = []
    if contents is not None: self.MergeFromString(contents)

  def element_size(self): return len(self.element_)

  def element_list(self): return self.element_

  def element(self, i):
    return self.element_[i]

  def mutable_element(self, i):
    return self.element_[i]

  # Append a fresh element and return it for the caller to fill in.
  def add_element(self):
    x = Path_Element()
    self.element_.append(x)
    return x

  def clear_element(self):
    self.element_ = []

  # Append deep copies of all of x's elements.
  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.element_size()): self.add_element().CopyFrom(x.element(i))

  # Positional element-by-element equality; returns 1/0.
  def Equals(self, x):
    if x is self: return 1
    if len(self.element_) != len(x.element_): return 0
    for e1, e2 in zip(self.element_, x.element_):
      if e1 != e2: return 0
    return 1

  # Initialized iff every element is initialized.
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.element_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  # Serialized size: each element costs 2 group-tag bytes plus its contents.
  def ByteSize(self):
    n = 0
    n += 2 * len(self.element_)
    for i in xrange(len(self.element_)): n += self.element_[i].ByteSize()
    return n

  def ByteSizePartial(self):
    n = 0
    n += 2 * len(self.element_)
    for i in xrange(len(self.element_)): n += self.element_[i].ByteSizePartial()
    return n

  def Clear(self):
    self.clear_element()

  # Serialize: each element wrapped in START_GROUP (11) / END_GROUP (12).
  def OutputUnchecked(self, out):
    for i in xrange(len(self.element_)):
      out.putVarInt32(11)
      self.element_[i].OutputUnchecked(out)
      out.putVarInt32(12)

  def OutputPartial(self, out):
    for i in xrange(len(self.element_)):
      out.putVarInt32(11)
      self.element_[i].OutputPartial(out)
      out.putVarInt32(12)

  # Decode: tag 11 opens an Element group; the element consumes through
  # its END_GROUP tag. Unknown tags skipped, tag 0 invalid.
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_element().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  # Human-readable text form; element index shown when printElemNumber set.
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.element_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Element%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res

  # Densify a sparse {tag: value} dict into a tuple indexed 0..maxtag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants (including the nested group's fields) and
  # tag lookup tables for the text formatter.
  kElementGroup = 1
  kElementtype = 2
  kElementid = 3
  kElementname = 4
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "Element",
    2: "type",
    3: "id",
    4: "name",
  }, 4)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.STRING,
  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Path'
class Reference(ProtocolBuffer.ProtocolMessage):
  """Generated message: a fully-qualified entity key.

  Required: app (tag 13) and path (tag 14, a Path sub-message).
  Optional: name_space (tag 20).
  """

  # Per-field has-bits and defaults.
  has_app_ = 0
  app_ = ""
  has_name_space_ = 0
  name_space_ = ""
  has_path_ = 0

  def __init__(self, contents=None):
    # path is a required sub-message, so it is constructed eagerly.
    self.path_ = Path()
    if contents is not None: self.MergeFromString(contents)

  def app(self): return self.app_

  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x

  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""

  def has_app(self): return self.has_app_

  def name_space(self): return self.name_space_

  def set_name_space(self, x):
    self.has_name_space_ = 1
    self.name_space_ = x

  def clear_name_space(self):
    if self.has_name_space_:
      self.has_name_space_ = 0
      self.name_space_ = ""

  def has_name_space(self): return self.has_name_space_

  def path(self): return self.path_

  def mutable_path(self): self.has_path_ = 1; return self.path_

  def clear_path(self):self.has_path_ = 0; self.path_.Clear()

  def has_path(self): return self.has_path_

  # Copy every set field of x into self.
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_app()): self.set_app(x.app())
    if (x.has_name_space()): self.set_name_space(x.name_space())
    if (x.has_path()): self.mutable_path().MergeFrom(x.path())

  # Field-by-field equality; returns 1/0.
  def Equals(self, x):
    if x is self: return 1
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_name_space_ != x.has_name_space_: return 0
    if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
    if self.has_path_ != x.has_path_: return 0
    if self.has_path_ and self.path_ != x.path_: return 0
    return 1

  # app and path are required; path must itself be initialized.
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    if (not self.has_path_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: path not set.')
    elif not self.path_.IsInitialized(debug_strs): initialized = 0
    return initialized

  # Serialized size; the trailing +2 covers the required app and path tag
  # bytes. name_space's tag (20 -> varint 162) takes 2 bytes, hence 2+len.
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.app_))
    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
    n += self.lengthString(self.path_.ByteSize())
    return n + 2

  # Size variant that only counts fields actually set.
  def ByteSizePartial(self):
    n = 0
    if (self.has_app_):
      n += 1
      n += self.lengthString(len(self.app_))
    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
    if (self.has_path_):
      n += 1
      n += self.lengthString(self.path_.ByteSizePartial())
    return n

  def Clear(self):
    self.clear_app()
    self.clear_name_space()
    self.clear_path()

  # Serialize assuming required fields are present. Wire tags:
  # 106=app, 114=path (length-delimited), 162=name_space.
  def OutputUnchecked(self, out):
    out.putVarInt32(106)
    out.putPrefixedString(self.app_)
    out.putVarInt32(114)
    out.putVarInt32(self.path_.ByteSize())
    self.path_.OutputUnchecked(out)
    if (self.has_name_space_):
      out.putVarInt32(162)
      out.putPrefixedString(self.name_space_)

  # Serialize only set fields.
  def OutputPartial(self, out):
    if (self.has_app_):
      out.putVarInt32(106)
      out.putPrefixedString(self.app_)
    if (self.has_path_):
      out.putVarInt32(114)
      out.putVarInt32(self.path_.ByteSizePartial())
      self.path_.OutputPartial(out)
    if (self.has_name_space_):
      out.putVarInt32(162)
      out.putPrefixedString(self.name_space_)

  # Decode from `d`, merging into self; unknown tags skipped, tag 0 invalid.
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 106:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 114:
        # Length-delimited sub-message: decode from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_path().TryMerge(tmp)
        continue
      if tt == 162:
        self.set_name_space(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  # Human-readable text form of the set fields.
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
    if self.has_path_:
      res+=prefix+"path <\n"
      res+=self.path_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res

  # Densify a sparse {tag: value} dict into a tuple indexed 0..maxtag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants and tag lookup tables for the text formatter.
  kapp = 13
  kname_space = 20
  kpath = 14
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    13: "app",
    14: "path",
    20: "name_space",
  }, 20)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    13: ProtocolBuffer.Encoder.STRING,
    14: ProtocolBuffer.Encoder.STRING,
    20: ProtocolBuffer.Encoder.STRING,
  }, 20, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Reference'
class User(ProtocolBuffer.ProtocolMessage):
  """Generated message: a datastore user value.

  Required: email (tag 1), auth_domain (tag 2), gaiaid (tag 4).
  Optional: nickname, obfuscated_gaiaid, federated_identity,
  federated_provider.
  """

  # Per-field has-bits and defaults.
  has_email_ = 0
  email_ = ""
  has_auth_domain_ = 0
  auth_domain_ = ""
  has_nickname_ = 0
  nickname_ = ""
  has_gaiaid_ = 0
  gaiaid_ = 0
  has_obfuscated_gaiaid_ = 0
  obfuscated_gaiaid_ = ""
  has_federated_identity_ = 0
  federated_identity_ = ""
  has_federated_provider_ = 0
  federated_provider_ = ""
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  # Generated presence-tracked scalar accessors: set_* flips the has-bit,
  # clear_* resets both the has-bit and the default value.
  def email(self): return self.email_
  def set_email(self, x):
    self.has_email_ = 1
    self.email_ = x
  def clear_email(self):
    if self.has_email_:
      self.has_email_ = 0
      self.email_ = ""
  def has_email(self): return self.has_email_
  def auth_domain(self): return self.auth_domain_
  def set_auth_domain(self, x):
    self.has_auth_domain_ = 1
    self.auth_domain_ = x
  def clear_auth_domain(self):
    if self.has_auth_domain_:
      self.has_auth_domain_ = 0
      self.auth_domain_ = ""
  def has_auth_domain(self): return self.has_auth_domain_
  def nickname(self): return self.nickname_
  def set_nickname(self, x):
    self.has_nickname_ = 1
    self.nickname_ = x
  def clear_nickname(self):
    if self.has_nickname_:
      self.has_nickname_ = 0
      self.nickname_ = ""
  def has_nickname(self): return self.has_nickname_
  def gaiaid(self): return self.gaiaid_
  def set_gaiaid(self, x):
    self.has_gaiaid_ = 1
    self.gaiaid_ = x
  def clear_gaiaid(self):
    if self.has_gaiaid_:
      self.has_gaiaid_ = 0
      self.gaiaid_ = 0
  def has_gaiaid(self): return self.has_gaiaid_
  def obfuscated_gaiaid(self): return self.obfuscated_gaiaid_
  def set_obfuscated_gaiaid(self, x):
    self.has_obfuscated_gaiaid_ = 1
    self.obfuscated_gaiaid_ = x
  def clear_obfuscated_gaiaid(self):
    if self.has_obfuscated_gaiaid_:
      self.has_obfuscated_gaiaid_ = 0
      self.obfuscated_gaiaid_ = ""
  def has_obfuscated_gaiaid(self): return self.has_obfuscated_gaiaid_
  def federated_identity(self): return self.federated_identity_
  def set_federated_identity(self, x):
    self.has_federated_identity_ = 1
    self.federated_identity_ = x
  def clear_federated_identity(self):
    if self.has_federated_identity_:
      self.has_federated_identity_ = 0
      self.federated_identity_ = ""
  def has_federated_identity(self): return self.has_federated_identity_
  def federated_provider(self): return self.federated_provider_
  def set_federated_provider(self, x):
    self.has_federated_provider_ = 1
    self.federated_provider_ = x
  def clear_federated_provider(self):
    if self.has_federated_provider_:
      self.has_federated_provider_ = 0
      self.federated_provider_ = ""
  def has_federated_provider(self): return self.has_federated_provider_
  # Copy every set field of x into self.
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_email()): self.set_email(x.email())
    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
    if (x.has_nickname()): self.set_nickname(x.nickname())
    if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())
    if (x.has_obfuscated_gaiaid()): self.set_obfuscated_gaiaid(x.obfuscated_gaiaid())
    if (x.has_federated_identity()): self.set_federated_identity(x.federated_identity())
    if (x.has_federated_provider()): self.set_federated_provider(x.federated_provider())
  # Field-by-field equality over has-bits and values; returns 1/0.
  def Equals(self, x):
    if x is self: return 1
    if self.has_email_ != x.has_email_: return 0
    if self.has_email_ and self.email_ != x.email_: return 0
    if self.has_auth_domain_ != x.has_auth_domain_: return 0
    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
    if self.has_nickname_ != x.has_nickname_: return 0
    if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
    if self.has_gaiaid_ != x.has_gaiaid_: return 0
    if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
    if self.has_obfuscated_gaiaid_ != x.has_obfuscated_gaiaid_: return 0
    if self.has_obfuscated_gaiaid_ and self.obfuscated_gaiaid_ != x.obfuscated_gaiaid_: return 0
    if self.has_federated_identity_ != x.has_federated_identity_: return 0
    if self.has_federated_identity_ and self.federated_identity_ != x.federated_identity_: return 0
    if self.has_federated_provider_ != x.has_federated_provider_: return 0
    if self.has_federated_provider_ and self.federated_provider_ != x.federated_provider_: return 0
    return 1
  # email, auth_domain and gaiaid are required; reasons appended to
  # debug_strs when provided.
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_email_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: email not set.')
    if (not self.has_auth_domain_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: auth_domain not set.')
    if (not self.has_gaiaid_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: gaiaid not set.')
    return initialized
  # Serialized size; the trailing +3 is one tag byte for each of the three
  # required fields (email, auth_domain, gaiaid).
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.email_))
    n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    n += self.lengthVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_): n += 1 + self.lengthString(len(self.obfuscated_gaiaid_))
    if (self.has_federated_identity_): n += 1 + self.lengthString(len(self.federated_identity_))
    if (self.has_federated_provider_): n += 1 + self.lengthString(len(self.federated_provider_))
    return n + 3
  # Size variant that only counts fields actually set.
  def ByteSizePartial(self):
    n = 0
    if (self.has_email_):
      n += 1
      n += self.lengthString(len(self.email_))
    if (self.has_auth_domain_):
      n += 1
      n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    if (self.has_gaiaid_):
      n += 1
      n += self.lengthVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_): n += 1 + self.lengthString(len(self.obfuscated_gaiaid_))
    if (self.has_federated_identity_): n += 1 + self.lengthString(len(self.federated_identity_))
    if (self.has_federated_provider_): n += 1 + self.lengthString(len(self.federated_provider_))
    return n
  # Reset every field to its default / unset state.
  def Clear(self):
    self.clear_email()
    self.clear_auth_domain()
    self.clear_nickname()
    self.clear_gaiaid()
    self.clear_obfuscated_gaiaid()
    self.clear_federated_identity()
    self.clear_federated_provider()
  def OutputUnchecked(self, out):
    """Serialize to 'out' without verifying that required fields are set.

    Tag bytes are precomputed as (field_number << 3) | wire_type, e.g.
    10 == field 1 length-delimited, 32 == field 4 varint.  Required fields
    (email, auth_domain, gaiaid) are written unconditionally.
    """
    out.putVarInt32(10)
    out.putPrefixedString(self.email_)
    out.putVarInt32(18)
    out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(26)
      out.putPrefixedString(self.nickname_)
    out.putVarInt32(32)
    out.putVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_):
      out.putVarInt32(42)
      out.putPrefixedString(self.obfuscated_gaiaid_)
    if (self.has_federated_identity_):
      out.putVarInt32(50)
      out.putPrefixedString(self.federated_identity_)
    if (self.has_federated_provider_):
      out.putVarInt32(58)
      out.putPrefixedString(self.federated_provider_)
  def OutputPartial(self, out):
    """Serialize only the fields that are present.

    Unlike OutputUnchecked, required fields are also skipped when unset,
    so this is safe on an uninitialized message.
    """
    if (self.has_email_):
      out.putVarInt32(10)
      out.putPrefixedString(self.email_)
    if (self.has_auth_domain_):
      out.putVarInt32(18)
      out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(26)
      out.putPrefixedString(self.nickname_)
    if (self.has_gaiaid_):
      out.putVarInt32(32)
      out.putVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_):
      out.putVarInt32(42)
      out.putPrefixedString(self.obfuscated_gaiaid_)
    if (self.has_federated_identity_):
      out.putVarInt32(50)
      out.putPrefixedString(self.federated_identity_)
    if (self.has_federated_provider_):
      out.putVarInt32(58)
      out.putPrefixedString(self.federated_provider_)
  def TryMerge(self, d):
    """Decode fields from decoder 'd', merging them into this message.

    Unknown tags are skipped via skipData so newer fields round-trip;
    tag 0 is malformed input and raises ProtocolBufferDecodeError.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_email(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_auth_domain(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_nickname(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_gaiaid(d.getVarInt64())
        continue
      if tt == 42:
        self.set_obfuscated_gaiaid(d.getPrefixedString())
        continue
      if tt == 50:
        self.set_federated_identity(d.getPrefixedString())
        continue
      if tt == 58:
        self.set_federated_provider(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Return a debug text dump of all set fields, one per line."""
    res=""
    if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
    if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
    if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
    if self.has_obfuscated_gaiaid_: res+=prefix+("obfuscated_gaiaid: %s\n" % self.DebugFormatString(self.obfuscated_gaiaid_))
    if self.has_federated_identity_: res+=prefix+("federated_identity: %s\n" % self.DebugFormatString(self.federated_identity_))
    if self.has_federated_provider_: res+=prefix+("federated_provider: %s\n" % self.DebugFormatString(self.federated_provider_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Expand a sparse {tag: value} dict into a dense tuple of length maxtag+1.

    Note: called at class-definition time as a plain function (no self).
    """
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field-number constants, one per declared field.
  kemail = 1
  kauth_domain = 2
  knickname = 3
  kgaiaid = 4
  kobfuscated_gaiaid = 5
  kfederated_identity = 6
  kfederated_provider = 7
  # Tag number -> human-readable field name (slot 0 is the error slot).
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "email",
    2: "auth_domain",
    3: "nickname",
    4: "gaiaid",
    5: "obfuscated_gaiaid",
    6: "federated_identity",
    7: "federated_provider",
  }, 7)
  # Tag number -> wire encoder type.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.STRING,
    7: ProtocolBuffer.Encoder.STRING,
  }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.User'
class EntityProto(ProtocolBuffer.ProtocolMessage):
  """Generated message: a datastore entity (key, entity group, optional
  owner, kind hint, and repeated indexed/raw properties).

  NOTE: machine-generated protocol buffer code.  The tag constants and
  statement order define the wire format; do not hand-edit the logic.
  """
  # Enum values for the optional 'kind' field.
  GD_CONTACT = 1
  GD_EVENT = 2
  GD_MESSAGE = 3
  _Kind_NAMES = {
    1: "GD_CONTACT",
    2: "GD_EVENT",
    3: "GD_MESSAGE",
  }
  def Kind_Name(cls, x): return cls._Kind_NAMES.get(x, "")
  Kind_Name = classmethod(Kind_Name)
  # Presence bits and scalar defaults; owner_ is created lazily.
  has_key_ = 0
  has_entity_group_ = 0
  has_owner_ = 0
  owner_ = None
  has_kind_ = 0
  kind_ = 0
  has_kind_uri_ = 0
  kind_uri_ = ""
  def __init__(self, contents=None):
    self.key_ = Reference()
    self.entity_group_ = Path()
    self.property_ = []
    self.raw_property_ = []
    # Guards lazy creation of owner_ in owner().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def mutable_key(self): self.has_key_ = 1; return self.key_
  def clear_key(self):self.has_key_ = 0; self.key_.Clear()
  def has_key(self): return self.has_key_
  def entity_group(self): return self.entity_group_
  def mutable_entity_group(self): self.has_entity_group_ = 1; return self.entity_group_
  def clear_entity_group(self):self.has_entity_group_ = 0; self.entity_group_.Clear()
  def has_entity_group(self): return self.has_entity_group_
  def owner(self):
    # Lazily create the owner submessage, double-checking under the lock.
    if self.owner_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.owner_ is None: self.owner_ = User()
      finally:
        self.lazy_init_lock_.release()
    return self.owner_
  def mutable_owner(self): self.has_owner_ = 1; return self.owner()
  def clear_owner(self):
    # Avoid touching owner_ if it was never lazily created.
    if self.has_owner_:
      self.has_owner_ = 0;
      if self.owner_ is not None: self.owner_.Clear()
  def has_owner(self): return self.has_owner_
  def kind(self): return self.kind_
  def set_kind(self, x):
    self.has_kind_ = 1
    self.kind_ = x
  def clear_kind(self):
    if self.has_kind_:
      self.has_kind_ = 0
      self.kind_ = 0
  def has_kind(self): return self.has_kind_
  def kind_uri(self): return self.kind_uri_
  def set_kind_uri(self, x):
    self.has_kind_uri_ = 1
    self.kind_uri_ = x
  def clear_kind_uri(self):
    if self.has_kind_uri_:
      self.has_kind_uri_ = 0
      self.kind_uri_ = ""
  def has_kind_uri(self): return self.has_kind_uri_
  def property_size(self): return len(self.property_)
  def property_list(self): return self.property_
  def property(self, i):
    return self.property_[i]
  def mutable_property(self, i):
    return self.property_[i]
  def add_property(self):
    x = Property()
    self.property_.append(x)
    return x
  def clear_property(self):
    self.property_ = []
  def raw_property_size(self): return len(self.raw_property_)
  def raw_property_list(self): return self.raw_property_
  def raw_property(self, i):
    return self.raw_property_[i]
  def mutable_raw_property(self, i):
    return self.raw_property_[i]
  def add_raw_property(self):
    x = Property()
    self.raw_property_.append(x)
    return x
  def clear_raw_property(self):
    self.raw_property_ = []
  def MergeFrom(self, x):
    """Merge all present fields of x into self; repeated fields append."""
    assert x is not self
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_entity_group()): self.mutable_entity_group().MergeFrom(x.entity_group())
    if (x.has_owner()): self.mutable_owner().MergeFrom(x.owner())
    if (x.has_kind()): self.set_kind(x.kind())
    if (x.has_kind_uri()): self.set_kind_uri(x.kind_uri())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
    for i in xrange(x.raw_property_size()): self.add_raw_property().CopyFrom(x.raw_property(i))
  def Equals(self, x):
    """Return 1 iff x has the same presence bits and field values."""
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_entity_group_ != x.has_entity_group_: return 0
    if self.has_entity_group_ and self.entity_group_ != x.entity_group_: return 0
    if self.has_owner_ != x.has_owner_: return 0
    if self.has_owner_ and self.owner_ != x.owner_: return 0
    if self.has_kind_ != x.has_kind_: return 0
    if self.has_kind_ and self.kind_ != x.kind_: return 0
    if self.has_kind_uri_ != x.has_kind_uri_: return 0
    if self.has_kind_uri_ and self.kind_uri_ != x.kind_uri_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    if len(self.raw_property_) != len(x.raw_property_): return 0
    for e1, e2 in zip(self.raw_property_, x.raw_property_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff key and entity_group are set and all submessages
    (including every property) are themselves initialized."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    elif not self.key_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_entity_group_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: entity_group not set.')
    elif not self.entity_group_.IsInitialized(debug_strs): initialized = 0
    if (self.has_owner_ and not self.owner_.IsInitialized(debug_strs)): initialized = 0
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.raw_property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Return the serialized byte size, assuming required fields are set.

    The trailing +3 covers the tag bytes of the required key (1 byte) and
    entity_group (2 bytes, tag 16 > 15) fields written by OutputUnchecked.
    """
    n = 0
    n += self.lengthString(self.key_.ByteSize())
    n += self.lengthString(self.entity_group_.ByteSize())
    if (self.has_owner_): n += 2 + self.lengthString(self.owner_.ByteSize())
    if (self.has_kind_): n += 1 + self.lengthVarInt64(self.kind_)
    if (self.has_kind_uri_): n += 1 + self.lengthString(len(self.kind_uri_))
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
    n += 1 * len(self.raw_property_)
    for i in xrange(len(self.raw_property_)): n += self.lengthString(self.raw_property_[i].ByteSize())
    return n + 3
  def ByteSizePartial(self):
    """Like ByteSize, but counts only fields that are actually present."""
    n = 0
    if (self.has_key_):
      n += 1
      n += self.lengthString(self.key_.ByteSizePartial())
    if (self.has_entity_group_):
      n += 2
      n += self.lengthString(self.entity_group_.ByteSizePartial())
    if (self.has_owner_): n += 2 + self.lengthString(self.owner_.ByteSizePartial())
    if (self.has_kind_): n += 1 + self.lengthVarInt64(self.kind_)
    if (self.has_kind_uri_): n += 1 + self.lengthString(len(self.kind_uri_))
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial())
    n += 1 * len(self.raw_property_)
    for i in xrange(len(self.raw_property_)): n += self.lengthString(self.raw_property_[i].ByteSizePartial())
    return n
  def Clear(self):
    """Reset every field to its default and clear all presence bits."""
    self.clear_key()
    self.clear_entity_group()
    self.clear_owner()
    self.clear_kind()
    self.clear_kind_uri()
    self.clear_property()
    self.clear_raw_property()
  def OutputUnchecked(self, out):
    """Serialize to 'out' without verifying required fields; tags are
    precomputed as (field_number << 3) | wire_type (e.g. 106 == field 13)."""
    if (self.has_kind_):
      out.putVarInt32(32)
      out.putVarInt32(self.kind_)
    if (self.has_kind_uri_):
      out.putVarInt32(42)
      out.putPrefixedString(self.kind_uri_)
    out.putVarInt32(106)
    out.putVarInt32(self.key_.ByteSize())
    self.key_.OutputUnchecked(out)
    for i in xrange(len(self.property_)):
      out.putVarInt32(114)
      out.putVarInt32(self.property_[i].ByteSize())
      self.property_[i].OutputUnchecked(out)
    for i in xrange(len(self.raw_property_)):
      out.putVarInt32(122)
      out.putVarInt32(self.raw_property_[i].ByteSize())
      self.raw_property_[i].OutputUnchecked(out)
    out.putVarInt32(130)
    out.putVarInt32(self.entity_group_.ByteSize())
    self.entity_group_.OutputUnchecked(out)
    if (self.has_owner_):
      out.putVarInt32(138)
      out.putVarInt32(self.owner_.ByteSize())
      self.owner_.OutputUnchecked(out)
  def OutputPartial(self, out):
    """Serialize only the fields that are present; safe when uninitialized."""
    if (self.has_kind_):
      out.putVarInt32(32)
      out.putVarInt32(self.kind_)
    if (self.has_kind_uri_):
      out.putVarInt32(42)
      out.putPrefixedString(self.kind_uri_)
    if (self.has_key_):
      out.putVarInt32(106)
      out.putVarInt32(self.key_.ByteSizePartial())
      self.key_.OutputPartial(out)
    for i in xrange(len(self.property_)):
      out.putVarInt32(114)
      out.putVarInt32(self.property_[i].ByteSizePartial())
      self.property_[i].OutputPartial(out)
    for i in xrange(len(self.raw_property_)):
      out.putVarInt32(122)
      out.putVarInt32(self.raw_property_[i].ByteSizePartial())
      self.raw_property_[i].OutputPartial(out)
    if (self.has_entity_group_):
      out.putVarInt32(130)
      out.putVarInt32(self.entity_group_.ByteSizePartial())
      self.entity_group_.OutputPartial(out)
    if (self.has_owner_):
      out.putVarInt32(138)
      out.putVarInt32(self.owner_.ByteSizePartial())
      self.owner_.OutputPartial(out)
  def TryMerge(self, d):
    """Decode fields from decoder 'd', merging into self.

    Length-delimited submessages are decoded via a bounded sub-decoder;
    unknown tags are skipped; tag 0 raises ProtocolBufferDecodeError.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 32:
        self.set_kind(d.getVarInt32())
        continue
      if tt == 42:
        self.set_kind_uri(d.getPrefixedString())
        continue
      if tt == 106:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue
      if tt == 114:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_property().TryMerge(tmp)
        continue
      if tt == 122:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_raw_property().TryMerge(tmp)
        continue
      if tt == 130:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_entity_group().TryMerge(tmp)
        continue
      if tt == 138:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_owner().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Return a nested debug text dump of all set fields."""
    res=""
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_entity_group_:
      res+=prefix+"entity_group <\n"
      res+=self.entity_group_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_owner_:
      res+=prefix+"owner <\n"
      res+=self.owner_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatInt32(self.kind_))
    if self.has_kind_uri_: res+=prefix+("kind_uri: %s\n" % self.DebugFormatString(self.kind_uri_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    cnt=0
    for e in self.raw_property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("raw_property%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple; called at
    # class-definition time as a plain function (no self).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field-number constants, one per declared field.
  kkey = 13
  kentity_group = 16
  kowner = 17
  kkind = 4
  kkind_uri = 5
  kproperty = 14
  kraw_property = 15
  # Tag number -> field name / wire encoder type lookup tables.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    4: "kind",
    5: "kind_uri",
    13: "key",
    14: "property",
    15: "raw_property",
    16: "entity_group",
    17: "owner",
  }, 17)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    13: ProtocolBuffer.Encoder.STRING,
    14: ProtocolBuffer.Encoder.STRING,
    15: ProtocolBuffer.Encoder.STRING,
    16: ProtocolBuffer.Encoder.STRING,
    17: ProtocolBuffer.Encoder.STRING,
  }, 17, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.EntityProto'
class CompositeProperty(ProtocolBuffer.ProtocolMessage):
  """Generated message: a composite-index entry — a required index_id plus
  repeated encoded value strings.

  NOTE: machine-generated protocol buffer code; do not hand-edit the logic.
  """
  has_index_id_ = 0
  index_id_ = 0
  def __init__(self, contents=None):
    self.value_ = []
    if contents is not None: self.MergeFromString(contents)
  def index_id(self): return self.index_id_
  def set_index_id(self, x):
    self.has_index_id_ = 1
    self.index_id_ = x
  def clear_index_id(self):
    if self.has_index_id_:
      self.has_index_id_ = 0
      self.index_id_ = 0
  def has_index_id(self): return self.has_index_id_
  def value_size(self): return len(self.value_)
  def value_list(self): return self.value_
  def value(self, i):
    return self.value_[i]
  def set_value(self, i, x):
    self.value_[i] = x
  def add_value(self, x):
    self.value_.append(x)
  def clear_value(self):
    self.value_ = []
  def MergeFrom(self, x):
    """Merge present fields of x into self; repeated 'value' appends."""
    assert x is not self
    if (x.has_index_id()): self.set_index_id(x.index_id())
    for i in xrange(x.value_size()): self.add_value(x.value(i))
  def Equals(self, x):
    """Return 1 iff x has the same presence bits and field values."""
    if x is self: return 1
    if self.has_index_id_ != x.has_index_id_: return 0
    if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
    if len(self.value_) != len(x.value_): return 0
    for e1, e2 in zip(self.value_, x.value_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required index_id field is set."""
    initialized = 1
    if (not self.has_index_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: index_id not set.')
    return initialized
  def ByteSize(self):
    """Serialized size; +1 is the tag byte of the required index_id."""
    n = 0
    n += self.lengthVarInt64(self.index_id_)
    n += 1 * len(self.value_)
    for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
    return n + 1
  def ByteSizePartial(self):
    """Like ByteSize, but counts only fields that are actually present."""
    n = 0
    if (self.has_index_id_):
      n += 1
      n += self.lengthVarInt64(self.index_id_)
    n += 1 * len(self.value_)
    for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
    return n
  def Clear(self):
    self.clear_index_id()
    self.clear_value()
  def OutputUnchecked(self, out):
    """Serialize without verifying required fields (tag 8 = field 1 varint,
    tag 18 = field 2 length-delimited)."""
    out.putVarInt32(8)
    out.putVarInt64(self.index_id_)
    for i in xrange(len(self.value_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_[i])
  def OutputPartial(self, out):
    """Serialize only the fields that are present."""
    if (self.has_index_id_):
      out.putVarInt32(8)
      out.putVarInt64(self.index_id_)
    for i in xrange(len(self.value_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_[i])
  def TryMerge(self, d):
    """Decode fields from decoder 'd'; unknown tags skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_index_id(d.getVarInt64())
        continue
      if tt == 18:
        self.add_value(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Return a debug text dump of all set fields."""
    res=""
    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
    cnt=0
    for e in self.value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("value%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple; called at
    # class-definition time as a plain function (no self).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kindex_id = 1
  kvalue = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "index_id",
    2: "value",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.CompositeProperty'
class Index_Property(ProtocolBuffer.ProtocolMessage):
  """Generated message: one property (name + sort direction or mode) inside
  an Index definition.

  Encoded as a protobuf *group*: the enclosing Index writes STARTGROUP
  tag 19 before and ENDGROUP tag 20 after this message (see
  Index.OutputUnchecked), and TryMerge below loops until it reads tag 20.
  NOTE: machine-generated protocol buffer code; do not hand-edit the logic.
  """
  # Enum values for the optional 'direction' field.
  DIRECTION_UNSPECIFIED = 0
  ASCENDING = 1
  DESCENDING = 2
  _Direction_NAMES = {
    0: "DIRECTION_UNSPECIFIED",
    1: "ASCENDING",
    2: "DESCENDING",
  }
  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
  Direction_Name = classmethod(Direction_Name)
  # Enum values for the optional 'mode' field.
  MODE_UNSPECIFIED = 0
  GEOSPATIAL = 3
  _Mode_NAMES = {
    0: "MODE_UNSPECIFIED",
    3: "GEOSPATIAL",
  }
  def Mode_Name(cls, x): return cls._Mode_NAMES.get(x, "")
  Mode_Name = classmethod(Mode_Name)
  has_name_ = 0
  name_ = ""
  has_direction_ = 0
  direction_ = 0
  has_mode_ = 0
  mode_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def direction(self): return self.direction_
  def set_direction(self, x):
    self.has_direction_ = 1
    self.direction_ = x
  def clear_direction(self):
    if self.has_direction_:
      self.has_direction_ = 0
      self.direction_ = 0
  def has_direction(self): return self.has_direction_
  def mode(self): return self.mode_
  def set_mode(self, x):
    self.has_mode_ = 1
    self.mode_ = x
  def clear_mode(self):
    if self.has_mode_:
      self.has_mode_ = 0
      self.mode_ = 0
  def has_mode(self): return self.has_mode_
  def MergeFrom(self, x):
    """Merge all present fields of x into self."""
    assert x is not self
    if (x.has_name()): self.set_name(x.name())
    if (x.has_direction()): self.set_direction(x.direction())
    if (x.has_mode()): self.set_mode(x.mode())
  def Equals(self, x):
    """Return 1 iff x has the same presence bits and field values."""
    if x is self: return 1
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_direction_ != x.has_direction_: return 0
    if self.has_direction_ and self.direction_ != x.direction_: return 0
    if self.has_mode_ != x.has_mode_: return 0
    if self.has_mode_ and self.mode_ != x.mode_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required name field is set."""
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    return initialized
  def ByteSize(self):
    """Serialized size; +1 is the tag byte of the required name field."""
    n = 0
    n += self.lengthString(len(self.name_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
    return n + 1
  def ByteSizePartial(self):
    """Like ByteSize, but counts only fields that are actually present."""
    n = 0
    if (self.has_name_):
      n += 1
      n += self.lengthString(len(self.name_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
    return n
  def Clear(self):
    self.clear_name()
    self.clear_direction()
    self.clear_mode()
  def OutputUnchecked(self, out):
    """Serialize group body without verifying required fields; the group
    delimiters (tags 19/20) are written by the enclosing Index."""
    out.putVarInt32(26)
    out.putPrefixedString(self.name_)
    if (self.has_direction_):
      out.putVarInt32(32)
      out.putVarInt32(self.direction_)
    if (self.has_mode_):
      out.putVarInt32(48)
      out.putVarInt32(self.mode_)
  def OutputPartial(self, out):
    """Serialize only the fields that are present."""
    if (self.has_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.name_)
    if (self.has_direction_):
      out.putVarInt32(32)
      out.putVarInt32(self.direction_)
    if (self.has_mode_):
      out.putVarInt32(48)
      out.putVarInt32(self.mode_)
  def TryMerge(self, d):
    """Decode group fields until the ENDGROUP tag (20) is read."""
    while 1:
      tt = d.getVarInt32()
      if tt == 20: break
      if tt == 26:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_direction(d.getVarInt32())
        continue
      if tt == 48:
        self.set_mode(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Return a debug text dump of all set fields."""
    res=""
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
    if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
    return res
class Index(ProtocolBuffer.ProtocolMessage):
  """Generated message: a datastore index definition — required entity_type
  and ancestor flag, plus repeated Index_Property groups.

  NOTE: machine-generated protocol buffer code; do not hand-edit the logic.
  """
  has_entity_type_ = 0
  entity_type_ = ""
  has_ancestor_ = 0
  ancestor_ = 0
  def __init__(self, contents=None):
    self.property_ = []
    if contents is not None: self.MergeFromString(contents)
  def entity_type(self): return self.entity_type_
  def set_entity_type(self, x):
    self.has_entity_type_ = 1
    self.entity_type_ = x
  def clear_entity_type(self):
    if self.has_entity_type_:
      self.has_entity_type_ = 0
      self.entity_type_ = ""
  def has_entity_type(self): return self.has_entity_type_
  def ancestor(self): return self.ancestor_
  def set_ancestor(self, x):
    self.has_ancestor_ = 1
    self.ancestor_ = x
  def clear_ancestor(self):
    if self.has_ancestor_:
      self.has_ancestor_ = 0
      self.ancestor_ = 0
  def has_ancestor(self): return self.has_ancestor_
  def property_size(self): return len(self.property_)
  def property_list(self): return self.property_
  def property(self, i):
    return self.property_[i]
  def mutable_property(self, i):
    return self.property_[i]
  def add_property(self):
    x = Index_Property()
    self.property_.append(x)
    return x
  def clear_property(self):
    self.property_ = []
  def MergeFrom(self, x):
    """Merge present fields of x into self; repeated 'property' appends."""
    assert x is not self
    if (x.has_entity_type()): self.set_entity_type(x.entity_type())
    if (x.has_ancestor()): self.set_ancestor(x.ancestor())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
  def Equals(self, x):
    """Return 1 iff x has the same presence bits and field values."""
    if x is self: return 1
    if self.has_entity_type_ != x.has_entity_type_: return 0
    if self.has_entity_type_ and self.entity_type_ != x.entity_type_: return 0
    if self.has_ancestor_ != x.has_ancestor_: return 0
    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff entity_type and ancestor are set and every property
    group is itself initialized."""
    initialized = 1
    if (not self.has_entity_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: entity_type not set.')
    if (not self.has_ancestor_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: ancestor not set.')
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Serialized size; each property group costs 2 delimiter tag bytes
    (STARTGROUP/ENDGROUP), and +3 covers the entity_type tag (1 byte)
    plus the ancestor tag and boolean byte."""
    n = 0
    n += self.lengthString(len(self.entity_type_))
    n += 2 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.property_[i].ByteSize()
    return n + 3
  def ByteSizePartial(self):
    """Like ByteSize, but counts only fields that are actually present."""
    n = 0
    if (self.has_entity_type_):
      n += 1
      n += self.lengthString(len(self.entity_type_))
    if (self.has_ancestor_):
      n += 2
    n += 2 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.property_[i].ByteSizePartial()
    return n
  def Clear(self):
    self.clear_entity_type()
    self.clear_ancestor()
    self.clear_property()
  def OutputUnchecked(self, out):
    """Serialize without verifying required fields.  Each property is a
    group: STARTGROUP tag 19, group body, ENDGROUP tag 20."""
    out.putVarInt32(10)
    out.putPrefixedString(self.entity_type_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(19)
      self.property_[i].OutputUnchecked(out)
      out.putVarInt32(20)
    out.putVarInt32(40)
    out.putBoolean(self.ancestor_)
  def OutputPartial(self, out):
    """Serialize only the fields that are present."""
    if (self.has_entity_type_):
      out.putVarInt32(10)
      out.putPrefixedString(self.entity_type_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(19)
      self.property_[i].OutputPartial(out)
      out.putVarInt32(20)
    if (self.has_ancestor_):
      out.putVarInt32(40)
      out.putBoolean(self.ancestor_)
  def TryMerge(self, d):
    """Decode fields from decoder 'd'; tag 19 starts a property group whose
    TryMerge consumes up to the matching ENDGROUP tag."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_entity_type(d.getPrefixedString())
        continue
      if tt == 19:
        self.add_property().TryMerge(d)
        continue
      if tt == 40:
        self.set_ancestor(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Return a debug text dump of all set fields."""
    res=""
    if self.has_entity_type_: res+=prefix+("entity_type: %s\n" % self.DebugFormatString(self.entity_type_))
    if self.has_ancestor_: res+=prefix+("ancestor: %s\n" % self.DebugFormatBool(self.ancestor_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Property%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple; called at
    # class-definition time as a plain function (no self).
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kentity_type = 1
  kancestor = 5
  kPropertyGroup = 2
  kPropertyname = 3
  kPropertydirection = 4
  kPropertymode = 6
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "entity_type",
    2: "Property",
    3: "name",
    4: "direction",
    5: "ancestor",
    6: "mode",
  }, 6)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STARTGROUP,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.NUMERIC,
    6: ProtocolBuffer.Encoder.NUMERIC,
  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Index'
class CompositeIndex(ProtocolBuffer.ProtocolMessage):
  """Generated message: a composite index — owning app, id, the Index
  definition, lifecycle state, and division-family metadata.

  NOTE: machine-generated protocol buffer code; do not hand-edit the logic.
  """
  # Enum values for the required 'state' field.
  WRITE_ONLY = 1
  READ_WRITE = 2
  DELETED = 3
  ERROR = 4
  _State_NAMES = {
    1: "WRITE_ONLY",
    2: "READ_WRITE",
    3: "DELETED",
    4: "ERROR",
  }
  def State_Name(cls, x): return cls._State_NAMES.get(x, "")
  State_Name = classmethod(State_Name)
  # Enum values for the optional 'workflow_state' field.
  PENDING = 1
  ACTIVE = 2
  COMPLETED = 3
  _WorkflowState_NAMES = {
    1: "PENDING",
    2: "ACTIVE",
    3: "COMPLETED",
  }
  def WorkflowState_Name(cls, x): return cls._WorkflowState_NAMES.get(x, "")
  WorkflowState_Name = classmethod(WorkflowState_Name)
  # Presence bits and scalar defaults for each field.
  has_app_id_ = 0
  app_id_ = ""
  has_id_ = 0
  id_ = 0
  has_definition_ = 0
  has_state_ = 0
  state_ = 0
  has_workflow_state_ = 0
  workflow_state_ = 0
  has_error_message_ = 0
  error_message_ = ""
  has_only_use_if_required_ = 0
  only_use_if_required_ = 0
  has_disabled_index_ = 0
  disabled_index_ = 0
  has_write_division_family_ = 0
  write_division_family_ = ""
  def __init__(self, contents=None):
    self.definition_ = Index()
    self.read_division_family_ = []
    if contents is not None: self.MergeFromString(contents)
  # Standard generated accessors: getter, setter, clear and presence test
  # per field; repeated fields additionally get size/list/add helpers.
  def app_id(self): return self.app_id_
  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x
  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""
  def has_app_id(self): return self.has_app_id_
  def id(self): return self.id_
  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x
  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0
  def has_id(self): return self.has_id_
  def definition(self): return self.definition_
  def mutable_definition(self): self.has_definition_ = 1; return self.definition_
  def clear_definition(self):self.has_definition_ = 0; self.definition_.Clear()
  def has_definition(self): return self.has_definition_
  def state(self): return self.state_
  def set_state(self, x):
    self.has_state_ = 1
    self.state_ = x
  def clear_state(self):
    if self.has_state_:
      self.has_state_ = 0
      self.state_ = 0
  def has_state(self): return self.has_state_
  def workflow_state(self): return self.workflow_state_
  def set_workflow_state(self, x):
    self.has_workflow_state_ = 1
    self.workflow_state_ = x
  def clear_workflow_state(self):
    if self.has_workflow_state_:
      self.has_workflow_state_ = 0
      self.workflow_state_ = 0
  def has_workflow_state(self): return self.has_workflow_state_
  def error_message(self): return self.error_message_
  def set_error_message(self, x):
    self.has_error_message_ = 1
    self.error_message_ = x
  def clear_error_message(self):
    if self.has_error_message_:
      self.has_error_message_ = 0
      self.error_message_ = ""
  def has_error_message(self): return self.has_error_message_
  def only_use_if_required(self): return self.only_use_if_required_
  def set_only_use_if_required(self, x):
    self.has_only_use_if_required_ = 1
    self.only_use_if_required_ = x
  def clear_only_use_if_required(self):
    if self.has_only_use_if_required_:
      self.has_only_use_if_required_ = 0
      self.only_use_if_required_ = 0
  def has_only_use_if_required(self): return self.has_only_use_if_required_
  def disabled_index(self): return self.disabled_index_
  def set_disabled_index(self, x):
    self.has_disabled_index_ = 1
    self.disabled_index_ = x
  def clear_disabled_index(self):
    if self.has_disabled_index_:
      self.has_disabled_index_ = 0
      self.disabled_index_ = 0
  def has_disabled_index(self): return self.has_disabled_index_
  def read_division_family_size(self): return len(self.read_division_family_)
  def read_division_family_list(self): return self.read_division_family_
  def read_division_family(self, i):
    return self.read_division_family_[i]
  def set_read_division_family(self, i, x):
    self.read_division_family_[i] = x
  def add_read_division_family(self, x):
    self.read_division_family_.append(x)
  def clear_read_division_family(self):
    self.read_division_family_ = []
  def write_division_family(self): return self.write_division_family_
  def set_write_division_family(self, x):
    self.has_write_division_family_ = 1
    self.write_division_family_ = x
  def clear_write_division_family(self):
    if self.has_write_division_family_:
      self.has_write_division_family_ = 0
      self.write_division_family_ = ""
  def has_write_division_family(self): return self.has_write_division_family_
  def MergeFrom(self, x):
    """Merge all present fields of x into self; repeated fields append."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_definition()): self.mutable_definition().MergeFrom(x.definition())
    if (x.has_state()): self.set_state(x.state())
    if (x.has_workflow_state()): self.set_workflow_state(x.workflow_state())
    if (x.has_error_message()): self.set_error_message(x.error_message())
    if (x.has_only_use_if_required()): self.set_only_use_if_required(x.only_use_if_required())
    if (x.has_disabled_index()): self.set_disabled_index(x.disabled_index())
    for i in xrange(x.read_division_family_size()): self.add_read_division_family(x.read_division_family(i))
    if (x.has_write_division_family()): self.set_write_division_family(x.write_division_family())
  def Equals(self, x):
    """Return 1 iff x has the same presence bits and field values."""
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_definition_ != x.has_definition_: return 0
    if self.has_definition_ and self.definition_ != x.definition_: return 0
    if self.has_state_ != x.has_state_: return 0
    if self.has_state_ and self.state_ != x.state_: return 0
    if self.has_workflow_state_ != x.has_workflow_state_: return 0
    if self.has_workflow_state_ and self.workflow_state_ != x.workflow_state_: return 0
    if self.has_error_message_ != x.has_error_message_: return 0
    if self.has_error_message_ and self.error_message_ != x.error_message_: return 0
    if self.has_only_use_if_required_ != x.has_only_use_if_required_: return 0
    if self.has_only_use_if_required_ and self.only_use_if_required_ != x.only_use_if_required_: return 0
    if self.has_disabled_index_ != x.has_disabled_index_: return 0
    if self.has_disabled_index_ and self.disabled_index_ != x.disabled_index_: return 0
    if len(self.read_division_family_) != len(x.read_division_family_): return 0
    for e1, e2 in zip(self.read_division_family_, x.read_division_family_):
      if e1 != e2: return 0
    if self.has_write_division_family_ != x.has_write_division_family_: return 0
    if self.has_write_division_family_ and self.write_division_family_ != x.write_division_family_: return 0
    return 1
def IsInitialized(self, debug_strs=None):
  """Return 1 iff all required fields (app_id, id, definition, state)
  are set and the nested definition message is itself initialized.

  When debug_strs is a list, a human-readable reason is appended for
  each missing required field.
  """
  initialized = 1
  if (not self.has_app_id_):
    initialized = 0
    if debug_strs is not None:
      debug_strs.append('Required field: app_id not set.')
  if (not self.has_id_):
    initialized = 0
    if debug_strs is not None:
      debug_strs.append('Required field: id not set.')
  if (not self.has_definition_):
    initialized = 0
    if debug_strs is not None:
      debug_strs.append('Required field: definition not set.')
  # Present but partially-initialized nested message also fails the check.
  elif not self.definition_.IsInitialized(debug_strs): initialized = 0
  if (not self.has_state_):
    initialized = 0
    if debug_strs is not None:
      debug_strs.append('Required field: state not set.')
  return initialized
def ByteSize(self):
  """Encoded size in bytes, assuming every required field is set.

  The trailing "+ 4" accounts for the four 1-byte tags of the required
  fields (app_id, id, definition, state); optional fields add their own
  tag bytes inline.  Compare ByteSizePartial, which makes no such
  assumption.
  """
  n = 0
  n += self.lengthString(len(self.app_id_))
  n += self.lengthVarInt64(self.id_)
  n += self.lengthString(self.definition_.ByteSize())
  n += self.lengthVarInt64(self.state_)
  if (self.has_workflow_state_): n += 1 + self.lengthVarInt64(self.workflow_state_)
  if (self.has_error_message_): n += 1 + self.lengthString(len(self.error_message_))
  if (self.has_only_use_if_required_): n += 2
  if (self.has_disabled_index_): n += 2
  # One tag byte per repeated element, plus each string's prefixed length.
  n += 1 * len(self.read_division_family_)
  for i in xrange(len(self.read_division_family_)): n += self.lengthString(len(self.read_division_family_[i]))
  if (self.has_write_division_family_): n += 1 + self.lengthString(len(self.write_division_family_))
  return n + 4
def ByteSizePartial(self):
  """Encoded size counting only fields that are actually set.

  Unlike ByteSize, each required field contributes its tag byte only
  when present, so this is safe on partially-initialized messages.
  """
  n = 0
  if (self.has_app_id_):
    n += 1
    n += self.lengthString(len(self.app_id_))
  if (self.has_id_):
    n += 1
    n += self.lengthVarInt64(self.id_)
  if (self.has_definition_):
    n += 1
    n += self.lengthString(self.definition_.ByteSizePartial())
  if (self.has_state_):
    n += 1
    n += self.lengthVarInt64(self.state_)
  if (self.has_workflow_state_): n += 1 + self.lengthVarInt64(self.workflow_state_)
  if (self.has_error_message_): n += 1 + self.lengthString(len(self.error_message_))
  if (self.has_only_use_if_required_): n += 2
  if (self.has_disabled_index_): n += 2
  n += 1 * len(self.read_division_family_)
  for i in xrange(len(self.read_division_family_)): n += self.lengthString(len(self.read_division_family_[i]))
  if (self.has_write_division_family_): n += 1 + self.lengthString(len(self.write_division_family_))
  return n
def Clear(self):
  """Reset every CompositeIndex field to its unset/default state.

  Delegates to the per-field clear_* accessors, in declaration order.
  """
  for reset in (self.clear_app_id,
                self.clear_id,
                self.clear_definition,
                self.clear_state,
                self.clear_workflow_state,
                self.clear_error_message,
                self.clear_only_use_if_required,
                self.clear_disabled_index,
                self.clear_read_division_family,
                self.clear_write_division_family):
    reset()
def OutputUnchecked(self, out):
  """Serialize to encoder `out` without verifying required fields.

  Each putVarInt32(T) below writes a protobuf tag byte
  (field_number << 3 | wire_type); e.g. 10 == field 1, length-delimited.
  Fields are emitted in the generator's fixed order, not field-number
  order (workflow_state/error_message, fields 10/11, come last).
  """
  out.putVarInt32(10)
  out.putPrefixedString(self.app_id_)
  out.putVarInt32(16)
  out.putVarInt64(self.id_)
  out.putVarInt32(26)
  # Nested message is length-prefixed, then encoded in place.
  out.putVarInt32(self.definition_.ByteSize())
  self.definition_.OutputUnchecked(out)
  out.putVarInt32(32)
  out.putVarInt32(self.state_)
  if (self.has_only_use_if_required_):
    out.putVarInt32(48)
    out.putBoolean(self.only_use_if_required_)
  for i in xrange(len(self.read_division_family_)):
    out.putVarInt32(58)
    out.putPrefixedString(self.read_division_family_[i])
  if (self.has_write_division_family_):
    out.putVarInt32(66)
    out.putPrefixedString(self.write_division_family_)
  if (self.has_disabled_index_):
    out.putVarInt32(72)
    out.putBoolean(self.disabled_index_)
  if (self.has_workflow_state_):
    out.putVarInt32(80)
    out.putVarInt32(self.workflow_state_)
  if (self.has_error_message_):
    out.putVarInt32(90)
    out.putPrefixedString(self.error_message_)
def OutputPartial(self, out):
  """Like OutputUnchecked, but skips required fields that are unset.

  Used to serialize partially-initialized messages; pairs with
  ByteSizePartial.
  """
  if (self.has_app_id_):
    out.putVarInt32(10)
    out.putPrefixedString(self.app_id_)
  if (self.has_id_):
    out.putVarInt32(16)
    out.putVarInt64(self.id_)
  if (self.has_definition_):
    out.putVarInt32(26)
    out.putVarInt32(self.definition_.ByteSizePartial())
    self.definition_.OutputPartial(out)
  if (self.has_state_):
    out.putVarInt32(32)
    out.putVarInt32(self.state_)
  if (self.has_only_use_if_required_):
    out.putVarInt32(48)
    out.putBoolean(self.only_use_if_required_)
  for i in xrange(len(self.read_division_family_)):
    out.putVarInt32(58)
    out.putPrefixedString(self.read_division_family_[i])
  if (self.has_write_division_family_):
    out.putVarInt32(66)
    out.putPrefixedString(self.write_division_family_)
  if (self.has_disabled_index_):
    out.putVarInt32(72)
    out.putBoolean(self.disabled_index_)
  if (self.has_workflow_state_):
    out.putVarInt32(80)
    out.putVarInt32(self.workflow_state_)
  if (self.has_error_message_):
    out.putVarInt32(90)
    out.putPrefixedString(self.error_message_)
def TryMerge(self, d):
  """Decode fields from Decoder `d` and merge them into this message.

  Tag bytes correspond to the field numbers in _TEXT below.  Unknown
  tags are skipped; a zero tag indicates malformed input and raises
  ProtocolBufferDecodeError.
  """
  while d.avail() > 0:
    tt = d.getVarInt32()
    if tt == 10:  # app_id (string, field 1)
      self.set_app_id(d.getPrefixedString())
      continue
    if tt == 16:  # id (varint, field 2)
      self.set_id(d.getVarInt64())
      continue
    if tt == 26:  # definition (nested message, field 3): bounded sub-decoder
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.mutable_definition().TryMerge(tmp)
      continue
    if tt == 32:  # state (varint, field 4)
      self.set_state(d.getVarInt32())
      continue
    if tt == 48:  # only_use_if_required (bool, field 6)
      self.set_only_use_if_required(d.getBoolean())
      continue
    if tt == 58:  # read_division_family (repeated string, field 7)
      self.add_read_division_family(d.getPrefixedString())
      continue
    if tt == 66:  # write_division_family (string, field 8)
      self.set_write_division_family(d.getPrefixedString())
      continue
    if tt == 72:  # disabled_index (bool, field 9)
      self.set_disabled_index(d.getBoolean())
      continue
    if tt == 80:  # workflow_state (varint, field 10)
      self.set_workflow_state(d.getVarInt32())
      continue
    if tt == 90:  # error_message (string, field 11)
      self.set_error_message(d.getPrefixedString())
      continue
    if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
    d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
  """Human-readable text rendering of all set fields.

  Nested messages are indented by two extra spaces; repeated elements
  get an "(index)" suffix when printElemNumber is truthy.
  """
  res=""
  if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
  if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
  if self.has_definition_:
    res+=prefix+"definition <\n"
    res+=self.definition_.__str__(prefix + "  ", printElemNumber)
    res+=prefix+">\n"
  if self.has_state_: res+=prefix+("state: %s\n" % self.DebugFormatInt32(self.state_))
  if self.has_workflow_state_: res+=prefix+("workflow_state: %s\n" % self.DebugFormatInt32(self.workflow_state_))
  if self.has_error_message_: res+=prefix+("error_message: %s\n" % self.DebugFormatString(self.error_message_))
  if self.has_only_use_if_required_: res+=prefix+("only_use_if_required: %s\n" % self.DebugFormatBool(self.only_use_if_required_))
  if self.has_disabled_index_: res+=prefix+("disabled_index: %s\n" % self.DebugFormatBool(self.disabled_index_))
  cnt=0
  for e in self.read_division_family_:
    elm=""
    if printElemNumber: elm="(%d)" % cnt
    res+=prefix+("read_division_family%s: %s\n" % (elm, self.DebugFormatString(e)))
    cnt+=1
  if self.has_write_division_family_: res+=prefix+("write_division_family: %s\n" % self.DebugFormatString(self.write_division_family_))
  return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
# Field numbers, as declared in the .proto definition (k<name> = number).
kapp_id = 1
kid = 2
kdefinition = 3
kstate = 4
kworkflow_state = 10
kerror_message = 11
konly_use_if_required = 6
kdisabled_index = 9
kread_division_family = 7
kwrite_division_family = 8

# Dense tag-number -> field-name table used by the generic text codec.
_TEXT = _BuildTagLookupTable({
  0: "ErrorCode",
  1: "app_id",
  2: "id",
  3: "definition",
  4: "state",
  6: "only_use_if_required",
  7: "read_division_family",
  8: "write_division_family",
  9: "disabled_index",
  10: "workflow_state",
  11: "error_message",
}, 11)

# Dense tag-number -> wire-encoder-type table (NUMERIC = varint,
# STRING = length-delimited); gaps default to MAX_TYPE.
_TYPES = _BuildTagLookupTable({
  0: ProtocolBuffer.Encoder.NUMERIC,
  1: ProtocolBuffer.Encoder.STRING,
  2: ProtocolBuffer.Encoder.NUMERIC,
  3: ProtocolBuffer.Encoder.STRING,
  4: ProtocolBuffer.Encoder.NUMERIC,
  6: ProtocolBuffer.Encoder.NUMERIC,
  7: ProtocolBuffer.Encoder.STRING,
  8: ProtocolBuffer.Encoder.STRING,
  9: ProtocolBuffer.Encoder.NUMERIC,
  10: ProtocolBuffer.Encoder.NUMERIC,
  11: ProtocolBuffer.Encoder.STRING,
}, 11, ProtocolBuffer.Encoder.MAX_TYPE)

# Unused legacy styling hooks kept by the generator.
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.CompositeIndex'
class SearchIndexEntry(ProtocolBuffer.ProtocolMessage):
  """Generated message: one search-index entry.

  Fields (numbers per _TEXT below):
    index_id              required varint int64   (field 1)
    write_division_family required string         (field 2)
    fingerprint_1999      optional 64-bit fixed   (field 3)
    fingerprint_2011      optional 64-bit fixed   (field 4)
  """

  # Class-level presence flags and defaults; setting a field shadows
  # these with instance attributes.
  has_index_id_ = 0
  index_id_ = 0
  has_write_division_family_ = 0
  write_division_family_ = ""
  has_fingerprint_1999_ = 0
  fingerprint_1999_ = 0
  has_fingerprint_2011_ = 0
  fingerprint_2011_ = 0

  def __init__(self, contents=None):
    # `contents`, if given, is a serialized message to parse.
    if contents is not None: self.MergeFromString(contents)

  # --- index_id accessors ---
  def index_id(self): return self.index_id_

  def set_index_id(self, x):
    self.has_index_id_ = 1
    self.index_id_ = x

  def clear_index_id(self):
    if self.has_index_id_:
      self.has_index_id_ = 0
      self.index_id_ = 0

  def has_index_id(self): return self.has_index_id_

  # --- write_division_family accessors ---
  def write_division_family(self): return self.write_division_family_

  def set_write_division_family(self, x):
    self.has_write_division_family_ = 1
    self.write_division_family_ = x

  def clear_write_division_family(self):
    if self.has_write_division_family_:
      self.has_write_division_family_ = 0
      self.write_division_family_ = ""

  def has_write_division_family(self): return self.has_write_division_family_

  # --- fingerprint_1999 accessors ---
  def fingerprint_1999(self): return self.fingerprint_1999_

  def set_fingerprint_1999(self, x):
    self.has_fingerprint_1999_ = 1
    self.fingerprint_1999_ = x

  def clear_fingerprint_1999(self):
    if self.has_fingerprint_1999_:
      self.has_fingerprint_1999_ = 0
      self.fingerprint_1999_ = 0

  def has_fingerprint_1999(self): return self.has_fingerprint_1999_

  # --- fingerprint_2011 accessors ---
  def fingerprint_2011(self): return self.fingerprint_2011_

  def set_fingerprint_2011(self, x):
    self.has_fingerprint_2011_ = 1
    self.fingerprint_2011_ = x

  def clear_fingerprint_2011(self):
    if self.has_fingerprint_2011_:
      self.has_fingerprint_2011_ = 0
      self.fingerprint_2011_ = 0

  def has_fingerprint_2011(self): return self.has_fingerprint_2011_

  def MergeFrom(self, x):
    """Copy every set field of x into self (x must be a distinct object)."""
    assert x is not self
    if (x.has_index_id()): self.set_index_id(x.index_id())
    if (x.has_write_division_family()): self.set_write_division_family(x.write_division_family())
    if (x.has_fingerprint_1999()): self.set_fingerprint_1999(x.fingerprint_1999())
    if (x.has_fingerprint_2011()): self.set_fingerprint_2011(x.fingerprint_2011())

  def Equals(self, x):
    """Presence-and-value equality; returns 1 or 0."""
    if x is self: return 1
    if self.has_index_id_ != x.has_index_id_: return 0
    if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
    if self.has_write_division_family_ != x.has_write_division_family_: return 0
    if self.has_write_division_family_ and self.write_division_family_ != x.write_division_family_: return 0
    if self.has_fingerprint_1999_ != x.has_fingerprint_1999_: return 0
    if self.has_fingerprint_1999_ and self.fingerprint_1999_ != x.fingerprint_1999_: return 0
    if self.has_fingerprint_2011_ != x.has_fingerprint_2011_: return 0
    if self.has_fingerprint_2011_ and self.fingerprint_2011_ != x.fingerprint_2011_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """1 iff both required fields are set; reasons appended to debug_strs."""
    initialized = 1
    if (not self.has_index_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: index_id not set.')
    if (not self.has_write_division_family_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: write_division_family not set.')
    return initialized

  def ByteSize(self):
    # "+ 2" = the two required fields' 1-byte tags; each fixed64 costs
    # 1 tag byte + 8 data bytes (the "9" below).
    n = 0
    n += self.lengthVarInt64(self.index_id_)
    n += self.lengthString(len(self.write_division_family_))
    if (self.has_fingerprint_1999_): n += 9
    if (self.has_fingerprint_2011_): n += 9
    return n + 2

  def ByteSizePartial(self):
    # Counts only fields actually set (no required-field assumption).
    n = 0
    if (self.has_index_id_):
      n += 1
      n += self.lengthVarInt64(self.index_id_)
    if (self.has_write_division_family_):
      n += 1
      n += self.lengthString(len(self.write_division_family_))
    if (self.has_fingerprint_1999_): n += 9
    if (self.has_fingerprint_2011_): n += 9
    return n

  def Clear(self):
    self.clear_index_id()
    self.clear_write_division_family()
    self.clear_fingerprint_1999()
    self.clear_fingerprint_2011()

  def OutputUnchecked(self, out):
    # Tag bytes: 8 = field 1 varint, 18 = field 2 string,
    # 25 = field 3 fixed64, 33 = field 4 fixed64.
    out.putVarInt32(8)
    out.putVarInt64(self.index_id_)
    out.putVarInt32(18)
    out.putPrefixedString(self.write_division_family_)
    if (self.has_fingerprint_1999_):
      out.putVarInt32(25)
      out.put64(self.fingerprint_1999_)
    if (self.has_fingerprint_2011_):
      out.putVarInt32(33)
      out.put64(self.fingerprint_2011_)

  def OutputPartial(self, out):
    # Like OutputUnchecked, but skips unset required fields.
    if (self.has_index_id_):
      out.putVarInt32(8)
      out.putVarInt64(self.index_id_)
    if (self.has_write_division_family_):
      out.putVarInt32(18)
      out.putPrefixedString(self.write_division_family_)
    if (self.has_fingerprint_1999_):
      out.putVarInt32(25)
      out.put64(self.fingerprint_1999_)
    if (self.has_fingerprint_2011_):
      out.putVarInt32(33)
      out.put64(self.fingerprint_2011_)

  def TryMerge(self, d):
    # Decode until exhausted; unknown tags skipped, tag 0 is malformed.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_index_id(d.getVarInt64())
        continue
      if tt == 18:
        self.set_write_division_family(d.getPrefixedString())
        continue
      if tt == 25:
        self.set_fingerprint_1999(d.get64())
        continue
      if tt == 33:
        self.set_fingerprint_2011(d.get64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text-format debug rendering of all set fields."""
    res=""
    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
    if self.has_write_division_family_: res+=prefix+("write_division_family: %s\n" % self.DebugFormatString(self.write_division_family_))
    if self.has_fingerprint_1999_: res+=prefix+("fingerprint_1999: %s\n" % self.DebugFormatFixed64(self.fingerprint_1999_))
    if self.has_fingerprint_2011_: res+=prefix+("fingerprint_2011: %s\n" % self.DebugFormatFixed64(self.fingerprint_2011_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple indexed by tag number; gaps filled with default.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field numbers.
  kindex_id = 1
  kwrite_division_family = 2
  kfingerprint_1999 = 3
  kfingerprint_2011 = 4

  # Tag -> name / encoder-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "index_id",
    2: "write_division_family",
    3: "fingerprint_1999",
    4: "fingerprint_2011",
  }, 4)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.DOUBLE,
    4: ProtocolBuffer.Encoder.DOUBLE,
  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)

  # Unused legacy styling hooks.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.SearchIndexEntry'
class SearchIndexExternalId(ProtocolBuffer.ProtocolMessage):
  """Generated message: external identifier for a search-index row.

  Fields:
    index_id    required varint int64 (field 1)
    primary_key required Reference message (field 2) — Reference is
                declared elsewhere in this module.
  """

  has_index_id_ = 0
  index_id_ = 0
  has_primary_key_ = 0

  def __init__(self, contents=None):
    # The nested Reference is allocated eagerly (contrast with
    # IndexPostfix.key_, which is lazy).
    self.primary_key_ = Reference()
    if contents is not None: self.MergeFromString(contents)

  # --- index_id accessors ---
  def index_id(self): return self.index_id_

  def set_index_id(self, x):
    self.has_index_id_ = 1
    self.index_id_ = x

  def clear_index_id(self):
    if self.has_index_id_:
      self.has_index_id_ = 0
      self.index_id_ = 0

  def has_index_id(self): return self.has_index_id_

  # --- primary_key accessors (nested message) ---
  def primary_key(self): return self.primary_key_

  def mutable_primary_key(self): self.has_primary_key_ = 1; return self.primary_key_

  def clear_primary_key(self):self.has_primary_key_ = 0; self.primary_key_.Clear()

  def has_primary_key(self): return self.has_primary_key_

  def MergeFrom(self, x):
    """Copy every set field of x into self (x must be a distinct object)."""
    assert x is not self
    if (x.has_index_id()): self.set_index_id(x.index_id())
    if (x.has_primary_key()): self.mutable_primary_key().MergeFrom(x.primary_key())

  def Equals(self, x):
    """Presence-and-value equality; returns 1 or 0."""
    if x is self: return 1
    if self.has_index_id_ != x.has_index_id_: return 0
    if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
    if self.has_primary_key_ != x.has_primary_key_: return 0
    if self.has_primary_key_ and self.primary_key_ != x.primary_key_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """1 iff both required fields are set and primary_key is initialized."""
    initialized = 1
    if (not self.has_index_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: index_id not set.')
    if (not self.has_primary_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: primary_key not set.')
    elif not self.primary_key_.IsInitialized(debug_strs): initialized = 0
    return initialized

  def ByteSize(self):
    # "+ 2" = the two required fields' 1-byte tags.
    n = 0
    n += self.lengthVarInt64(self.index_id_)
    n += self.lengthString(self.primary_key_.ByteSize())
    return n + 2

  def ByteSizePartial(self):
    # Counts only fields actually set.
    n = 0
    if (self.has_index_id_):
      n += 1
      n += self.lengthVarInt64(self.index_id_)
    if (self.has_primary_key_):
      n += 1
      n += self.lengthString(self.primary_key_.ByteSizePartial())
    return n

  def Clear(self):
    self.clear_index_id()
    self.clear_primary_key()

  def OutputUnchecked(self, out):
    # Tag bytes: 8 = field 1 varint, 18 = field 2 length-delimited message.
    out.putVarInt32(8)
    out.putVarInt64(self.index_id_)
    out.putVarInt32(18)
    out.putVarInt32(self.primary_key_.ByteSize())
    self.primary_key_.OutputUnchecked(out)

  def OutputPartial(self, out):
    # Like OutputUnchecked, but skips unset fields.
    if (self.has_index_id_):
      out.putVarInt32(8)
      out.putVarInt64(self.index_id_)
    if (self.has_primary_key_):
      out.putVarInt32(18)
      out.putVarInt32(self.primary_key_.ByteSizePartial())
      self.primary_key_.OutputPartial(out)

  def TryMerge(self, d):
    # Decode until exhausted; unknown tags skipped, tag 0 is malformed.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_index_id(d.getVarInt64())
        continue
      if tt == 18:
        # Nested message: parse with a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_primary_key().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text-format debug rendering; nested message indented two spaces."""
    res=""
    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
    if self.has_primary_key_:
      res+=prefix+"primary_key <\n"
      res+=self.primary_key_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple indexed by tag number; gaps filled with default.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field numbers.
  kindex_id = 1
  kprimary_key = 2

  # Tag -> name / encoder-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "index_id",
    2: "primary_key",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  # Unused legacy styling hooks.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.SearchIndexExternalId'
class IndexPostfix_IndexValue(ProtocolBuffer.ProtocolMessage):
  """Generated message: one (property_name, value) pair inside an
  IndexPostfix.

  Fields:
    property_name required string (field 1)
    value         required PropertyValue message (field 2) —
                  PropertyValue is declared elsewhere in this module.
  """

  has_property_name_ = 0
  property_name_ = ""
  has_value_ = 0

  def __init__(self, contents=None):
    # Nested PropertyValue allocated eagerly.
    self.value_ = PropertyValue()
    if contents is not None: self.MergeFromString(contents)

  # --- property_name accessors ---
  def property_name(self): return self.property_name_

  def set_property_name(self, x):
    self.has_property_name_ = 1
    self.property_name_ = x

  def clear_property_name(self):
    if self.has_property_name_:
      self.has_property_name_ = 0
      self.property_name_ = ""

  def has_property_name(self): return self.has_property_name_

  # --- value accessors (nested message) ---
  def value(self): return self.value_

  def mutable_value(self): self.has_value_ = 1; return self.value_

  def clear_value(self):self.has_value_ = 0; self.value_.Clear()

  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    """Copy every set field of x into self (x must be a distinct object)."""
    assert x is not self
    if (x.has_property_name()): self.set_property_name(x.property_name())
    if (x.has_value()): self.mutable_value().MergeFrom(x.value())

  def Equals(self, x):
    """Presence-and-value equality; returns 1 or 0."""
    if x is self: return 1
    if self.has_property_name_ != x.has_property_name_: return 0
    if self.has_property_name_ and self.property_name_ != x.property_name_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """1 iff both required fields are set and value is initialized."""
    initialized = 1
    if (not self.has_property_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: property_name not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    elif not self.value_.IsInitialized(debug_strs): initialized = 0
    return initialized

  def ByteSize(self):
    # "+ 2" = the two required fields' 1-byte tags.
    n = 0
    n += self.lengthString(len(self.property_name_))
    n += self.lengthString(self.value_.ByteSize())
    return n + 2

  def ByteSizePartial(self):
    # Counts only fields actually set.
    n = 0
    if (self.has_property_name_):
      n += 1
      n += self.lengthString(len(self.property_name_))
    if (self.has_value_):
      n += 1
      n += self.lengthString(self.value_.ByteSizePartial())
    return n

  def Clear(self):
    self.clear_property_name()
    self.clear_value()

  def OutputUnchecked(self, out):
    # Tag bytes: 10 = field 1 string, 18 = field 2 length-delimited message.
    out.putVarInt32(10)
    out.putPrefixedString(self.property_name_)
    out.putVarInt32(18)
    out.putVarInt32(self.value_.ByteSize())
    self.value_.OutputUnchecked(out)

  def OutputPartial(self, out):
    # Like OutputUnchecked, but skips unset fields.
    if (self.has_property_name_):
      out.putVarInt32(10)
      out.putPrefixedString(self.property_name_)
    if (self.has_value_):
      out.putVarInt32(18)
      out.putVarInt32(self.value_.ByteSizePartial())
      self.value_.OutputPartial(out)

  def TryMerge(self, d):
    # Decode until exhausted; unknown tags skipped, tag 0 is malformed.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_property_name(d.getPrefixedString())
        continue
      if tt == 18:
        # Nested message: parse with a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_value().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text-format debug rendering; nested message indented two spaces."""
    res=""
    if self.has_property_name_: res+=prefix+("property_name: %s\n" % self.DebugFormatString(self.property_name_))
    if self.has_value_:
      res+=prefix+"value <\n"
      res+=self.value_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple indexed by tag number; gaps filled with default.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field numbers.
  kproperty_name = 1
  kvalue = 2

  # Tag -> name / encoder-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "property_name",
    2: "value",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  # Unused legacy styling hooks.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.IndexPostfix_IndexValue'
class IndexPostfix(ProtocolBuffer.ProtocolMessage):
  """Generated message: a position expressed as trailing index values.

  Fields:
    index_value      repeated IndexPostfix_IndexValue (field 1)
    key              optional Reference message (field 2), lazily
                     allocated under lazy_init_lock_
    before           optional bool (field 3), default 1 (true)
    before_ascending optional bool (field 4), default 0
  """

  has_key_ = 0
  key_ = None          # lazily created Reference; see key() below
  has_before_ = 0
  before_ = 1          # note: default is true
  has_before_ascending_ = 0
  before_ascending_ = 0

  def __init__(self, contents=None):
    self.index_value_ = []
    # Guards lazy construction of key_.  `thread` is dummy_thread at the
    # top of this module, so the lock is a no-op in single-threaded use.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  # --- index_value accessors (repeated nested message) ---
  def index_value_size(self): return len(self.index_value_)

  def index_value_list(self): return self.index_value_

  def index_value(self, i):
    return self.index_value_[i]

  def mutable_index_value(self, i):
    return self.index_value_[i]

  def add_index_value(self):
    # Appends and returns a fresh element for the caller to populate.
    x = IndexPostfix_IndexValue()
    self.index_value_.append(x)
    return x

  def clear_index_value(self):
    self.index_value_ = []

  # --- key accessors (lazy nested message) ---
  def key(self):
    # Check, then re-check under the lock before allocating.
    if self.key_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.key_ is None: self.key_ = Reference()
      finally:
        self.lazy_init_lock_.release()
    return self.key_

  def mutable_key(self): self.has_key_ = 1; return self.key()

  def clear_key(self):
    # Avoid forcing allocation just to clear an unset field.
    if self.has_key_:
      self.has_key_ = 0;
      if self.key_ is not None: self.key_.Clear()

  def has_key(self): return self.has_key_

  # --- before accessors ---
  def before(self): return self.before_

  def set_before(self, x):
    self.has_before_ = 1
    self.before_ = x

  def clear_before(self):
    if self.has_before_:
      self.has_before_ = 0
      self.before_ = 1

  def has_before(self): return self.has_before_

  # --- before_ascending accessors ---
  def before_ascending(self): return self.before_ascending_

  def set_before_ascending(self, x):
    self.has_before_ascending_ = 1
    self.before_ascending_ = x

  def clear_before_ascending(self):
    if self.has_before_ascending_:
      self.has_before_ascending_ = 0
      self.before_ascending_ = 0

  def has_before_ascending(self): return self.has_before_ascending_

  def MergeFrom(self, x):
    """Copy every set field of x into self; repeated values are appended."""
    assert x is not self
    for i in xrange(x.index_value_size()): self.add_index_value().CopyFrom(x.index_value(i))
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_before()): self.set_before(x.before())
    if (x.has_before_ascending()): self.set_before_ascending(x.before_ascending())

  def Equals(self, x):
    """Presence-and-value equality; returns 1 or 0."""
    if x is self: return 1
    if len(self.index_value_) != len(x.index_value_): return 0
    for e1, e2 in zip(self.index_value_, x.index_value_):
      if e1 != e2: return 0
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_before_ != x.has_before_: return 0
    if self.has_before_ and self.before_ != x.before_: return 0
    if self.has_before_ascending_ != x.has_before_ascending_: return 0
    if self.has_before_ascending_ and self.before_ascending_ != x.before_ascending_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """1 iff all nested messages (repeated values, key if set) are
    initialized; this message has no required scalar fields."""
    initialized = 1
    for p in self.index_value_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    # All fields optional/repeated, so tag bytes are counted inline.
    n = 0
    n += 1 * len(self.index_value_)
    for i in xrange(len(self.index_value_)): n += self.lengthString(self.index_value_[i].ByteSize())
    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
    if (self.has_before_): n += 2
    if (self.has_before_ascending_): n += 2
    return n

  def ByteSizePartial(self):
    # Identical shape to ByteSize but sizes nested messages partially.
    n = 0
    n += 1 * len(self.index_value_)
    for i in xrange(len(self.index_value_)): n += self.lengthString(self.index_value_[i].ByteSizePartial())
    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
    if (self.has_before_): n += 2
    if (self.has_before_ascending_): n += 2
    return n

  def Clear(self):
    self.clear_index_value()
    self.clear_key()
    self.clear_before()
    self.clear_before_ascending()

  def OutputUnchecked(self, out):
    # Tag bytes: 10 = field 1 message, 18 = field 2 message,
    # 24 = field 3 bool, 32 = field 4 bool.
    for i in xrange(len(self.index_value_)):
      out.putVarInt32(10)
      out.putVarInt32(self.index_value_[i].ByteSize())
      self.index_value_[i].OutputUnchecked(out)
    if (self.has_key_):
      out.putVarInt32(18)
      out.putVarInt32(self.key_.ByteSize())
      self.key_.OutputUnchecked(out)
    if (self.has_before_):
      out.putVarInt32(24)
      out.putBoolean(self.before_)
    if (self.has_before_ascending_):
      out.putVarInt32(32)
      out.putBoolean(self.before_ascending_)

  def OutputPartial(self, out):
    # Same field order as OutputUnchecked, partial nested encoding.
    for i in xrange(len(self.index_value_)):
      out.putVarInt32(10)
      out.putVarInt32(self.index_value_[i].ByteSizePartial())
      self.index_value_[i].OutputPartial(out)
    if (self.has_key_):
      out.putVarInt32(18)
      out.putVarInt32(self.key_.ByteSizePartial())
      self.key_.OutputPartial(out)
    if (self.has_before_):
      out.putVarInt32(24)
      out.putBoolean(self.before_)
    if (self.has_before_ascending_):
      out.putVarInt32(32)
      out.putBoolean(self.before_ascending_)

  def TryMerge(self, d):
    # Decode until exhausted; unknown tags skipped, tag 0 is malformed.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        # Repeated nested message: each occurrence appends an element.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_index_value().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_before(d.getBoolean())
        continue
      if tt == 32:
        self.set_before_ascending(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text-format debug rendering; repeated elements optionally numbered."""
    res=""
    cnt=0
    for e in self.index_value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("index_value%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_before_: res+=prefix+("before: %s\n" % self.DebugFormatBool(self.before_))
    if self.has_before_ascending_: res+=prefix+("before_ascending: %s\n" % self.DebugFormatBool(self.before_ascending_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple indexed by tag number; gaps filled with default.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field numbers.
  kindex_value = 1
  kkey = 2
  kbefore = 3
  kbefore_ascending = 4

  # Tag -> name / encoder-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "index_value",
    2: "key",
    3: "before",
    4: "before_ascending",
  }, 4)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)

  # Unused legacy styling hooks.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.IndexPostfix'
class IndexPosition(ProtocolBuffer.ProtocolMessage):
  """Generated message: a position given as an encoded index key plus
  before/after flags.

  Fields:
    key              optional string (field 1)
    before           optional bool (field 2), default 1 (true)
    before_ascending optional bool (field 3), default 0
  """

  # Presence flags and defaults; shadowed per instance once set.
  has_key_ = 0
  key_ = ""
  has_before_ = 0
  before_ = 1
  has_before_ascending_ = 0
  before_ascending_ = 0

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  # --- key accessors ---
  def key(self):
    return self.key_

  def set_key(self, x):
    self.has_key_, self.key_ = 1, x

  def clear_key(self):
    if not self.has_key_:
      return
    self.has_key_ = 0
    self.key_ = ""

  def has_key(self):
    return self.has_key_

  # --- before accessors ---
  def before(self):
    return self.before_

  def set_before(self, x):
    self.has_before_, self.before_ = 1, x

  def clear_before(self):
    if not self.has_before_:
      return
    self.has_before_ = 0
    self.before_ = 1

  def has_before(self):
    return self.has_before_

  # --- before_ascending accessors ---
  def before_ascending(self):
    return self.before_ascending_

  def set_before_ascending(self, x):
    self.has_before_ascending_, self.before_ascending_ = 1, x

  def clear_before_ascending(self):
    if not self.has_before_ascending_:
      return
    self.has_before_ascending_ = 0
    self.before_ascending_ = 0

  def has_before_ascending(self):
    return self.has_before_ascending_

  def MergeFrom(self, x):
    """Copy every set field of x into self (x must be a distinct object)."""
    assert x is not self
    if x.has_key():
      self.set_key(x.key())
    if x.has_before():
      self.set_before(x.before())
    if x.has_before_ascending():
      self.set_before_ascending(x.before_ascending())

  def Equals(self, x):
    """Presence-and-value equality; returns 1 or 0."""
    if x is self:
      return 1
    if self.has_key_ != x.has_key_:
      return 0
    if self.has_key_ and self.key_ != x.key_:
      return 0
    if self.has_before_ != x.has_before_:
      return 0
    if self.has_before_ and self.before_ != x.before_:
      return 0
    if self.has_before_ascending_ != x.has_before_ascending_:
      return 0
    if self.has_before_ascending_ and self.before_ascending_ != x.before_ascending_:
      return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields, so any instance is initialized.
    return 1

  def _OptionalBytes(self):
    # All three fields are optional, so the full and partial encoded
    # sizes coincide; each bool costs 1 tag byte + 1 data byte.
    n = 0
    if self.has_key_:
      n += 1 + self.lengthString(len(self.key_))
    if self.has_before_:
      n += 2
    if self.has_before_ascending_:
      n += 2
    return n

  def ByteSize(self):
    return self._OptionalBytes()

  def ByteSizePartial(self):
    return self._OptionalBytes()

  def Clear(self):
    for reset in (self.clear_key, self.clear_before,
                  self.clear_before_ascending):
      reset()

  def _WriteFields(self, out):
    # Tag bytes: 10 = field 1 string, 16 = field 2 bool, 24 = field 3 bool.
    if self.has_key_:
      out.putVarInt32(10)
      out.putPrefixedString(self.key_)
    if self.has_before_:
      out.putVarInt32(16)
      out.putBoolean(self.before_)
    if self.has_before_ascending_:
      out.putVarInt32(24)
      out.putBoolean(self.before_ascending_)

  def OutputUnchecked(self, out):
    self._WriteFields(out)

  def OutputPartial(self, out):
    # No required fields, so the partial encoding equals the full one.
    self._WriteFields(out)

  def TryMerge(self, d):
    """Decode fields from Decoder d; unknown tags are skipped and a zero
    tag raises ProtocolBufferDecodeError."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_key(d.getPrefixedString())
      elif tt == 16:
        self.set_before(d.getBoolean())
      elif tt == 24:
        self.set_before_ascending(d.getBoolean())
      elif tt == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text-format debug rendering of all set fields."""
    pieces = []
    if self.has_key_:
      pieces.append(prefix + ("key: %s\n" % self.DebugFormatString(self.key_)))
    if self.has_before_:
      pieces.append(prefix + ("before: %s\n" % self.DebugFormatBool(self.before_)))
    if self.has_before_ascending_:
      pieces.append(prefix + ("before_ascending: %s\n" % self.DebugFormatBool(self.before_ascending_)))
    return "".join(pieces)

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Dense tuple indexed by tag number; gaps filled with default.
    table = [default] * (maxtag + 1)
    for tag in sparse:
      if 0 <= tag <= maxtag:
        table[tag] = sparse[tag]
    return tuple(table)

  # Field numbers.
  kkey = 1
  kbefore = 2
  kbefore_ascending = 3

  # Tag -> name / encoder-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "before",
    3: "before_ascending",
  }, 3)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)

  # Unused legacy styling hooks.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.IndexPosition'
# No extension fields are declared for these messages, so the
# extension-runtime branch is intentionally a no-op.
if _extension_runtime:
  pass

# Public API of this generated module.
__all__ = ['PropertyValue','PropertyValue_ReferenceValuePathElement','PropertyValue_PointValue','PropertyValue_UserValue','PropertyValue_ReferenceValue','Property','Path','Path_Element','Reference','User','EntityProto','CompositeProperty','Index','Index_Property','CompositeIndex','SearchIndexEntry','SearchIndexExternalId','IndexPostfix_IndexValue','IndexPostfix','IndexPosition']
# NOTE(review): the following non-Python residue from a corrupted file
# concatenation was commented out to keep the module parseable:
# | 30.10099 | 381 | 0.679703 |
# Generated protocol-buffer module preamble (Python 2 era: note the
# dummy_thread alias and the string-valued __pychecker__ directive).
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread

__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                   unusednames=printElemNumber,debug_strs no-special"""

# Newer ProtocolBuffer runtimes expose ExtendableProtocolMessage; fall
# back to the plain ProtocolMessage base class when it is absent.
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
  _extension_runtime = True
  _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
  _extension_runtime = False
  _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
class PropertyValue_ReferenceValuePathElement(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message: one element of a ReferenceValue key path.

  Carries a required entity kind ("type") plus at most one of a numeric
  "id" or a string "name".  Machine-generated wire-format code: the tag
  numbers and serialization order below must not be changed.
  """
  # Presence flag and default value for each field.
  has_type_ = 0
  type_ = ""
  has_id_ = 0
  id_ = 0
  has_name_ = 0
  name_ = ""
  def __init__(self, contents=None):
    # Optionally initialize by parsing an already-serialized byte string.
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter / setter / clearer / presence test per field.
  def type(self): return self.type_
  def set_type(self, x):
    self.has_type_ = 1
    self.type_ = x
  def clear_type(self):
    if self.has_type_:
      self.has_type_ = 0
      self.type_ = ""
  def has_type(self): return self.has_type_
  def id(self): return self.id_
  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x
  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0
  def has_id(self): return self.has_id_
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def MergeFrom(self, x):
    """Copy every field that is set on x into self."""
    assert x is not self
    if (x.has_type()): self.set_type(x.type())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_name()): self.set_name(x.name())
  def Equals(self, x):
    """Field-by-field equality, comparing presence flags as well as values."""
    if x is self: return 1
    if self.has_type_ != x.has_type_: return 0
    if self.has_type_ and self.type_ != x.type_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields (here: type) are set."""
    initialized = 1
    if (not self.has_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: type not set.')
    return initialized
  def ByteSize(self):
    # Encoded size; the trailing +1 is the one-byte tag of required "type".
    n = 0
    n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 2 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 2 + self.lengthString(len(self.name_))
    return n + 1
  def ByteSizePartial(self):
    # Like ByteSize, but tolerates missing required fields.
    n = 0
    if (self.has_type_):
      n += 1
      n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 2 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 2 + self.lengthString(len(self.name_))
    return n
  def Clear(self):
    self.clear_type()
    self.clear_id()
    self.clear_name()
  def OutputUnchecked(self, out):
    # Serialize without required-field checks.  Tags 122/128/138 encode
    # field numbers 15 (type), 16 (id) and 17 (name).
    out.putVarInt32(122)
    out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(128)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(138)
      out.putPrefixedString(self.name_)
  def OutputPartial(self, out):
    # Serialize only the fields that are actually present.
    if (self.has_type_):
      out.putVarInt32(122)
      out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(128)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(138)
      out.putPrefixedString(self.name_)
  def TryMerge(self, d):
    """Parse fields from decoder d until the enclosing group's END tag (116)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 116: break
      if tt == 122:
        self.set_type(d.getPrefixedString())
        continue
      if tt == 128:
        self.set_id(d.getVarInt64())
        continue
      if tt == 138:
        self.set_name(d.getPrefixedString())
        continue
      # Tag 0 is malformed input; anything else is an unknown field to skip.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Render the fields that are set as indented debug text."""
    res=""
    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    return res
class PropertyValue_PointValue(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message: a 2-D point with required doubles x and y.

  Machine-generated wire-format code: tag numbers and serialization order
  must not be changed.
  """
  # Presence flag and default value for each field.
  has_x_ = 0
  x_ = 0.0
  has_y_ = 0
  y_ = 0.0
  def __init__(self, contents=None):
    # Optionally initialize by parsing an already-serialized byte string.
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter / setter / clearer / presence test per field.
  def x(self): return self.x_
  def set_x(self, x):
    self.has_x_ = 1
    self.x_ = x
  def clear_x(self):
    if self.has_x_:
      self.has_x_ = 0
      self.x_ = 0.0
  def has_x(self): return self.has_x_
  def y(self): return self.y_
  def set_y(self, x):
    self.has_y_ = 1
    self.y_ = x
  def clear_y(self):
    if self.has_y_:
      self.has_y_ = 0
      self.y_ = 0.0
  def has_y(self): return self.has_y_
  def MergeFrom(self, x):
    """Copy every field that is set on x into self."""
    assert x is not self
    if (x.has_x()): self.set_x(x.x())
    if (x.has_y()): self.set_y(x.y())
  def Equals(self, x):
    """Field-by-field equality, comparing presence flags as well as values."""
    if x is self: return 1
    if self.has_x_ != x.has_x_: return 0
    if self.has_x_ and self.x_ != x.x_: return 0
    if self.has_y_ != x.has_y_: return 0
    if self.has_y_ and self.y_ != x.y_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff both required fields, x and y, are set."""
    initialized = 1
    if (not self.has_x_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: x not set.')
    if (not self.has_y_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: y not set.')
    return initialized
  def ByteSize(self):
    # Fixed size: two one-byte tags plus two 8-byte doubles = 18.
    n = 0
    return n + 18
  def ByteSizePartial(self):
    # 9 bytes (tag + double) per field that is present.
    n = 0
    if (self.has_x_):
      n += 9
    if (self.has_y_):
      n += 9
    return n
  def Clear(self):
    self.clear_x()
    self.clear_y()
  def OutputUnchecked(self, out):
    # Tags 49/57 encode field numbers 6 (x) and 7 (y) as doubles.
    out.putVarInt32(49)
    out.putDouble(self.x_)
    out.putVarInt32(57)
    out.putDouble(self.y_)
  def OutputPartial(self, out):
    # Serialize only the fields that are actually present.
    if (self.has_x_):
      out.putVarInt32(49)
      out.putDouble(self.x_)
    if (self.has_y_):
      out.putVarInt32(57)
      out.putDouble(self.y_)
  def TryMerge(self, d):
    """Parse fields from decoder d until the enclosing group's END tag (44)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 44: break
      if tt == 49:
        self.set_x(d.getDouble())
        continue
      if tt == 57:
        self.set_y(d.getDouble())
        continue
      # Tag 0 is malformed input; anything else is an unknown field to skip.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Render the fields that are set as indented debug text."""
    res=""
    if self.has_x_: res+=prefix+("x: %s\n" % self.DebugFormat(self.x_))
    if self.has_y_: res+=prefix+("y: %s\n" % self.DebugFormat(self.y_))
    return res
class PropertyValue_UserValue(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message: a datastore User value.

  Required fields: email, auth_domain, gaiaid.  Optional fields:
  nickname, obfuscated_gaiaid, federated_identity, federated_provider.
  Machine-generated wire-format code: tag numbers and serialization order
  must not be changed.
  """
  # Presence flag and default value for each field.
  has_email_ = 0
  email_ = ""
  has_auth_domain_ = 0
  auth_domain_ = ""
  has_nickname_ = 0
  nickname_ = ""
  has_gaiaid_ = 0
  gaiaid_ = 0
  has_obfuscated_gaiaid_ = 0
  obfuscated_gaiaid_ = ""
  has_federated_identity_ = 0
  federated_identity_ = ""
  has_federated_provider_ = 0
  federated_provider_ = ""
  def __init__(self, contents=None):
    # Optionally initialize by parsing an already-serialized byte string.
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter / setter / clearer / presence test per field.
  def email(self): return self.email_
  def set_email(self, x):
    self.has_email_ = 1
    self.email_ = x
  def clear_email(self):
    if self.has_email_:
      self.has_email_ = 0
      self.email_ = ""
  def has_email(self): return self.has_email_
  def auth_domain(self): return self.auth_domain_
  def set_auth_domain(self, x):
    self.has_auth_domain_ = 1
    self.auth_domain_ = x
  def clear_auth_domain(self):
    if self.has_auth_domain_:
      self.has_auth_domain_ = 0
      self.auth_domain_ = ""
  def has_auth_domain(self): return self.has_auth_domain_
  def nickname(self): return self.nickname_
  def set_nickname(self, x):
    self.has_nickname_ = 1
    self.nickname_ = x
  def clear_nickname(self):
    if self.has_nickname_:
      self.has_nickname_ = 0
      self.nickname_ = ""
  def has_nickname(self): return self.has_nickname_
  def gaiaid(self): return self.gaiaid_
  def set_gaiaid(self, x):
    self.has_gaiaid_ = 1
    self.gaiaid_ = x
  def clear_gaiaid(self):
    if self.has_gaiaid_:
      self.has_gaiaid_ = 0
      self.gaiaid_ = 0
  def has_gaiaid(self): return self.has_gaiaid_
  def obfuscated_gaiaid(self): return self.obfuscated_gaiaid_
  def set_obfuscated_gaiaid(self, x):
    self.has_obfuscated_gaiaid_ = 1
    self.obfuscated_gaiaid_ = x
  def clear_obfuscated_gaiaid(self):
    if self.has_obfuscated_gaiaid_:
      self.has_obfuscated_gaiaid_ = 0
      self.obfuscated_gaiaid_ = ""
  def has_obfuscated_gaiaid(self): return self.has_obfuscated_gaiaid_
  def federated_identity(self): return self.federated_identity_
  def set_federated_identity(self, x):
    self.has_federated_identity_ = 1
    self.federated_identity_ = x
  def clear_federated_identity(self):
    if self.has_federated_identity_:
      self.has_federated_identity_ = 0
      self.federated_identity_ = ""
  def has_federated_identity(self): return self.has_federated_identity_
  def federated_provider(self): return self.federated_provider_
  def set_federated_provider(self, x):
    self.has_federated_provider_ = 1
    self.federated_provider_ = x
  def clear_federated_provider(self):
    if self.has_federated_provider_:
      self.has_federated_provider_ = 0
      self.federated_provider_ = ""
  def has_federated_provider(self): return self.has_federated_provider_
  def MergeFrom(self, x):
    """Copy every field that is set on x into self."""
    assert x is not self
    if (x.has_email()): self.set_email(x.email())
    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
    if (x.has_nickname()): self.set_nickname(x.nickname())
    if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())
    if (x.has_obfuscated_gaiaid()): self.set_obfuscated_gaiaid(x.obfuscated_gaiaid())
    if (x.has_federated_identity()): self.set_federated_identity(x.federated_identity())
    if (x.has_federated_provider()): self.set_federated_provider(x.federated_provider())
  def Equals(self, x):
    """Field-by-field equality, comparing presence flags as well as values."""
    if x is self: return 1
    if self.has_email_ != x.has_email_: return 0
    if self.has_email_ and self.email_ != x.email_: return 0
    if self.has_auth_domain_ != x.has_auth_domain_: return 0
    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
    if self.has_nickname_ != x.has_nickname_: return 0
    if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
    if self.has_gaiaid_ != x.has_gaiaid_: return 0
    if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
    if self.has_obfuscated_gaiaid_ != x.has_obfuscated_gaiaid_: return 0
    if self.has_obfuscated_gaiaid_ and self.obfuscated_gaiaid_ != x.obfuscated_gaiaid_: return 0
    if self.has_federated_identity_ != x.has_federated_identity_: return 0
    if self.has_federated_identity_ and self.federated_identity_ != x.federated_identity_: return 0
    if self.has_federated_provider_ != x.has_federated_provider_: return 0
    if self.has_federated_provider_ and self.federated_provider_ != x.federated_provider_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields (email, auth_domain, gaiaid) are set."""
    initialized = 1
    if (not self.has_email_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: email not set.')
    if (not self.has_auth_domain_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: auth_domain not set.')
    if (not self.has_gaiaid_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: gaiaid not set.')
    return initialized
  def ByteSize(self):
    # Encoded size; the trailing +4 covers the tags of the three required
    # fields (1 byte each for email/auth_domain, 2 bytes for gaiaid).
    n = 0
    n += self.lengthString(len(self.email_))
    n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    n += self.lengthVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_): n += 2 + self.lengthString(len(self.obfuscated_gaiaid_))
    if (self.has_federated_identity_): n += 2 + self.lengthString(len(self.federated_identity_))
    if (self.has_federated_provider_): n += 2 + self.lengthString(len(self.federated_provider_))
    return n + 4
  def ByteSizePartial(self):
    # Like ByteSize, but tolerates missing required fields.
    n = 0
    if (self.has_email_):
      n += 1
      n += self.lengthString(len(self.email_))
    if (self.has_auth_domain_):
      n += 1
      n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    if (self.has_gaiaid_):
      n += 2
      n += self.lengthVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_): n += 2 + self.lengthString(len(self.obfuscated_gaiaid_))
    if (self.has_federated_identity_): n += 2 + self.lengthString(len(self.federated_identity_))
    if (self.has_federated_provider_): n += 2 + self.lengthString(len(self.federated_provider_))
    return n
  def Clear(self):
    self.clear_email()
    self.clear_auth_domain()
    self.clear_nickname()
    self.clear_gaiaid()
    self.clear_obfuscated_gaiaid()
    self.clear_federated_identity()
    self.clear_federated_provider()
  def OutputUnchecked(self, out):
    # Serialize without required-field checks.  Tags encode field numbers
    # 9 email (74), 10 auth_domain (82), 11 nickname (90), 18 gaiaid (144),
    # 19 obfuscated_gaiaid (154), 21 federated_identity (170),
    # 22 federated_provider (178).
    out.putVarInt32(74)
    out.putPrefixedString(self.email_)
    out.putVarInt32(82)
    out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(90)
      out.putPrefixedString(self.nickname_)
    out.putVarInt32(144)
    out.putVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_):
      out.putVarInt32(154)
      out.putPrefixedString(self.obfuscated_gaiaid_)
    if (self.has_federated_identity_):
      out.putVarInt32(170)
      out.putPrefixedString(self.federated_identity_)
    if (self.has_federated_provider_):
      out.putVarInt32(178)
      out.putPrefixedString(self.federated_provider_)
  def OutputPartial(self, out):
    # Serialize only the fields that are actually present.
    if (self.has_email_):
      out.putVarInt32(74)
      out.putPrefixedString(self.email_)
    if (self.has_auth_domain_):
      out.putVarInt32(82)
      out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(90)
      out.putPrefixedString(self.nickname_)
    if (self.has_gaiaid_):
      out.putVarInt32(144)
      out.putVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_):
      out.putVarInt32(154)
      out.putPrefixedString(self.obfuscated_gaiaid_)
    if (self.has_federated_identity_):
      out.putVarInt32(170)
      out.putPrefixedString(self.federated_identity_)
    if (self.has_federated_provider_):
      out.putVarInt32(178)
      out.putPrefixedString(self.federated_provider_)
  def TryMerge(self, d):
    """Parse fields from decoder d until the enclosing group's END tag (68)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 68: break
      if tt == 74:
        self.set_email(d.getPrefixedString())
        continue
      if tt == 82:
        self.set_auth_domain(d.getPrefixedString())
        continue
      if tt == 90:
        self.set_nickname(d.getPrefixedString())
        continue
      if tt == 144:
        self.set_gaiaid(d.getVarInt64())
        continue
      if tt == 154:
        self.set_obfuscated_gaiaid(d.getPrefixedString())
        continue
      if tt == 170:
        self.set_federated_identity(d.getPrefixedString())
        continue
      if tt == 178:
        self.set_federated_provider(d.getPrefixedString())
        continue
      # Tag 0 is malformed input; anything else is an unknown field to skip.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Render the fields that are set as indented debug text."""
    res=""
    if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
    if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
    if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
    if self.has_obfuscated_gaiaid_: res+=prefix+("obfuscated_gaiaid: %s\n" % self.DebugFormatString(self.obfuscated_gaiaid_))
    if self.has_federated_identity_: res+=prefix+("federated_identity: %s\n" % self.DebugFormatString(self.federated_identity_))
    if self.has_federated_provider_: res+=prefix+("federated_provider: %s\n" % self.DebugFormatString(self.federated_provider_))
    return res
class PropertyValue_ReferenceValue(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message: a datastore key (Reference) value.

  Required field: app.  Optional: name_space.  Repeated group:
  pathelement (kind + id/name pairs forming the entity path).
  Machine-generated wire-format code: tag numbers and serialization order
  must not be changed.
  """
  # Presence flag and default value for each scalar field.
  has_app_ = 0
  app_ = ""
  has_name_space_ = 0
  name_space_ = ""
  def __init__(self, contents=None):
    # Repeated fields live on the instance, not the class.
    self.pathelement_ = []
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter / setter / clearer / presence test per field.
  def app(self): return self.app_
  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x
  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""
  def has_app(self): return self.has_app_
  def name_space(self): return self.name_space_
  def set_name_space(self, x):
    self.has_name_space_ = 1
    self.name_space_ = x
  def clear_name_space(self):
    if self.has_name_space_:
      self.has_name_space_ = 0
      self.name_space_ = ""
  def has_name_space(self): return self.has_name_space_
  # Repeated-field accessors for pathelement.
  def pathelement_size(self): return len(self.pathelement_)
  def pathelement_list(self): return self.pathelement_
  def pathelement(self, i):
    return self.pathelement_[i]
  def mutable_pathelement(self, i):
    return self.pathelement_[i]
  def add_pathelement(self):
    # Append and return a fresh, empty element for the caller to populate.
    x = PropertyValue_ReferenceValuePathElement()
    self.pathelement_.append(x)
    return x
  def clear_pathelement(self):
    self.pathelement_ = []
  def MergeFrom(self, x):
    """Copy every field that is set on x into self; path elements append."""
    assert x is not self
    if (x.has_app()): self.set_app(x.app())
    if (x.has_name_space()): self.set_name_space(x.name_space())
    for i in xrange(x.pathelement_size()): self.add_pathelement().CopyFrom(x.pathelement(i))
  def Equals(self, x):
    """Field-by-field equality, comparing presence flags as well as values."""
    if x is self: return 1
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_name_space_ != x.has_name_space_: return 0
    if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
    if len(self.pathelement_) != len(x.pathelement_): return 0
    for e1, e2 in zip(self.pathelement_, x.pathelement_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff app is set and every path element is itself initialized."""
    initialized = 1
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    for p in self.pathelement_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # +2 per path element covers its START/END group tags; the trailing +1
    # is the one-byte tag of required "app".
    n = 0
    n += self.lengthString(len(self.app_))
    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
    n += 2 * len(self.pathelement_)
    for i in xrange(len(self.pathelement_)): n += self.pathelement_[i].ByteSize()
    return n + 1
  def ByteSizePartial(self):
    # Like ByteSize, but tolerates missing required fields.
    n = 0
    if (self.has_app_):
      n += 1
      n += self.lengthString(len(self.app_))
    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
    n += 2 * len(self.pathelement_)
    for i in xrange(len(self.pathelement_)): n += self.pathelement_[i].ByteSizePartial()
    return n
  def Clear(self):
    self.clear_app()
    self.clear_name_space()
    self.clear_pathelement()
  def OutputUnchecked(self, out):
    # Tags: 106 = field 13 (app), 115/116 = START/END of group 14
    # (PathElement), 162 = field 20 (name_space).
    out.putVarInt32(106)
    out.putPrefixedString(self.app_)
    for i in xrange(len(self.pathelement_)):
      out.putVarInt32(115)
      self.pathelement_[i].OutputUnchecked(out)
      out.putVarInt32(116)
    if (self.has_name_space_):
      out.putVarInt32(162)
      out.putPrefixedString(self.name_space_)
  def OutputPartial(self, out):
    # Serialize only the fields that are actually present.
    if (self.has_app_):
      out.putVarInt32(106)
      out.putPrefixedString(self.app_)
    for i in xrange(len(self.pathelement_)):
      out.putVarInt32(115)
      self.pathelement_[i].OutputPartial(out)
      out.putVarInt32(116)
    if (self.has_name_space_):
      out.putVarInt32(162)
      out.putPrefixedString(self.name_space_)
  def TryMerge(self, d):
    """Parse fields from decoder d until the enclosing group's END tag (100)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 100: break
      if tt == 106:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 115:
        self.add_pathelement().TryMerge(d)
        continue
      if tt == 162:
        self.set_name_space(d.getPrefixedString())
        continue
      # Tag 0 is malformed input; anything else is an unknown field to skip.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Render the fields that are set as indented debug text."""
    res=""
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
    cnt=0
    for e in self.pathelement_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("PathElement%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
class PropertyValue(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message: the polymorphic value of a Property.

  All fields are optional; at most one is normally set: int64Value,
  booleanValue, stringValue, doubleValue, or one of the group-typed
  submessages PointValue, UserValue, ReferenceValue.  Submessages are
  created lazily under lazy_init_lock_.  Machine-generated wire-format
  code: tag numbers and serialization order must not be changed.
  """
  # Presence flag and default value for each scalar field; submessage
  # slots start as None and are built on first access.
  has_int64value_ = 0
  int64value_ = 0
  has_booleanvalue_ = 0
  booleanvalue_ = 0
  has_stringvalue_ = 0
  stringvalue_ = ""
  has_doublevalue_ = 0
  doublevalue_ = 0.0
  has_pointvalue_ = 0
  pointvalue_ = None
  has_uservalue_ = 0
  uservalue_ = None
  has_referencevalue_ = 0
  referencevalue_ = None
  def __init__(self, contents=None):
    # Lock guarding lazy construction of the submessage fields below.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter / setter / clearer / presence test per field.
  def int64value(self): return self.int64value_
  def set_int64value(self, x):
    self.has_int64value_ = 1
    self.int64value_ = x
  def clear_int64value(self):
    if self.has_int64value_:
      self.has_int64value_ = 0
      self.int64value_ = 0
  def has_int64value(self): return self.has_int64value_
  def booleanvalue(self): return self.booleanvalue_
  def set_booleanvalue(self, x):
    self.has_booleanvalue_ = 1
    self.booleanvalue_ = x
  def clear_booleanvalue(self):
    if self.has_booleanvalue_:
      self.has_booleanvalue_ = 0
      self.booleanvalue_ = 0
  def has_booleanvalue(self): return self.has_booleanvalue_
  def stringvalue(self): return self.stringvalue_
  def set_stringvalue(self, x):
    self.has_stringvalue_ = 1
    self.stringvalue_ = x
  def clear_stringvalue(self):
    if self.has_stringvalue_:
      self.has_stringvalue_ = 0
      self.stringvalue_ = ""
  def has_stringvalue(self): return self.has_stringvalue_
  def doublevalue(self): return self.doublevalue_
  def set_doublevalue(self, x):
    self.has_doublevalue_ = 1
    self.doublevalue_ = x
  def clear_doublevalue(self):
    if self.has_doublevalue_:
      self.has_doublevalue_ = 0
      self.doublevalue_ = 0.0
  def has_doublevalue(self): return self.has_doublevalue_
  def pointvalue(self):
    # Lazily build the submessage; double-checked under the init lock.
    if self.pointvalue_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.pointvalue_ is None: self.pointvalue_ = PropertyValue_PointValue()
      finally:
        self.lazy_init_lock_.release()
    return self.pointvalue_
  def mutable_pointvalue(self): self.has_pointvalue_ = 1; return self.pointvalue()
  def clear_pointvalue(self):
    # Avoid materializing the submessage just to clear it.
    if self.has_pointvalue_:
      self.has_pointvalue_ = 0;
      if self.pointvalue_ is not None: self.pointvalue_.Clear()
  def has_pointvalue(self): return self.has_pointvalue_
  def uservalue(self):
    # Lazily build the submessage; double-checked under the init lock.
    if self.uservalue_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.uservalue_ is None: self.uservalue_ = PropertyValue_UserValue()
      finally:
        self.lazy_init_lock_.release()
    return self.uservalue_
  def mutable_uservalue(self): self.has_uservalue_ = 1; return self.uservalue()
  def clear_uservalue(self):
    # Avoid materializing the submessage just to clear it.
    if self.has_uservalue_:
      self.has_uservalue_ = 0;
      if self.uservalue_ is not None: self.uservalue_.Clear()
  def has_uservalue(self): return self.has_uservalue_
  def referencevalue(self):
    # Lazily build the submessage; double-checked under the init lock.
    if self.referencevalue_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.referencevalue_ is None: self.referencevalue_ = PropertyValue_ReferenceValue()
      finally:
        self.lazy_init_lock_.release()
    return self.referencevalue_
  def mutable_referencevalue(self): self.has_referencevalue_ = 1; return self.referencevalue()
  def clear_referencevalue(self):
    # Avoid materializing the submessage just to clear it.
    if self.has_referencevalue_:
      self.has_referencevalue_ = 0;
      if self.referencevalue_ is not None: self.referencevalue_.Clear()
  def has_referencevalue(self): return self.has_referencevalue_
  def MergeFrom(self, x):
    """Copy every field that is set on x into self (submessages merge)."""
    assert x is not self
    if (x.has_int64value()): self.set_int64value(x.int64value())
    if (x.has_booleanvalue()): self.set_booleanvalue(x.booleanvalue())
    if (x.has_stringvalue()): self.set_stringvalue(x.stringvalue())
    if (x.has_doublevalue()): self.set_doublevalue(x.doublevalue())
    if (x.has_pointvalue()): self.mutable_pointvalue().MergeFrom(x.pointvalue())
    if (x.has_uservalue()): self.mutable_uservalue().MergeFrom(x.uservalue())
    if (x.has_referencevalue()): self.mutable_referencevalue().MergeFrom(x.referencevalue())
  def Equals(self, x):
    """Field-by-field equality, comparing presence flags as well as values."""
    if x is self: return 1
    if self.has_int64value_ != x.has_int64value_: return 0
    if self.has_int64value_ and self.int64value_ != x.int64value_: return 0
    if self.has_booleanvalue_ != x.has_booleanvalue_: return 0
    if self.has_booleanvalue_ and self.booleanvalue_ != x.booleanvalue_: return 0
    if self.has_stringvalue_ != x.has_stringvalue_: return 0
    if self.has_stringvalue_ and self.stringvalue_ != x.stringvalue_: return 0
    if self.has_doublevalue_ != x.has_doublevalue_: return 0
    if self.has_doublevalue_ and self.doublevalue_ != x.doublevalue_: return 0
    if self.has_pointvalue_ != x.has_pointvalue_: return 0
    if self.has_pointvalue_ and self.pointvalue_ != x.pointvalue_: return 0
    if self.has_uservalue_ != x.has_uservalue_: return 0
    if self.has_uservalue_ and self.uservalue_ != x.uservalue_: return 0
    if self.has_referencevalue_ != x.has_referencevalue_: return 0
    if self.has_referencevalue_ and self.referencevalue_ != x.referencevalue_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff every present submessage is itself initialized."""
    initialized = 1
    if (self.has_pointvalue_ and not self.pointvalue_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_uservalue_ and not self.uservalue_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_referencevalue_ and not self.referencevalue_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    # Encoded size; +2 per group field covers its START/END tags.
    n = 0
    if (self.has_int64value_): n += 1 + self.lengthVarInt64(self.int64value_)
    if (self.has_booleanvalue_): n += 2
    if (self.has_stringvalue_): n += 1 + self.lengthString(len(self.stringvalue_))
    if (self.has_doublevalue_): n += 9
    if (self.has_pointvalue_): n += 2 + self.pointvalue_.ByteSize()
    if (self.has_uservalue_): n += 2 + self.uservalue_.ByteSize()
    if (self.has_referencevalue_): n += 2 + self.referencevalue_.ByteSize()
    return n
  def ByteSizePartial(self):
    # Like ByteSize, but submessage sizes tolerate missing required fields.
    n = 0
    if (self.has_int64value_): n += 1 + self.lengthVarInt64(self.int64value_)
    if (self.has_booleanvalue_): n += 2
    if (self.has_stringvalue_): n += 1 + self.lengthString(len(self.stringvalue_))
    if (self.has_doublevalue_): n += 9
    if (self.has_pointvalue_): n += 2 + self.pointvalue_.ByteSizePartial()
    if (self.has_uservalue_): n += 2 + self.uservalue_.ByteSizePartial()
    if (self.has_referencevalue_): n += 2 + self.referencevalue_.ByteSizePartial()
    return n
  def Clear(self):
    self.clear_int64value()
    self.clear_booleanvalue()
    self.clear_stringvalue()
    self.clear_doublevalue()
    self.clear_pointvalue()
    self.clear_uservalue()
    self.clear_referencevalue()
  def OutputUnchecked(self, out):
    # Tags encode fields 1 int64Value (8), 2 booleanValue (16),
    # 3 stringValue (26), 4 doubleValue (33), group 5 PointValue (43/44),
    # group 8 UserValue (67/68), group 12 ReferenceValue (99/100).
    if (self.has_int64value_):
      out.putVarInt32(8)
      out.putVarInt64(self.int64value_)
    if (self.has_booleanvalue_):
      out.putVarInt32(16)
      out.putBoolean(self.booleanvalue_)
    if (self.has_stringvalue_):
      out.putVarInt32(26)
      out.putPrefixedString(self.stringvalue_)
    if (self.has_doublevalue_):
      out.putVarInt32(33)
      out.putDouble(self.doublevalue_)
    if (self.has_pointvalue_):
      out.putVarInt32(43)
      self.pointvalue_.OutputUnchecked(out)
      out.putVarInt32(44)
    if (self.has_uservalue_):
      out.putVarInt32(67)
      self.uservalue_.OutputUnchecked(out)
      out.putVarInt32(68)
    if (self.has_referencevalue_):
      out.putVarInt32(99)
      self.referencevalue_.OutputUnchecked(out)
      out.putVarInt32(100)
  def OutputPartial(self, out):
    # Serialize only the fields that are actually present.
    if (self.has_int64value_):
      out.putVarInt32(8)
      out.putVarInt64(self.int64value_)
    if (self.has_booleanvalue_):
      out.putVarInt32(16)
      out.putBoolean(self.booleanvalue_)
    if (self.has_stringvalue_):
      out.putVarInt32(26)
      out.putPrefixedString(self.stringvalue_)
    if (self.has_doublevalue_):
      out.putVarInt32(33)
      out.putDouble(self.doublevalue_)
    if (self.has_pointvalue_):
      out.putVarInt32(43)
      self.pointvalue_.OutputPartial(out)
      out.putVarInt32(44)
    if (self.has_uservalue_):
      out.putVarInt32(67)
      self.uservalue_.OutputPartial(out)
      out.putVarInt32(68)
    if (self.has_referencevalue_):
      out.putVarInt32(99)
      self.referencevalue_.OutputPartial(out)
      out.putVarInt32(100)
  def TryMerge(self, d):
    """Parse fields from decoder d until the buffer is exhausted."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_int64value(d.getVarInt64())
        continue
      if tt == 16:
        self.set_booleanvalue(d.getBoolean())
        continue
      if tt == 26:
        self.set_stringvalue(d.getPrefixedString())
        continue
      if tt == 33:
        self.set_doublevalue(d.getDouble())
        continue
      if tt == 43:
        self.mutable_pointvalue().TryMerge(d)
        continue
      if tt == 67:
        self.mutable_uservalue().TryMerge(d)
        continue
      if tt == 99:
        self.mutable_referencevalue().TryMerge(d)
        continue
      # Tag 0 is malformed input; anything else is an unknown field to skip.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Render the fields that are set as indented debug text."""
    res=""
    if self.has_int64value_: res+=prefix+("int64Value: %s\n" % self.DebugFormatInt64(self.int64value_))
    if self.has_booleanvalue_: res+=prefix+("booleanValue: %s\n" % self.DebugFormatBool(self.booleanvalue_))
    if self.has_stringvalue_: res+=prefix+("stringValue: %s\n" % self.DebugFormatString(self.stringvalue_))
    if self.has_doublevalue_: res+=prefix+("doubleValue: %s\n" % self.DebugFormat(self.doublevalue_))
    if self.has_pointvalue_:
      res+=prefix+"PointValue {\n"
      res+=self.pointvalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_uservalue_:
      res+=prefix+"UserValue {\n"
      res+=self.uservalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_referencevalue_:
      res+=prefix+"ReferenceValue {\n"
      res+=self.referencevalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Expand a sparse {tag: value} dict into a dense tuple indexed by tag."""
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field numbers for this message and its nested groups.
  kint64Value = 1
  kbooleanValue = 2
  kstringValue = 3
  kdoubleValue = 4
  kPointValueGroup = 5
  kPointValuex = 6
  kPointValuey = 7
  kUserValueGroup = 8
  kUserValueemail = 9
  kUserValueauth_domain = 10
  kUserValuenickname = 11
  kUserValuegaiaid = 18
  kUserValueobfuscated_gaiaid = 19
  kUserValuefederated_identity = 21
  kUserValuefederated_provider = 22
  kReferenceValueGroup = 12
  kReferenceValueapp = 13
  kReferenceValuename_space = 20
  kReferenceValuePathElementGroup = 14
  kReferenceValuePathElementtype = 15
  kReferenceValuePathElementid = 16
  kReferenceValuePathElementname = 17

  # Debug names for each field number (index 0 is the error slot).
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "int64Value",
    2: "booleanValue",
    3: "stringValue",
    4: "doubleValue",
    5: "PointValue",
    6: "x",
    7: "y",
    8: "UserValue",
    9: "email",
    10: "auth_domain",
    11: "nickname",
    12: "ReferenceValue",
    13: "app",
    14: "PathElement",
    15: "type",
    16: "id",
    17: "name",
    18: "gaiaid",
    19: "obfuscated_gaiaid",
    20: "name_space",
    21: "federated_identity",
    22: "federated_provider",
  }, 22)

  # Wire type for each field number.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.DOUBLE,
    5: ProtocolBuffer.Encoder.STARTGROUP,
    6: ProtocolBuffer.Encoder.DOUBLE,
    7: ProtocolBuffer.Encoder.DOUBLE,
    8: ProtocolBuffer.Encoder.STARTGROUP,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.STRING,
    11: ProtocolBuffer.Encoder.STRING,
    12: ProtocolBuffer.Encoder.STARTGROUP,
    13: ProtocolBuffer.Encoder.STRING,
    14: ProtocolBuffer.Encoder.STARTGROUP,
    15: ProtocolBuffer.Encoder.STRING,
    16: ProtocolBuffer.Encoder.NUMERIC,
    17: ProtocolBuffer.Encoder.STRING,
    18: ProtocolBuffer.Encoder.NUMERIC,
    19: ProtocolBuffer.Encoder.STRING,
    20: ProtocolBuffer.Encoder.STRING,
    21: ProtocolBuffer.Encoder.STRING,
    22: ProtocolBuffer.Encoder.STRING,
  }, 22, ProtocolBuffer.Encoder.MAX_TYPE)

  # Legacy styling hooks kept empty by the generator.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.PropertyValue'
class Property(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message: a named, typed property of an entity.

  Required fields: name, value, multiple.  Optional: meaning,
  meaning_uri, embedded.  Machine-generated wire-format code; tag
  numbers must not be changed.
  """
  # Meaning enum values: how the raw value should be interpreted.
  NO_MEANING = 0
  BLOB = 14
  TEXT = 15
  BYTESTRING = 16
  ATOM_CATEGORY = 1
  ATOM_LINK = 2
  ATOM_TITLE = 3
  ATOM_CONTENT = 4
  ATOM_SUMMARY = 5
  ATOM_AUTHOR = 6
  GD_WHEN = 7
  GD_EMAIL = 8
  GEORSS_POINT = 9
  GD_IM = 10
  GD_PHONENUMBER = 11
  GD_POSTALADDRESS = 12
  GD_RATING = 13
  BLOBKEY = 17
  ENTITY_PROTO = 19
  INDEX_VALUE = 18

  # Debug names for the Meaning enum values.
  _Meaning_NAMES = {
    0: "NO_MEANING",
    14: "BLOB",
    15: "TEXT",
    16: "BYTESTRING",
    1: "ATOM_CATEGORY",
    2: "ATOM_LINK",
    3: "ATOM_TITLE",
    4: "ATOM_CONTENT",
    5: "ATOM_SUMMARY",
    6: "ATOM_AUTHOR",
    7: "GD_WHEN",
    8: "GD_EMAIL",
    9: "GEORSS_POINT",
    10: "GD_IM",
    11: "GD_PHONENUMBER",
    12: "GD_POSTALADDRESS",
    13: "GD_RATING",
    17: "BLOBKEY",
    19: "ENTITY_PROTO",
    18: "INDEX_VALUE",
  }

  def Meaning_Name(cls, x): return cls._Meaning_NAMES.get(x, "")
  Meaning_Name = classmethod(Meaning_Name)

  # Presence flag and default value for each field (value_ is set
  # per-instance in __init__).
  has_meaning_ = 0
  meaning_ = 0
  has_meaning_uri_ = 0
  meaning_uri_ = ""
  has_name_ = 0
  name_ = ""
  has_value_ = 0
  has_multiple_ = 0
  multiple_ = 0
  has_embedded_ = 0
  embedded_ = 0
  def __init__(self, contents=None):
    # The required value submessage is always constructed eagerly.
    self.value_ = PropertyValue()
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter / setter / clearer / presence test per field.
  def meaning(self): return self.meaning_
  def set_meaning(self, x):
    self.has_meaning_ = 1
    self.meaning_ = x
  def clear_meaning(self):
    if self.has_meaning_:
      self.has_meaning_ = 0
      self.meaning_ = 0
  def has_meaning(self): return self.has_meaning_
  def meaning_uri(self): return self.meaning_uri_
  def set_meaning_uri(self, x):
    self.has_meaning_uri_ = 1
    self.meaning_uri_ = x
  def clear_meaning_uri(self):
    if self.has_meaning_uri_:
      self.has_meaning_uri_ = 0
      self.meaning_uri_ = ""
  def has_meaning_uri(self): return self.has_meaning_uri_
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def value(self): return self.value_
  def mutable_value(self): self.has_value_ = 1; return self.value_
  def clear_value(self):self.has_value_ = 0; self.value_.Clear()
  def has_value(self): return self.has_value_
  def multiple(self): return self.multiple_
  def set_multiple(self, x):
    self.has_multiple_ = 1
    self.multiple_ = x
  def clear_multiple(self):
    if self.has_multiple_:
      self.has_multiple_ = 0
      self.multiple_ = 0
  def has_multiple(self): return self.has_multiple_
  def embedded(self): return self.embedded_
  def set_embedded(self, x):
    self.has_embedded_ = 1
    self.embedded_ = x
  def clear_embedded(self):
    if self.has_embedded_:
      self.has_embedded_ = 0
      self.embedded_ = 0
  def has_embedded(self): return self.has_embedded_
  def MergeFrom(self, x):
    """Copy every field that is set on x into self (value merges)."""
    assert x is not self
    if (x.has_meaning()): self.set_meaning(x.meaning())
    if (x.has_meaning_uri()): self.set_meaning_uri(x.meaning_uri())
    if (x.has_name()): self.set_name(x.name())
    if (x.has_value()): self.mutable_value().MergeFrom(x.value())
    if (x.has_multiple()): self.set_multiple(x.multiple())
    if (x.has_embedded()): self.set_embedded(x.embedded())
  def Equals(self, x):
    """Field-by-field equality, comparing presence flags as well as values."""
    if x is self: return 1
    if self.has_meaning_ != x.has_meaning_: return 0
    if self.has_meaning_ and self.meaning_ != x.meaning_: return 0
    if self.has_meaning_uri_ != x.has_meaning_uri_: return 0
    if self.has_meaning_uri_ and self.meaning_uri_ != x.meaning_uri_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    if self.has_multiple_ != x.has_multiple_: return 0
    if self.has_multiple_ and self.multiple_ != x.multiple_: return 0
    if self.has_embedded_ != x.has_embedded_: return 0
    if self.has_embedded_ and self.embedded_ != x.embedded_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required fields name, value, multiple are set and the
    value submessage is itself initialized."""
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    elif not self.value_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_multiple_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: multiple not set.')
    return initialized
def ByteSize(self):
n = 0
if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
if (self.has_meaning_uri_): n += 1 + self.lengthString(len(self.meaning_uri_))
n += self.lengthString(len(self.name_))
n += self.lengthString(self.value_.ByteSize())
if (self.has_embedded_): n += 2
return n + 4
def ByteSizePartial(self):
n = 0
if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
if (self.has_meaning_uri_): n += 1 + self.lengthString(len(self.meaning_uri_))
if (self.has_name_):
n += 1
n += self.lengthString(len(self.name_))
if (self.has_value_):
n += 1
n += self.lengthString(self.value_.ByteSizePartial())
if (self.has_multiple_):
n += 2
if (self.has_embedded_): n += 2
return n
def Clear(self):
self.clear_meaning()
self.clear_meaning_uri()
self.clear_name()
self.clear_value()
self.clear_multiple()
self.clear_embedded()
  def OutputUnchecked(self, out):
    """Write the wire encoding to encoder *out*; assumes IsInitialized().

    Tag bytes are (field_number << 3) | wire_type, e.g. 8 = field 1 varint,
    26 = field 3 length-delimited.
    """
    if (self.has_meaning_):
      out.putVarInt32(8)
      out.putVarInt32(self.meaning_)
    if (self.has_meaning_uri_):
      out.putVarInt32(18)
      out.putPrefixedString(self.meaning_uri_)
    out.putVarInt32(26)
    out.putPrefixedString(self.name_)
    out.putVarInt32(32)
    out.putBoolean(self.multiple_)
    # value is written length-prefixed as an embedded message.
    out.putVarInt32(42)
    out.putVarInt32(self.value_.ByteSize())
    self.value_.OutputUnchecked(out)
    if (self.has_embedded_):
      out.putVarInt32(48)
      out.putBoolean(self.embedded_)
  def OutputPartial(self, out):
    """Like OutputUnchecked, but skips unset fields (even required ones)."""
    if (self.has_meaning_):
      out.putVarInt32(8)
      out.putVarInt32(self.meaning_)
    if (self.has_meaning_uri_):
      out.putVarInt32(18)
      out.putPrefixedString(self.meaning_uri_)
    if (self.has_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.name_)
    if (self.has_multiple_):
      out.putVarInt32(32)
      out.putBoolean(self.multiple_)
    if (self.has_value_):
      out.putVarInt32(42)
      out.putVarInt32(self.value_.ByteSizePartial())
      self.value_.OutputPartial(out)
    if (self.has_embedded_):
      out.putVarInt32(48)
      out.putBoolean(self.embedded_)
  def TryMerge(self, d):
    """Decode fields from decoder *d*, merging into self.

    Unknown tags are skipped; tag 0 is malformed input and raises
    ProtocolBufferDecodeError.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_meaning(d.getVarInt32())
        continue
      if tt == 18:
        self.set_meaning_uri(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_multiple(d.getBoolean())
        continue
      if tt == 42:
        # Embedded message: decode the length-prefixed slice with a
        # sub-decoder, then advance past it.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_value().TryMerge(tmp)
        continue
      if tt == 48:
        self.set_embedded(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text (not the wire format)."""
    res=""
    if self.has_meaning_: res+=prefix+("meaning: %s\n" % self.DebugFormatInt32(self.meaning_))
    if self.has_meaning_uri_: res+=prefix+("meaning_uri: %s\n" % self.DebugFormatString(self.meaning_uri_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_value_:
      res+=prefix+"value <\n"
      res+=self.value_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_multiple_: res+=prefix+("multiple: %s\n" % self.DebugFormatBool(self.multiple_))
    if self.has_embedded_: res+=prefix+("embedded: %s\n" % self.DebugFormatBool(self.embedded_))
    return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kmeaning = 1
kmeaning_uri = 2
kname = 3
kvalue = 5
kmultiple = 4
kembedded = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "meaning",
2: "meaning_uri",
3: "name",
4: "multiple",
5: "value",
6: "embedded",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.NUMERIC,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Property'
class Path_Element(ProtocolBuffer.ProtocolMessage):
  """One element of an entity-key Path: a required `type` string plus an
  optional numeric `id` or string `name`.

  Encoded as a protobuf *group* inside Path (no _TEXT/_TYPES tables here;
  they live on Path, and TryMerge terminates on the group's END tag).
  """
  # Presence bits and defaults.
  has_type_ = 0
  type_ = ""
  has_id_ = 0
  id_ = 0
  has_name_ = 0
  name_ = ""
  def __init__(self, contents=None):
    """Build an empty element; if *contents* is given, parse it as wire bytes."""
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors (note: type/id shadow builtins; generated convention).
  def type(self): return self.type_
  def set_type(self, x):
    self.has_type_ = 1
    self.type_ = x
  def clear_type(self):
    if self.has_type_:
      self.has_type_ = 0
      self.type_ = ""
  def has_type(self): return self.has_type_
  def id(self): return self.id_
  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x
  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0
  def has_id(self): return self.has_id_
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def MergeFrom(self, x):
    """Copy every set field from *x* (another Path_Element) into self."""
    assert x is not self
    if (x.has_type()): self.set_type(x.type())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_name()): self.set_name(x.name())
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_type_ != x.has_type_: return 0
    if self.has_type_ and self.type_ != x.type_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required `type` field is set."""
    initialized = 1
    if (not self.has_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: type not set.')
    return initialized
  def ByteSize(self):
    """Serialized size in bytes; +1 covers the tag byte of `type`."""
    n = 0
    n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
    return n + 1
  def ByteSizePartial(self):
    """Like ByteSize, but counts only the fields that are actually set."""
    n = 0
    if (self.has_type_):
      n += 1
      n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
    return n
  def Clear(self):
    """Reset all fields to defaults and drop presence bits."""
    self.clear_type()
    self.clear_id()
    self.clear_name()
  def OutputUnchecked(self, out):
    """Write the wire encoding (tags 18/24/34 = fields 2/3/4 of the group)."""
    out.putVarInt32(18)
    out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(24)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(34)
      out.putPrefixedString(self.name_)
  def OutputPartial(self, out):
    """Like OutputUnchecked, but skips unset fields (even required ones)."""
    if (self.has_type_):
      out.putVarInt32(18)
      out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(24)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(34)
      out.putPrefixedString(self.name_)
  def TryMerge(self, d):
    """Decode group fields from *d* until the END_GROUP tag (12) is read."""
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_type(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_id(d.getVarInt64())
        continue
      if tt == 34:
        self.set_name(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text (not the wire format)."""
    res=""
    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    return res
class Path(ProtocolBuffer.ProtocolMessage):
  """Generated message for storage_onestore_v3.Path: a repeated sequence of
  Path_Element groups forming an entity key path."""
  def __init__(self, contents=None):
    """Build an empty path; if *contents* is given, parse it as wire bytes."""
    self.element_ = []
    if contents is not None: self.MergeFromString(contents)
  # Repeated-field accessors for `element`.
  def element_size(self): return len(self.element_)
  def element_list(self): return self.element_
  def element(self, i):
    return self.element_[i]
  def mutable_element(self, i):
    return self.element_[i]
  def add_element(self):
    # Append a fresh, empty Path_Element and return it for the caller to fill.
    x = Path_Element()
    self.element_.append(x)
    return x
  def clear_element(self):
    self.element_ = []
  def MergeFrom(self, x):
    """Append copies of every element of *x* (another Path) onto self."""
    assert x is not self
    for i in xrange(x.element_size()): self.add_element().CopyFrom(x.element(i))
  def Equals(self, x):
    """Element-wise equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if len(self.element_) != len(x.element_): return 0
    for e1, e2 in zip(self.element_, x.element_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff every element is itself initialized."""
    initialized = 1
    for p in self.element_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Serialized size; 2 bytes per element cover its START/END group tags."""
    n = 0
    n += 2 * len(self.element_)
    for i in xrange(len(self.element_)): n += self.element_[i].ByteSize()
    return n
  def ByteSizePartial(self):
    """Like ByteSize, but sizes elements with their partial encoding."""
    n = 0
    n += 2 * len(self.element_)
    for i in xrange(len(self.element_)): n += self.element_[i].ByteSizePartial()
    return n
  def Clear(self):
    """Drop all elements."""
    self.clear_element()
  def OutputUnchecked(self, out):
    """Write each element wrapped in START_GROUP (11) / END_GROUP (12) tags."""
    for i in xrange(len(self.element_)):
      out.putVarInt32(11)
      self.element_[i].OutputUnchecked(out)
      out.putVarInt32(12)
  def OutputPartial(self, out):
    """Like OutputUnchecked, using each element's partial encoding."""
    for i in xrange(len(self.element_)):
      out.putVarInt32(11)
      self.element_[i].OutputPartial(out)
      out.putVarInt32(12)
  def TryMerge(self, d):
    """Decode elements from *d*; each START_GROUP tag (11) opens an element."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_element().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text (not the wire format)."""
    res=""
    cnt=0
    for e in self.element_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Element%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} map into a dense tuple indexed 0..maxtag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Protobuf field numbers (Element is a group; its fields follow).
  kElementGroup = 1
  kElementtype = 2
  kElementid = 3
  kElementname = 4
  # Dense field-number -> name table used for debug text output.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "Element",
    2: "type",
    3: "id",
    4: "name",
  }, 4)
  # Dense field-number -> wire-type table for the generic encoder/decoder.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.STRING,
  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Path'
class Reference(ProtocolBuffer.ProtocolMessage):
  """Generated message for storage_onestore_v3.Reference: a fully-qualified
  entity key consisting of required `app` and `path` plus an optional
  `name_space`."""
  # Presence bits and defaults; path_ is allocated eagerly in __init__.
  has_app_ = 0
  app_ = ""
  has_name_space_ = 0
  name_space_ = ""
  has_path_ = 0
  def __init__(self, contents=None):
    """Build an empty reference; if *contents* is given, parse wire bytes."""
    self.path_ = Path()
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors.
  def app(self): return self.app_
  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x
  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""
  def has_app(self): return self.has_app_
  def name_space(self): return self.name_space_
  def set_name_space(self, x):
    self.has_name_space_ = 1
    self.name_space_ = x
  def clear_name_space(self):
    if self.has_name_space_:
      self.has_name_space_ = 0
      self.name_space_ = ""
  def has_name_space(self): return self.has_name_space_
  def path(self): return self.path_
  def mutable_path(self): self.has_path_ = 1; return self.path_
  def clear_path(self):self.has_path_ = 0; self.path_.Clear()
  def has_path(self): return self.has_path_
  def MergeFrom(self, x):
    """Copy every set field from *x* (another Reference) into self."""
    assert x is not self
    if (x.has_app()): self.set_app(x.app())
    if (x.has_name_space()): self.set_name_space(x.name_space())
    if (x.has_path()): self.mutable_path().MergeFrom(x.path())
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_name_space_ != x.has_name_space_: return 0
    if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
    if self.has_path_ != x.has_path_: return 0
    if self.has_path_ and self.path_ != x.path_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required fields (app, path) are set and valid."""
    initialized = 1
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    if (not self.has_path_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: path not set.')
    elif not self.path_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    """Serialized size; +2 covers the single-byte tags of app and path."""
    n = 0
    n += self.lengthString(len(self.app_))
    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
    n += self.lengthString(self.path_.ByteSize())
    return n + 2
  def ByteSizePartial(self):
    """Like ByteSize, but counts only the fields that are actually set."""
    n = 0
    if (self.has_app_):
      n += 1
      n += self.lengthString(len(self.app_))
    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
    if (self.has_path_):
      n += 1
      n += self.lengthString(self.path_.ByteSizePartial())
    return n
  def Clear(self):
    """Reset all fields to defaults and drop presence bits."""
    self.clear_app()
    self.clear_name_space()
    self.clear_path()
  def OutputUnchecked(self, out):
    """Write the wire encoding (tags 106/114/162 = fields 13/14/20)."""
    out.putVarInt32(106)
    out.putPrefixedString(self.app_)
    out.putVarInt32(114)
    out.putVarInt32(self.path_.ByteSize())
    self.path_.OutputUnchecked(out)
    if (self.has_name_space_):
      out.putVarInt32(162)
      out.putPrefixedString(self.name_space_)
  def OutputPartial(self, out):
    """Like OutputUnchecked, but skips unset fields (even required ones)."""
    if (self.has_app_):
      out.putVarInt32(106)
      out.putPrefixedString(self.app_)
    if (self.has_path_):
      out.putVarInt32(114)
      out.putVarInt32(self.path_.ByteSizePartial())
      self.path_.OutputPartial(out)
    if (self.has_name_space_):
      out.putVarInt32(162)
      out.putPrefixedString(self.name_space_)
  def TryMerge(self, d):
    """Decode fields from decoder *d*, merging into self."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 106:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 114:
        # path is an embedded message: decode the length-prefixed slice.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_path().TryMerge(tmp)
        continue
      if tt == 162:
        self.set_name_space(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text (not the wire format)."""
    res=""
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
    if self.has_path_:
      res+=prefix+"path <\n"
      res+=self.path_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} map into a dense tuple indexed 0..maxtag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Protobuf field numbers.
  kapp = 13
  kname_space = 20
  kpath = 14
  # Dense field-number -> name table used for debug text output.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    13: "app",
    14: "path",
    20: "name_space",
  }, 20)
  # Dense field-number -> wire-type table for the generic encoder/decoder.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    13: ProtocolBuffer.Encoder.STRING,
    14: ProtocolBuffer.Encoder.STRING,
    20: ProtocolBuffer.Encoder.STRING,
  }, 20, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Reference'
class User(ProtocolBuffer.ProtocolMessage):
  """Generated message for storage_onestore_v3.User: a stored user value.

  Required fields: email, auth_domain, gaiaid.  Optional: nickname,
  obfuscated_gaiaid, federated_identity, federated_provider.
  """
  # Presence bits and defaults.
  has_email_ = 0
  email_ = ""
  has_auth_domain_ = 0
  auth_domain_ = ""
  has_nickname_ = 0
  nickname_ = ""
  has_gaiaid_ = 0
  gaiaid_ = 0
  has_obfuscated_gaiaid_ = 0
  obfuscated_gaiaid_ = ""
  has_federated_identity_ = 0
  federated_identity_ = ""
  has_federated_provider_ = 0
  federated_provider_ = ""
  def __init__(self, contents=None):
    """Build an empty User; if *contents* is given, parse it as wire bytes."""
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter, set_x, clear_x, has_x per field.
  def email(self): return self.email_
  def set_email(self, x):
    self.has_email_ = 1
    self.email_ = x
  def clear_email(self):
    if self.has_email_:
      self.has_email_ = 0
      self.email_ = ""
  def has_email(self): return self.has_email_
  def auth_domain(self): return self.auth_domain_
  def set_auth_domain(self, x):
    self.has_auth_domain_ = 1
    self.auth_domain_ = x
  def clear_auth_domain(self):
    if self.has_auth_domain_:
      self.has_auth_domain_ = 0
      self.auth_domain_ = ""
  def has_auth_domain(self): return self.has_auth_domain_
  def nickname(self): return self.nickname_
  def set_nickname(self, x):
    self.has_nickname_ = 1
    self.nickname_ = x
  def clear_nickname(self):
    if self.has_nickname_:
      self.has_nickname_ = 0
      self.nickname_ = ""
  def has_nickname(self): return self.has_nickname_
  def gaiaid(self): return self.gaiaid_
  def set_gaiaid(self, x):
    self.has_gaiaid_ = 1
    self.gaiaid_ = x
  def clear_gaiaid(self):
    if self.has_gaiaid_:
      self.has_gaiaid_ = 0
      self.gaiaid_ = 0
  def has_gaiaid(self): return self.has_gaiaid_
  def obfuscated_gaiaid(self): return self.obfuscated_gaiaid_
  def set_obfuscated_gaiaid(self, x):
    self.has_obfuscated_gaiaid_ = 1
    self.obfuscated_gaiaid_ = x
  def clear_obfuscated_gaiaid(self):
    if self.has_obfuscated_gaiaid_:
      self.has_obfuscated_gaiaid_ = 0
      self.obfuscated_gaiaid_ = ""
  def has_obfuscated_gaiaid(self): return self.has_obfuscated_gaiaid_
  def federated_identity(self): return self.federated_identity_
  def set_federated_identity(self, x):
    self.has_federated_identity_ = 1
    self.federated_identity_ = x
  def clear_federated_identity(self):
    if self.has_federated_identity_:
      self.has_federated_identity_ = 0
      self.federated_identity_ = ""
  def has_federated_identity(self): return self.has_federated_identity_
  def federated_provider(self): return self.federated_provider_
  def set_federated_provider(self, x):
    self.has_federated_provider_ = 1
    self.federated_provider_ = x
  def clear_federated_provider(self):
    if self.has_federated_provider_:
      self.has_federated_provider_ = 0
      self.federated_provider_ = ""
  def has_federated_provider(self): return self.has_federated_provider_
  def MergeFrom(self, x):
    """Copy every set field from *x* (another User) into self."""
    assert x is not self
    if (x.has_email()): self.set_email(x.email())
    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
    if (x.has_nickname()): self.set_nickname(x.nickname())
    if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())
    if (x.has_obfuscated_gaiaid()): self.set_obfuscated_gaiaid(x.obfuscated_gaiaid())
    if (x.has_federated_identity()): self.set_federated_identity(x.federated_identity())
    if (x.has_federated_provider()): self.set_federated_provider(x.federated_provider())
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_email_ != x.has_email_: return 0
    if self.has_email_ and self.email_ != x.email_: return 0
    if self.has_auth_domain_ != x.has_auth_domain_: return 0
    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
    if self.has_nickname_ != x.has_nickname_: return 0
    if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
    if self.has_gaiaid_ != x.has_gaiaid_: return 0
    if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
    if self.has_obfuscated_gaiaid_ != x.has_obfuscated_gaiaid_: return 0
    if self.has_obfuscated_gaiaid_ and self.obfuscated_gaiaid_ != x.obfuscated_gaiaid_: return 0
    if self.has_federated_identity_ != x.has_federated_identity_: return 0
    if self.has_federated_identity_ and self.federated_identity_ != x.federated_identity_: return 0
    if self.has_federated_provider_ != x.has_federated_provider_: return 0
    if self.has_federated_provider_ and self.federated_provider_ != x.federated_provider_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required fields (email, auth_domain, gaiaid) are set."""
    initialized = 1
    if (not self.has_email_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: email not set.')
    if (not self.has_auth_domain_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: auth_domain not set.')
    if (not self.has_gaiaid_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: gaiaid not set.')
    return initialized
  def ByteSize(self):
    """Serialized size; +3 covers the tags of email, auth_domain and gaiaid."""
    n = 0
    n += self.lengthString(len(self.email_))
    n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    n += self.lengthVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_): n += 1 + self.lengthString(len(self.obfuscated_gaiaid_))
    if (self.has_federated_identity_): n += 1 + self.lengthString(len(self.federated_identity_))
    if (self.has_federated_provider_): n += 1 + self.lengthString(len(self.federated_provider_))
    return n + 3
  def ByteSizePartial(self):
    """Like ByteSize, but counts only the fields that are actually set."""
    n = 0
    if (self.has_email_):
      n += 1
      n += self.lengthString(len(self.email_))
    if (self.has_auth_domain_):
      n += 1
      n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    if (self.has_gaiaid_):
      n += 1
      n += self.lengthVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_): n += 1 + self.lengthString(len(self.obfuscated_gaiaid_))
    if (self.has_federated_identity_): n += 1 + self.lengthString(len(self.federated_identity_))
    if (self.has_federated_provider_): n += 1 + self.lengthString(len(self.federated_provider_))
    return n
  def Clear(self):
    """Reset all fields to defaults and drop presence bits."""
    self.clear_email()
    self.clear_auth_domain()
    self.clear_nickname()
    self.clear_gaiaid()
    self.clear_obfuscated_gaiaid()
    self.clear_federated_identity()
    self.clear_federated_provider()
  def OutputUnchecked(self, out):
    """Write the wire encoding (tags 10..58 = fields 1..7)."""
    out.putVarInt32(10)
    out.putPrefixedString(self.email_)
    out.putVarInt32(18)
    out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(26)
      out.putPrefixedString(self.nickname_)
    out.putVarInt32(32)
    out.putVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_):
      out.putVarInt32(42)
      out.putPrefixedString(self.obfuscated_gaiaid_)
    if (self.has_federated_identity_):
      out.putVarInt32(50)
      out.putPrefixedString(self.federated_identity_)
    if (self.has_federated_provider_):
      out.putVarInt32(58)
      out.putPrefixedString(self.federated_provider_)
  def OutputPartial(self, out):
    """Like OutputUnchecked, but skips unset fields (even required ones)."""
    if (self.has_email_):
      out.putVarInt32(10)
      out.putPrefixedString(self.email_)
    if (self.has_auth_domain_):
      out.putVarInt32(18)
      out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(26)
      out.putPrefixedString(self.nickname_)
    if (self.has_gaiaid_):
      out.putVarInt32(32)
      out.putVarInt64(self.gaiaid_)
    if (self.has_obfuscated_gaiaid_):
      out.putVarInt32(42)
      out.putPrefixedString(self.obfuscated_gaiaid_)
    if (self.has_federated_identity_):
      out.putVarInt32(50)
      out.putPrefixedString(self.federated_identity_)
    if (self.has_federated_provider_):
      out.putVarInt32(58)
      out.putPrefixedString(self.federated_provider_)
  def TryMerge(self, d):
    """Decode fields from decoder *d*, merging into self."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_email(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_auth_domain(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_nickname(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_gaiaid(d.getVarInt64())
        continue
      if tt == 42:
        self.set_obfuscated_gaiaid(d.getPrefixedString())
        continue
      if tt == 50:
        self.set_federated_identity(d.getPrefixedString())
        continue
      if tt == 58:
        self.set_federated_provider(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text (not the wire format)."""
    res=""
    if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
    if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
    if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
    if self.has_obfuscated_gaiaid_: res+=prefix+("obfuscated_gaiaid: %s\n" % self.DebugFormatString(self.obfuscated_gaiaid_))
    if self.has_federated_identity_: res+=prefix+("federated_identity: %s\n" % self.DebugFormatString(self.federated_identity_))
    if self.has_federated_provider_: res+=prefix+("federated_provider: %s\n" % self.DebugFormatString(self.federated_provider_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} map into a dense tuple indexed 0..maxtag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Protobuf field numbers.
  kemail = 1
  kauth_domain = 2
  knickname = 3
  kgaiaid = 4
  kobfuscated_gaiaid = 5
  kfederated_identity = 6
  kfederated_provider = 7
  # Dense field-number -> name table used for debug text output.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "email",
    2: "auth_domain",
    3: "nickname",
    4: "gaiaid",
    5: "obfuscated_gaiaid",
    6: "federated_identity",
    7: "federated_provider",
  }, 7)
  # Dense field-number -> wire-type table for the generic encoder/decoder.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.STRING,
    7: ProtocolBuffer.Encoder.STRING,
  }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.User'
class EntityProto(ProtocolBuffer.ProtocolMessage):
  """Generated message for a stored entity: required key and entity_group,
  optional owner User and kind info, repeated (raw_)property lists.

  (Class continues past this point in the file.)
  """
  # Kind enum values (GData kinds) for the optional kind_ field.
  GD_CONTACT   =    1
  GD_EVENT     =    2
  GD_MESSAGE   =    3
  _Kind_NAMES = {
    1: "GD_CONTACT",
    2: "GD_EVENT",
    3: "GD_MESSAGE",
  }
  def Kind_Name(cls, x): return cls._Kind_NAMES.get(x, "")
  Kind_Name = classmethod(Kind_Name)  # old-style classmethod wrapping (py2-era generated code)
  # Presence bits and defaults; owner_ is lazily allocated (see owner()).
  has_key_ = 0
  has_entity_group_ = 0
  has_owner_ = 0
  owner_ = None
  has_kind_ = 0
  kind_ = 0
  has_kind_uri_ = 0
  kind_uri_ = ""
  def __init__(self, contents=None):
    """Build an empty entity; if *contents* is given, parse it as wire bytes."""
    self.key_ = Reference()
    self.entity_group_ = Path()
    self.property_ = []
    self.raw_property_ = []
    # Lock guarding lazy construction of owner_ (see owner()).
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors.
  def key(self): return self.key_
  def mutable_key(self): self.has_key_ = 1; return self.key_
  def clear_key(self):self.has_key_ = 0; self.key_.Clear()
  def has_key(self): return self.has_key_
  def entity_group(self): return self.entity_group_
  def mutable_entity_group(self): self.has_entity_group_ = 1; return self.entity_group_
  def clear_entity_group(self):self.has_entity_group_ = 0; self.entity_group_.Clear()
  def has_entity_group(self): return self.has_entity_group_
  def owner(self):
    """Return the owner User submessage, lazily creating it under the lock."""
    if self.owner_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.owner_ is None: self.owner_ = User()
      finally:
        self.lazy_init_lock_.release()
    return self.owner_
  def mutable_owner(self): self.has_owner_ = 1; return self.owner()
  def clear_owner(self):
    # Avoid allocating owner_ just to clear it.
    if self.has_owner_:
      self.has_owner_ = 0;
      if self.owner_ is not None: self.owner_.Clear()
  def has_owner(self): return self.has_owner_
  def kind(self): return self.kind_
  def set_kind(self, x):
    self.has_kind_ = 1
    self.kind_ = x
  def clear_kind(self):
    if self.has_kind_:
      self.has_kind_ = 0
      self.kind_ = 0
  def has_kind(self): return self.has_kind_
  def kind_uri(self): return self.kind_uri_
  def set_kind_uri(self, x):
    self.has_kind_uri_ = 1
    self.kind_uri_ = x
  def clear_kind_uri(self):
    if self.has_kind_uri_:
      self.has_kind_uri_ = 0
      self.kind_uri_ = ""
  def has_kind_uri(self): return self.has_kind_uri_
  # Repeated-field accessors for `property` and `raw_property`.
  def property_size(self): return len(self.property_)
  def property_list(self): return self.property_
  def property(self, i):
    return self.property_[i]
  def mutable_property(self, i):
    return self.property_[i]
  def add_property(self):
    x = Property()
    self.property_.append(x)
    return x
  def clear_property(self):
    self.property_ = []
  def raw_property_size(self): return len(self.raw_property_)
  def raw_property_list(self): return self.raw_property_
  def raw_property(self, i):
    return self.raw_property_[i]
  def mutable_raw_property(self, i):
    return self.raw_property_[i]
  def add_raw_property(self):
    x = Property()
    self.raw_property_.append(x)
    return x
  def clear_raw_property(self):
    self.raw_property_ = []
  def MergeFrom(self, x):
    """Copy set scalar/message fields and append repeated fields from *x*."""
    assert x is not self
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_entity_group()): self.mutable_entity_group().MergeFrom(x.entity_group())
    if (x.has_owner()): self.mutable_owner().MergeFrom(x.owner())
    if (x.has_kind()): self.set_kind(x.kind())
    if (x.has_kind_uri()): self.set_kind_uri(x.kind_uri())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
    for i in xrange(x.raw_property_size()): self.add_raw_property().CopyFrom(x.raw_property(i))
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_entity_group_ != x.has_entity_group_: return 0
    if self.has_entity_group_ and self.entity_group_ != x.entity_group_: return 0
    if self.has_owner_ != x.has_owner_: return 0
    if self.has_owner_ and self.owner_ != x.owner_: return 0
    if self.has_kind_ != x.has_kind_: return 0
    if self.has_kind_ and self.kind_ != x.kind_: return 0
    if self.has_kind_uri_ != x.has_kind_uri_: return 0
    if self.has_kind_uri_ and self.kind_uri_ != x.kind_uri_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    if len(self.raw_property_) != len(x.raw_property_): return 0
    for e1, e2 in zip(self.raw_property_, x.raw_property_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required fields (key, entity_group) and all submessages
    are initialized; appends reasons to *debug_strs* when provided."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    elif not self.key_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_entity_group_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: entity_group not set.')
    elif not self.entity_group_.IsInitialized(debug_strs): initialized = 0
    if (self.has_owner_ and not self.owner_.IsInitialized(debug_strs)): initialized = 0
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.raw_property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Serialized size; +3 = 1 tag byte for key (106) plus the 2-byte
    varint tag of entity_group (130)."""
    n = 0
    n += self.lengthString(self.key_.ByteSize())
    n += self.lengthString(self.entity_group_.ByteSize())
    if (self.has_owner_): n += 2 + self.lengthString(self.owner_.ByteSize())
    if (self.has_kind_): n += 1 + self.lengthVarInt64(self.kind_)
    if (self.has_kind_uri_): n += 1 + self.lengthString(len(self.kind_uri_))
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
    n += 1 * len(self.raw_property_)
    for i in xrange(len(self.raw_property_)): n += self.lengthString(self.raw_property_[i].ByteSize())
    return n + 3
  def ByteSizePartial(self):
    """Like ByteSize, but counts only the fields that are actually set."""
    n = 0
    if (self.has_key_):
      n += 1
      n += self.lengthString(self.key_.ByteSizePartial())
    if (self.has_entity_group_):
      n += 2
      n += self.lengthString(self.entity_group_.ByteSizePartial())
    if (self.has_owner_): n += 2 + self.lengthString(self.owner_.ByteSizePartial())
    if (self.has_kind_): n += 1 + self.lengthVarInt64(self.kind_)
    if (self.has_kind_uri_): n += 1 + self.lengthString(len(self.kind_uri_))
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial())
    n += 1 * len(self.raw_property_)
    for i in xrange(len(self.raw_property_)): n += self.lengthString(self.raw_property_[i].ByteSizePartial())
    return n
def Clear(self):
self.clear_key()
self.clear_entity_group()
self.clear_owner()
self.clear_kind()
self.clear_kind_uri()
self.clear_property()
self.clear_raw_property()
def OutputUnchecked(self, out):
if (self.has_kind_):
out.putVarInt32(32)
out.putVarInt32(self.kind_)
if (self.has_kind_uri_):
out.putVarInt32(42)
out.putPrefixedString(self.kind_uri_)
out.putVarInt32(106)
out.putVarInt32(self.key_.ByteSize())
self.key_.OutputUnchecked(out)
for i in xrange(len(self.property_)):
out.putVarInt32(114)
out.putVarInt32(self.property_[i].ByteSize())
self.property_[i].OutputUnchecked(out)
for i in xrange(len(self.raw_property_)):
out.putVarInt32(122)
out.putVarInt32(self.raw_property_[i].ByteSize())
self.raw_property_[i].OutputUnchecked(out)
out.putVarInt32(130)
out.putVarInt32(self.entity_group_.ByteSize())
self.entity_group_.OutputUnchecked(out)
if (self.has_owner_):
out.putVarInt32(138)
out.putVarInt32(self.owner_.ByteSize())
self.owner_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_kind_):
out.putVarInt32(32)
out.putVarInt32(self.kind_)
if (self.has_kind_uri_):
out.putVarInt32(42)
out.putPrefixedString(self.kind_uri_)
if (self.has_key_):
out.putVarInt32(106)
out.putVarInt32(self.key_.ByteSizePartial())
self.key_.OutputPartial(out)
for i in xrange(len(self.property_)):
out.putVarInt32(114)
out.putVarInt32(self.property_[i].ByteSizePartial())
self.property_[i].OutputPartial(out)
for i in xrange(len(self.raw_property_)):
out.putVarInt32(122)
out.putVarInt32(self.raw_property_[i].ByteSizePartial())
self.raw_property_[i].OutputPartial(out)
if (self.has_entity_group_):
out.putVarInt32(130)
out.putVarInt32(self.entity_group_.ByteSizePartial())
self.entity_group_.OutputPartial(out)
if (self.has_owner_):
out.putVarInt32(138)
out.putVarInt32(self.owner_.ByteSizePartial())
self.owner_.OutputPartial(out)
  def TryMerge(self, d):
    """Decode fields from the Decoder `d` and merge them into this message.

    Length-delimited submessages are decoded through a bounded sub-Decoder
    over the same buffer (no copy).  Unrecognized tags are skipped for
    forward compatibility; a zero tag means corrupt input and raises
    ProtocolBuffer.ProtocolBufferDecodeError.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 32:
        self.set_kind(d.getVarInt32())
        continue
      if tt == 42:
        self.set_kind_uri(d.getPrefixedString())
        continue
      if tt == 106:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue
      if tt == 114:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_property().TryMerge(tmp)
        continue
      if tt == 122:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_raw_property().TryMerge(tmp)
        continue
      if tt == 130:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_entity_group().TryMerge(tmp)
        continue
      if tt == 138:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_owner().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Return a human-readable text dump of the message.

    `prefix` indents nested submessages; `printElemNumber` adds a (n)
    index after each repeated-field element name.
    """
    res=""
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_entity_group_:
      res+=prefix+"entity_group <\n"
      res+=self.entity_group_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_owner_:
      res+=prefix+"owner <\n"
      res+=self.owner_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatInt32(self.kind_))
    if self.has_kind_uri_: res+=prefix+("kind_uri: %s\n" % self.DebugFormatString(self.kind_uri_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    cnt=0
    for e in self.raw_property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("raw_property%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  # Helper evaluated at class-definition time (note: no `self`); it densifies
  # a sparse {tag: value} dict into a tuple indexable by tag 0..maxtag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kkey = 13
kentity_group = 16
kowner = 17
kkind = 4
kkind_uri = 5
kproperty = 14
kraw_property = 15
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
4: "kind",
5: "kind_uri",
13: "key",
14: "property",
15: "raw_property",
16: "entity_group",
17: "owner",
}, 17)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
13: ProtocolBuffer.Encoder.STRING,
14: ProtocolBuffer.Encoder.STRING,
15: ProtocolBuffer.Encoder.STRING,
16: ProtocolBuffer.Encoder.STRING,
17: ProtocolBuffer.Encoder.STRING,
}, 17, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.EntityProto'
class CompositeProperty(ProtocolBuffer.ProtocolMessage):
  """Generated message: one composite-index row.

  Fields: index_id (required int64, field 1) and value (repeated string,
  field 2).  This is machine-generated serialization code; the literal
  numbers in Output*/TryMerge are wire tags (field_number << 3 | wire_type).
  """
  has_index_id_ = 0
  index_id_ = 0

  def __init__(self, contents=None):
    self.value_ = []
    if contents is not None: self.MergeFromString(contents)

  # --- index_id accessors ---
  def index_id(self): return self.index_id_

  def set_index_id(self, x):
    self.has_index_id_ = 1
    self.index_id_ = x

  def clear_index_id(self):
    if self.has_index_id_:
      self.has_index_id_ = 0
      self.index_id_ = 0

  def has_index_id(self): return self.has_index_id_

  # --- value (repeated) accessors ---
  def value_size(self): return len(self.value_)
  def value_list(self): return self.value_

  def value(self, i):
    return self.value_[i]

  def set_value(self, i, x):
    self.value_[i] = x

  def add_value(self, x):
    self.value_.append(x)

  def clear_value(self):
    self.value_ = []

  def MergeFrom(self, x):
    """Merge set fields of another CompositeProperty `x` into self."""
    assert x is not self
    if (x.has_index_id()): self.set_index_id(x.index_id())
    for i in xrange(x.value_size()): self.add_value(x.value(i))

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 (generated-code convention)."""
    if x is self: return 1
    if self.has_index_id_ != x.has_index_id_: return 0
    if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
    if len(self.value_) != len(x.value_): return 0
    for e1, e2 in zip(self.value_, x.value_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields are set (index_id only)."""
    initialized = 1
    if (not self.has_index_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: index_id not set.')
    return initialized

  def ByteSize(self):
    # Trailing "+ 1" accounts for the required index_id field's tag byte.
    n = 0
    n += self.lengthVarInt64(self.index_id_)
    n += 1 * len(self.value_)
    for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
    return n + 1

  def ByteSizePartial(self):
    # Like ByteSize, but only counts fields that are actually set.
    n = 0
    if (self.has_index_id_):
      n += 1
      n += self.lengthVarInt64(self.index_id_)
    n += 1 * len(self.value_)
    for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
    return n

  def Clear(self):
    self.clear_index_id()
    self.clear_value()

  def OutputUnchecked(self, out):
    # Assumes IsInitialized(); index_id (tag 8) is written unconditionally.
    out.putVarInt32(8)
    out.putVarInt64(self.index_id_)
    for i in xrange(len(self.value_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_[i])

  def OutputPartial(self, out):
    # Same layout, but index_id is guarded on its has_ flag.
    if (self.has_index_id_):
      out.putVarInt32(8)
      out.putVarInt64(self.index_id_)
    for i in xrange(len(self.value_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_[i])

  def TryMerge(self, d):
    # Unknown tags are skipped; tag 0 means corrupt input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_index_id(d.getVarInt64())
        continue
      if tt == 18:
        self.add_value(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text dump; `prefix` indents, `printElemNumber`
    appends a (n) index to repeated elements."""
    res=""
    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
    cnt=0
    for e in self.value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("value%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res

  # Densifies a sparse {tag: value} dict into a tuple indexable by tag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field numbers.
  kindex_id = 1
  kvalue = 2

  # Tag -> name and tag -> wire-type tables for the generic formatter.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "index_id",
    2: "value",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.CompositeProperty'
class Index_Property(ProtocolBuffer.ProtocolMessage):
  """Generated message for the Index.Property *group*.

  Because this is a proto1-style group, its tag constants and text/type
  tables live on the enclosing Index class (kPropertyGroup etc.), and
  TryMerge terminates on the ENDGROUP tag (20) rather than on buffer
  exhaustion.  Fields: name (required string), direction (optional enum),
  mode (optional enum).
  """
  # Direction enum values.
  DIRECTION_UNSPECIFIED =    0
  ASCENDING    =    1
  DESCENDING   =    2

  _Direction_NAMES = {
    0: "DIRECTION_UNSPECIFIED",
    1: "ASCENDING",
    2: "DESCENDING",
  }

  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
  Direction_Name = classmethod(Direction_Name)

  # Mode enum values.
  MODE_UNSPECIFIED =    0
  GEOSPATIAL   =    3

  _Mode_NAMES = {
    0: "MODE_UNSPECIFIED",
    3: "GEOSPATIAL",
  }

  def Mode_Name(cls, x): return cls._Mode_NAMES.get(x, "")
  Mode_Name = classmethod(Mode_Name)

  has_name_ = 0
  name_ = ""
  has_direction_ = 0
  direction_ = 0
  has_mode_ = 0
  mode_ = 0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  # --- name accessors ---
  def name(self): return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self): return self.has_name_

  # --- direction accessors ---
  def direction(self): return self.direction_

  def set_direction(self, x):
    self.has_direction_ = 1
    self.direction_ = x

  def clear_direction(self):
    if self.has_direction_:
      self.has_direction_ = 0
      self.direction_ = 0

  def has_direction(self): return self.has_direction_

  # --- mode accessors ---
  def mode(self): return self.mode_

  def set_mode(self, x):
    self.has_mode_ = 1
    self.mode_ = x

  def clear_mode(self):
    if self.has_mode_:
      self.has_mode_ = 0
      self.mode_ = 0

  def has_mode(self): return self.has_mode_

  def MergeFrom(self, x):
    """Merge set fields of another Index_Property `x` into self."""
    assert x is not self
    if (x.has_name()): self.set_name(x.name())
    if (x.has_direction()): self.set_direction(x.direction())
    if (x.has_mode()): self.set_mode(x.mode())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 (generated-code convention)."""
    if x is self: return 1
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_direction_ != x.has_direction_: return 0
    if self.has_direction_ and self.direction_ != x.direction_: return 0
    if self.has_mode_ != x.has_mode_: return 0
    if self.has_mode_ and self.mode_ != x.mode_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields are set (name only)."""
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    return initialized

  def ByteSize(self):
    # Trailing "+ 1" is the tag byte of the required name field.
    n = 0
    n += self.lengthString(len(self.name_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
    return n + 1

  def ByteSizePartial(self):
    # Like ByteSize, but only counts fields that are actually set.
    n = 0
    if (self.has_name_):
      n += 1
      n += self.lengthString(len(self.name_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
    return n

  def Clear(self):
    self.clear_name()
    self.clear_direction()
    self.clear_mode()

  def OutputUnchecked(self, out):
    # Assumes IsInitialized(); name (tag 26) is written unconditionally.
    out.putVarInt32(26)
    out.putPrefixedString(self.name_)
    if (self.has_direction_):
      out.putVarInt32(32)
      out.putVarInt32(self.direction_)
    if (self.has_mode_):
      out.putVarInt32(48)
      out.putVarInt32(self.mode_)

  def OutputPartial(self, out):
    # Same layout, but name is guarded on its has_ flag.
    if (self.has_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.name_)
    if (self.has_direction_):
      out.putVarInt32(32)
      out.putVarInt32(self.direction_)
    if (self.has_mode_):
      out.putVarInt32(48)
      out.putVarInt32(self.mode_)

  def TryMerge(self, d):
    # Group decoding: loop until the ENDGROUP tag (20) written by the
    # enclosing Index serializer; tag 0 means corrupt input.
    while 1:
      tt = d.getVarInt32()
      if tt == 20: break
      if tt == 26:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_direction(d.getVarInt32())
        continue
      if tt == 48:
        self.set_mode(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text dump of this group's set fields."""
    res=""
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
    if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
    return res
class Index(ProtocolBuffer.ProtocolMessage):
  """Generated message: a datastore index definition.

  Fields: entity_type (required string, field 1), ancestor (required bool,
  field 5), and Property (repeated group, field 2, see Index_Property).
  Groups are framed with STARTGROUP (tag 19) / ENDGROUP (tag 20) markers
  instead of a length prefix.
  """
  has_entity_type_ = 0
  entity_type_ = ""
  has_ancestor_ = 0
  ancestor_ = 0

  def __init__(self, contents=None):
    self.property_ = []
    if contents is not None: self.MergeFromString(contents)

  # --- entity_type accessors ---
  def entity_type(self): return self.entity_type_

  def set_entity_type(self, x):
    self.has_entity_type_ = 1
    self.entity_type_ = x

  def clear_entity_type(self):
    if self.has_entity_type_:
      self.has_entity_type_ = 0
      self.entity_type_ = ""

  def has_entity_type(self): return self.has_entity_type_

  # --- ancestor accessors ---
  def ancestor(self): return self.ancestor_

  def set_ancestor(self, x):
    self.has_ancestor_ = 1
    self.ancestor_ = x

  def clear_ancestor(self):
    if self.has_ancestor_:
      self.has_ancestor_ = 0
      self.ancestor_ = 0

  def has_ancestor(self): return self.has_ancestor_

  # --- property (repeated group) accessors ---
  def property_size(self): return len(self.property_)
  def property_list(self): return self.property_

  def property(self, i):
    return self.property_[i]

  def mutable_property(self, i):
    return self.property_[i]

  def add_property(self):
    x = Index_Property()
    self.property_.append(x)
    return x

  def clear_property(self):
    self.property_ = []

  def MergeFrom(self, x):
    """Merge set fields of another Index `x` into self."""
    assert x is not self
    if (x.has_entity_type()): self.set_entity_type(x.entity_type())
    if (x.has_ancestor()): self.set_ancestor(x.ancestor())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 (generated-code convention)."""
    if x is self: return 1
    if self.has_entity_type_ != x.has_entity_type_: return 0
    if self.has_entity_type_ and self.entity_type_ != x.entity_type_: return 0
    if self.has_ancestor_ != x.has_ancestor_: return 0
    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff entity_type, ancestor and all properties are set."""
    initialized = 1
    if (not self.has_entity_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: entity_type not set.')
    if (not self.has_ancestor_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: ancestor not set.')
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # "+ 3" = tag bytes of entity_type, ancestor, and its bool payload;
    # each group costs 2 framing bytes (START/ENDGROUP), hence 2 * len.
    n = 0
    n += self.lengthString(len(self.entity_type_))
    n += 2 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.property_[i].ByteSize()
    return n + 3

  def ByteSizePartial(self):
    # Like ByteSize, but only counts fields that are actually set.
    n = 0
    if (self.has_entity_type_):
      n += 1
      n += self.lengthString(len(self.entity_type_))
    if (self.has_ancestor_):
      n += 2
    n += 2 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.property_[i].ByteSizePartial()
    return n

  def Clear(self):
    self.clear_entity_type()
    self.clear_ancestor()
    self.clear_property()

  def OutputUnchecked(self, out):
    # Assumes IsInitialized(); groups are bracketed by tags 19/20.
    out.putVarInt32(10)
    out.putPrefixedString(self.entity_type_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(19)
      self.property_[i].OutputUnchecked(out)
      out.putVarInt32(20)
    out.putVarInt32(40)
    out.putBoolean(self.ancestor_)

  def OutputPartial(self, out):
    # Same layout, but required fields are guarded on their has_ flags.
    if (self.has_entity_type_):
      out.putVarInt32(10)
      out.putPrefixedString(self.entity_type_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(19)
      self.property_[i].OutputPartial(out)
      out.putVarInt32(20)
    if (self.has_ancestor_):
      out.putVarInt32(40)
      out.putBoolean(self.ancestor_)

  def TryMerge(self, d):
    # Tag 19 starts a Property group; Index_Property.TryMerge consumes
    # up to and including the matching ENDGROUP tag (20).
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_entity_type(d.getPrefixedString())
        continue
      if tt == 19:
        self.add_property().TryMerge(d)
        continue
      if tt == 40:
        self.set_ancestor(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text dump; groups render with {...} braces."""
    res=""
    if self.has_entity_type_: res+=prefix+("entity_type: %s\n" % self.DebugFormatString(self.entity_type_))
    if self.has_ancestor_: res+=prefix+("ancestor: %s\n" % self.DebugFormatBool(self.ancestor_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Property%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res

  # Densifies a sparse {tag: value} dict into a tuple indexable by tag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field numbers, including those of the nested Property group.
  kentity_type = 1
  kancestor = 5
  kPropertyGroup = 2
  kPropertyname = 3
  kPropertydirection = 4
  kPropertymode = 6

  # Tag -> name and tag -> wire-type tables for the generic formatter.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "entity_type",
    2: "Property",
    3: "name",
    4: "direction",
    5: "ancestor",
    6: "mode",
  }, 6)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STARTGROUP,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.NUMERIC,
    6: ProtocolBuffer.Encoder.NUMERIC,
  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Index'
class CompositeIndex(ProtocolBuffer.ProtocolMessage):
  """Generated message: metadata about one composite index.

  Required fields: app_id (string, 1), id (int64, 2), definition (Index
  message, 3), state (enum, 4).  Optional: workflow_state (enum, 10),
  error_message (string, 11), only_use_if_required (bool, 6),
  disabled_index (bool, 9), read_division_family (repeated string, 7),
  write_division_family (string, 8).
  """
  # State enum values.
  WRITE_ONLY   =    1
  READ_WRITE   =    2
  DELETED      =    3
  ERROR        =    4

  _State_NAMES = {
    1: "WRITE_ONLY",
    2: "READ_WRITE",
    3: "DELETED",
    4: "ERROR",
  }

  def State_Name(cls, x): return cls._State_NAMES.get(x, "")
  State_Name = classmethod(State_Name)

  # WorkflowState enum values.
  PENDING      =    1
  ACTIVE       =    2
  COMPLETED    =    3

  _WorkflowState_NAMES = {
    1: "PENDING",
    2: "ACTIVE",
    3: "COMPLETED",
  }

  def WorkflowState_Name(cls, x): return cls._WorkflowState_NAMES.get(x, "")
  WorkflowState_Name = classmethod(WorkflowState_Name)

  has_app_id_ = 0
  app_id_ = ""
  has_id_ = 0
  id_ = 0
  has_definition_ = 0
  has_state_ = 0
  state_ = 0
  has_workflow_state_ = 0
  workflow_state_ = 0
  has_error_message_ = 0
  error_message_ = ""
  has_only_use_if_required_ = 0
  only_use_if_required_ = 0
  has_disabled_index_ = 0
  disabled_index_ = 0
  has_write_division_family_ = 0
  write_division_family_ = ""

  def __init__(self, contents=None):
    self.definition_ = Index()
    self.read_division_family_ = []
    if contents is not None: self.MergeFromString(contents)

  # --- app_id accessors ---
  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  # --- id accessors ---
  def id(self): return self.id_

  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x

  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0

  def has_id(self): return self.has_id_

  # --- definition (Index submessage) accessors ---
  def definition(self): return self.definition_

  def mutable_definition(self): self.has_definition_ = 1; return self.definition_

  def clear_definition(self):self.has_definition_ = 0; self.definition_.Clear()

  def has_definition(self): return self.has_definition_

  # --- state accessors ---
  def state(self): return self.state_

  def set_state(self, x):
    self.has_state_ = 1
    self.state_ = x

  def clear_state(self):
    if self.has_state_:
      self.has_state_ = 0
      self.state_ = 0

  def has_state(self): return self.has_state_

  # --- workflow_state accessors ---
  def workflow_state(self): return self.workflow_state_

  def set_workflow_state(self, x):
    self.has_workflow_state_ = 1
    self.workflow_state_ = x

  def clear_workflow_state(self):
    if self.has_workflow_state_:
      self.has_workflow_state_ = 0
      self.workflow_state_ = 0

  def has_workflow_state(self): return self.has_workflow_state_

  # --- error_message accessors ---
  def error_message(self): return self.error_message_

  def set_error_message(self, x):
    self.has_error_message_ = 1
    self.error_message_ = x

  def clear_error_message(self):
    if self.has_error_message_:
      self.has_error_message_ = 0
      self.error_message_ = ""

  def has_error_message(self): return self.has_error_message_

  # --- only_use_if_required accessors ---
  def only_use_if_required(self): return self.only_use_if_required_

  def set_only_use_if_required(self, x):
    self.has_only_use_if_required_ = 1
    self.only_use_if_required_ = x

  def clear_only_use_if_required(self):
    if self.has_only_use_if_required_:
      self.has_only_use_if_required_ = 0
      self.only_use_if_required_ = 0

  def has_only_use_if_required(self): return self.has_only_use_if_required_

  # --- disabled_index accessors ---
  def disabled_index(self): return self.disabled_index_

  def set_disabled_index(self, x):
    self.has_disabled_index_ = 1
    self.disabled_index_ = x

  def clear_disabled_index(self):
    if self.has_disabled_index_:
      self.has_disabled_index_ = 0
      self.disabled_index_ = 0

  def has_disabled_index(self): return self.has_disabled_index_

  # --- read_division_family (repeated) accessors ---
  def read_division_family_size(self): return len(self.read_division_family_)
  def read_division_family_list(self): return self.read_division_family_

  def read_division_family(self, i):
    return self.read_division_family_[i]

  def set_read_division_family(self, i, x):
    self.read_division_family_[i] = x

  def add_read_division_family(self, x):
    self.read_division_family_.append(x)

  def clear_read_division_family(self):
    self.read_division_family_ = []

  # --- write_division_family accessors ---
  def write_division_family(self): return self.write_division_family_

  def set_write_division_family(self, x):
    self.has_write_division_family_ = 1
    self.write_division_family_ = x

  def clear_write_division_family(self):
    if self.has_write_division_family_:
      self.has_write_division_family_ = 0
      self.write_division_family_ = ""

  def has_write_division_family(self): return self.has_write_division_family_

  def MergeFrom(self, x):
    """Merge set fields of another CompositeIndex `x` into self."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_definition()): self.mutable_definition().MergeFrom(x.definition())
    if (x.has_state()): self.set_state(x.state())
    if (x.has_workflow_state()): self.set_workflow_state(x.workflow_state())
    if (x.has_error_message()): self.set_error_message(x.error_message())
    if (x.has_only_use_if_required()): self.set_only_use_if_required(x.only_use_if_required())
    if (x.has_disabled_index()): self.set_disabled_index(x.disabled_index())
    for i in xrange(x.read_division_family_size()): self.add_read_division_family(x.read_division_family(i))
    if (x.has_write_division_family()): self.set_write_division_family(x.write_division_family())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 (generated-code convention)."""
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_definition_ != x.has_definition_: return 0
    if self.has_definition_ and self.definition_ != x.definition_: return 0
    if self.has_state_ != x.has_state_: return 0
    if self.has_state_ and self.state_ != x.state_: return 0
    if self.has_workflow_state_ != x.has_workflow_state_: return 0
    if self.has_workflow_state_ and self.workflow_state_ != x.workflow_state_: return 0
    if self.has_error_message_ != x.has_error_message_: return 0
    if self.has_error_message_ and self.error_message_ != x.error_message_: return 0
    if self.has_only_use_if_required_ != x.has_only_use_if_required_: return 0
    if self.has_only_use_if_required_ and self.only_use_if_required_ != x.only_use_if_required_: return 0
    if self.has_disabled_index_ != x.has_disabled_index_: return 0
    if self.has_disabled_index_ and self.disabled_index_ != x.disabled_index_: return 0
    if len(self.read_division_family_) != len(x.read_division_family_): return 0
    for e1, e2 in zip(self.read_division_family_, x.read_division_family_):
      if e1 != e2: return 0
    if self.has_write_division_family_ != x.has_write_division_family_: return 0
    if self.has_write_division_family_ and self.write_division_family_ != x.write_division_family_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff app_id, id, definition (recursively) and state are set."""
    initialized = 1
    if (not self.has_app_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app_id not set.')
    if (not self.has_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: id not set.')
    if (not self.has_definition_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: definition not set.')
    elif not self.definition_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_state_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: state not set.')
    return initialized

  def ByteSize(self):
    # "+ 4" = one tag byte for each of the four required fields.
    n = 0
    n += self.lengthString(len(self.app_id_))
    n += self.lengthVarInt64(self.id_)
    n += self.lengthString(self.definition_.ByteSize())
    n += self.lengthVarInt64(self.state_)
    if (self.has_workflow_state_): n += 1 + self.lengthVarInt64(self.workflow_state_)
    if (self.has_error_message_): n += 1 + self.lengthString(len(self.error_message_))
    if (self.has_only_use_if_required_): n += 2
    if (self.has_disabled_index_): n += 2
    n += 1 * len(self.read_division_family_)
    for i in xrange(len(self.read_division_family_)): n += self.lengthString(len(self.read_division_family_[i]))
    if (self.has_write_division_family_): n += 1 + self.lengthString(len(self.write_division_family_))
    return n

  def ByteSizePartial(self):
    # Like ByteSize, but only counts fields that are actually set.
    n = 0
    if (self.has_app_id_):
      n += 1
      n += self.lengthString(len(self.app_id_))
    if (self.has_id_):
      n += 1
      n += self.lengthVarInt64(self.id_)
    if (self.has_definition_):
      n += 1
      n += self.lengthString(self.definition_.ByteSizePartial())
    if (self.has_state_):
      n += 1
      n += self.lengthVarInt64(self.state_)
    if (self.has_workflow_state_): n += 1 + self.lengthVarInt64(self.workflow_state_)
    if (self.has_error_message_): n += 1 + self.lengthString(len(self.error_message_))
    if (self.has_only_use_if_required_): n += 2
    if (self.has_disabled_index_): n += 2
    n += 1 * len(self.read_division_family_)
    for i in xrange(len(self.read_division_family_)): n += self.lengthString(len(self.read_division_family_[i]))
    if (self.has_write_division_family_): n += 1 + self.lengthString(len(self.write_division_family_))
    return n

  def Clear(self):
    self.clear_app_id()
    self.clear_id()
    self.clear_definition()
    self.clear_state()
    self.clear_workflow_state()
    self.clear_error_message()
    self.clear_only_use_if_required()
    self.clear_disabled_index()
    self.clear_read_division_family()
    self.clear_write_division_family()

  def OutputUnchecked(self, out):
    # Assumes IsInitialized(); required fields are written unconditionally.
    # Fields are emitted in tag order except workflow_state/error_message,
    # which carry higher field numbers (10, 11) and come last.
    out.putVarInt32(10)
    out.putPrefixedString(self.app_id_)
    out.putVarInt32(16)
    out.putVarInt64(self.id_)
    out.putVarInt32(26)
    out.putVarInt32(self.definition_.ByteSize())
    self.definition_.OutputUnchecked(out)
    out.putVarInt32(32)
    out.putVarInt32(self.state_)
    if (self.has_only_use_if_required_):
      out.putVarInt32(48)
      out.putBoolean(self.only_use_if_required_)
    for i in xrange(len(self.read_division_family_)):
      out.putVarInt32(58)
      out.putPrefixedString(self.read_division_family_[i])
    if (self.has_write_division_family_):
      out.putVarInt32(66)
      out.putPrefixedString(self.write_division_family_)
    if (self.has_disabled_index_):
      out.putVarInt32(72)
      out.putBoolean(self.disabled_index_)
    if (self.has_workflow_state_):
      out.putVarInt32(80)
      out.putVarInt32(self.workflow_state_)
    if (self.has_error_message_):
      out.putVarInt32(90)
      out.putPrefixedString(self.error_message_)

  def OutputPartial(self, out):
    # Same layout, but every field is guarded on its has_ flag and nested
    # lengths use ByteSizePartial.
    if (self.has_app_id_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_id_)
    if (self.has_id_):
      out.putVarInt32(16)
      out.putVarInt64(self.id_)
    if (self.has_definition_):
      out.putVarInt32(26)
      out.putVarInt32(self.definition_.ByteSizePartial())
      self.definition_.OutputPartial(out)
    if (self.has_state_):
      out.putVarInt32(32)
      out.putVarInt32(self.state_)
    if (self.has_only_use_if_required_):
      out.putVarInt32(48)
      out.putBoolean(self.only_use_if_required_)
    for i in xrange(len(self.read_division_family_)):
      out.putVarInt32(58)
      out.putPrefixedString(self.read_division_family_[i])
    if (self.has_write_division_family_):
      out.putVarInt32(66)
      out.putPrefixedString(self.write_division_family_)
    if (self.has_disabled_index_):
      out.putVarInt32(72)
      out.putBoolean(self.disabled_index_)
    if (self.has_workflow_state_):
      out.putVarInt32(80)
      out.putVarInt32(self.workflow_state_)
    if (self.has_error_message_):
      out.putVarInt32(90)
      out.putPrefixedString(self.error_message_)

  def TryMerge(self, d):
    # Unknown tags are skipped; tag 0 means corrupt input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_id(d.getVarInt64())
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_definition().TryMerge(tmp)
        continue
      if tt == 32:
        self.set_state(d.getVarInt32())
        continue
      if tt == 48:
        self.set_only_use_if_required(d.getBoolean())
        continue
      if tt == 58:
        self.add_read_division_family(d.getPrefixedString())
        continue
      if tt == 66:
        self.set_write_division_family(d.getPrefixedString())
        continue
      if tt == 72:
        self.set_disabled_index(d.getBoolean())
        continue
      if tt == 80:
        self.set_workflow_state(d.getVarInt32())
        continue
      if tt == 90:
        self.set_error_message(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text dump; `prefix` indents nested messages."""
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_definition_:
      res+=prefix+"definition <\n"
      res+=self.definition_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_state_: res+=prefix+("state: %s\n" % self.DebugFormatInt32(self.state_))
    if self.has_workflow_state_: res+=prefix+("workflow_state: %s\n" % self.DebugFormatInt32(self.workflow_state_))
    if self.has_error_message_: res+=prefix+("error_message: %s\n" % self.DebugFormatString(self.error_message_))
    if self.has_only_use_if_required_: res+=prefix+("only_use_if_required: %s\n" % self.DebugFormatBool(self.only_use_if_required_))
    if self.has_disabled_index_: res+=prefix+("disabled_index: %s\n" % self.DebugFormatBool(self.disabled_index_))
    cnt=0
    for e in self.read_division_family_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("read_division_family%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_write_division_family_: res+=prefix+("write_division_family: %s\n" % self.DebugFormatString(self.write_division_family_))
    return res

  # Densifies a sparse {tag: value} dict into a tuple indexable by tag.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field numbers.
  kapp_id = 1
  kid = 2
  kdefinition = 3
  kstate = 4
  kworkflow_state = 10
  kerror_message = 11
  konly_use_if_required = 6
  kdisabled_index = 9
  kread_division_family = 7
  kwrite_division_family = 8

  # Tag -> name and tag -> wire-type tables for the generic formatter.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app_id",
    2: "id",
    3: "definition",
    4: "state",
    6: "only_use_if_required",
    7: "read_division_family",
    8: "write_division_family",
    9: "disabled_index",
    10: "workflow_state",
    11: "error_message",
  }, 11)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    6: ProtocolBuffer.Encoder.NUMERIC,
    7: ProtocolBuffer.Encoder.STRING,
    8: ProtocolBuffer.Encoder.STRING,
    9: ProtocolBuffer.Encoder.NUMERIC,
    10: ProtocolBuffer.Encoder.NUMERIC,
    11: ProtocolBuffer.Encoder.STRING,
  }, 11, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.CompositeIndex'
class SearchIndexEntry(ProtocolBuffer.ProtocolMessage):
has_index_id_ = 0
index_id_ = 0
has_write_division_family_ = 0
write_division_family_ = ""
has_fingerprint_1999_ = 0
fingerprint_1999_ = 0
has_fingerprint_2011_ = 0
fingerprint_2011_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def index_id(self): return self.index_id_
def set_index_id(self, x):
self.has_index_id_ = 1
self.index_id_ = x
def clear_index_id(self):
if self.has_index_id_:
self.has_index_id_ = 0
self.index_id_ = 0
def has_index_id(self): return self.has_index_id_
def write_division_family(self): return self.write_division_family_
def set_write_division_family(self, x):
self.has_write_division_family_ = 1
self.write_division_family_ = x
def clear_write_division_family(self):
if self.has_write_division_family_:
self.has_write_division_family_ = 0
self.write_division_family_ = ""
def has_write_division_family(self): return self.has_write_division_family_
def fingerprint_1999(self): return self.fingerprint_1999_
def set_fingerprint_1999(self, x):
self.has_fingerprint_1999_ = 1
self.fingerprint_1999_ = x
def clear_fingerprint_1999(self):
if self.has_fingerprint_1999_:
self.has_fingerprint_1999_ = 0
self.fingerprint_1999_ = 0
def has_fingerprint_1999(self): return self.has_fingerprint_1999_
def fingerprint_2011(self): return self.fingerprint_2011_
def set_fingerprint_2011(self, x):
self.has_fingerprint_2011_ = 1
self.fingerprint_2011_ = x
def clear_fingerprint_2011(self):
if self.has_fingerprint_2011_:
self.has_fingerprint_2011_ = 0
self.fingerprint_2011_ = 0
def has_fingerprint_2011(self): return self.has_fingerprint_2011_
def MergeFrom(self, x):
assert x is not self
if (x.has_index_id()): self.set_index_id(x.index_id())
if (x.has_write_division_family()): self.set_write_division_family(x.write_division_family())
if (x.has_fingerprint_1999()): self.set_fingerprint_1999(x.fingerprint_1999())
if (x.has_fingerprint_2011()): self.set_fingerprint_2011(x.fingerprint_2011())
def Equals(self, x):
if x is self: return 1
if self.has_index_id_ != x.has_index_id_: return 0
if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
if self.has_write_division_family_ != x.has_write_division_family_: return 0
if self.has_write_division_family_ and self.write_division_family_ != x.write_division_family_: return 0
if self.has_fingerprint_1999_ != x.has_fingerprint_1999_: return 0
if self.has_fingerprint_1999_ and self.fingerprint_1999_ != x.fingerprint_1999_: return 0
if self.has_fingerprint_2011_ != x.has_fingerprint_2011_: return 0
if self.has_fingerprint_2011_ and self.fingerprint_2011_ != x.fingerprint_2011_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_index_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: index_id not set.')
if (not self.has_write_division_family_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: write_division_family not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.index_id_)
n += self.lengthString(len(self.write_division_family_))
if (self.has_fingerprint_1999_): n += 9
if (self.has_fingerprint_2011_): n += 9
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_index_id_):
n += 1
n += self.lengthVarInt64(self.index_id_)
if (self.has_write_division_family_):
n += 1
n += self.lengthString(len(self.write_division_family_))
if (self.has_fingerprint_1999_): n += 9
if (self.has_fingerprint_2011_): n += 9
return n
  def Clear(self):
    """Reset every field to its default value."""
    self.clear_index_id()
    self.clear_write_division_family()
    self.clear_fingerprint_1999()
    self.clear_fingerprint_2011()
  def OutputUnchecked(self, out):
    """Serialize to encoder `out` without verifying required fields are set."""
    out.putVarInt32(8)
    out.putVarInt64(self.index_id_)
    out.putVarInt32(18)
    out.putPrefixedString(self.write_division_family_)
    if (self.has_fingerprint_1999_):
      out.putVarInt32(25)
      out.put64(self.fingerprint_1999_)
    if (self.has_fingerprint_2011_):
      out.putVarInt32(33)
      out.put64(self.fingerprint_2011_)
  def OutputPartial(self, out):
    """Serialize only the fields that are set (required fields may be absent)."""
    if (self.has_index_id_):
      out.putVarInt32(8)
      out.putVarInt64(self.index_id_)
    if (self.has_write_division_family_):
      out.putVarInt32(18)
      out.putPrefixedString(self.write_division_family_)
    if (self.has_fingerprint_1999_):
      out.putVarInt32(25)
      out.put64(self.fingerprint_1999_)
    if (self.has_fingerprint_2011_):
      out.putVarInt32(33)
      out.put64(self.fingerprint_2011_)
  def TryMerge(self, d):
    """Decode fields from decoder `d` into self, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_index_id(d.getVarInt64())
        continue
      if tt == 18:
        self.set_write_division_family(d.getPrefixedString())
        continue
      if tt == 25:
        self.set_fingerprint_1999(d.get64())
        continue
      if tt == 33:
        self.set_fingerprint_2011(d.get64())
        continue
      # tag 0 is never valid on the wire: corrupt input.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Debug text rendering of the set fields, one per line."""
    res=""
    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
    if self.has_write_division_family_: res+=prefix+("write_division_family: %s\n" % self.DebugFormatString(self.write_division_family_))
    if self.has_fingerprint_1999_: res+=prefix+("fingerprint_1999: %s\n" % self.DebugFormatFixed64(self.fingerprint_1999_))
    if self.has_fingerprint_2011_: res+=prefix+("fingerprint_2011: %s\n" % self.DebugFormatFixed64(self.fingerprint_2011_))
    return res
  # Expand a sparse {tag: value} dict into a dense tuple indexed by tag number.
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kindex_id = 1
kwrite_division_family = 2
kfingerprint_1999 = 3
kfingerprint_2011 = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "index_id",
2: "write_division_family",
3: "fingerprint_1999",
4: "fingerprint_2011",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.DOUBLE,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.SearchIndexEntry'
class SearchIndexExternalId(ProtocolBuffer.ProtocolMessage):
  """Generated message: (required int64 index_id, required Reference primary_key).

  Generated protocol-buffer code - do not hand-edit the wire logic.
  """
  has_index_id_ = 0
  index_id_ = 0
  has_primary_key_ = 0
  def __init__(self, contents=None):
    self.primary_key_ = Reference()
    if contents is not None: self.MergeFromString(contents)
  def index_id(self): return self.index_id_
  def set_index_id(self, x):
    self.has_index_id_ = 1
    self.index_id_ = x
  def clear_index_id(self):
    if self.has_index_id_:
      self.has_index_id_ = 0
      self.index_id_ = 0
  def has_index_id(self): return self.has_index_id_
  def primary_key(self): return self.primary_key_
  def mutable_primary_key(self): self.has_primary_key_ = 1; return self.primary_key_
  def clear_primary_key(self):self.has_primary_key_ = 0; self.primary_key_.Clear()
  def has_primary_key(self): return self.has_primary_key_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_index_id()): self.set_index_id(x.index_id())
    if (x.has_primary_key()): self.mutable_primary_key().MergeFrom(x.primary_key())
  def Equals(self, x):
    if x is self: return 1
    if self.has_index_id_ != x.has_index_id_: return 0
    if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
    if self.has_primary_key_ != x.has_primary_key_: return 0
    if self.has_primary_key_ and self.primary_key_ != x.primary_key_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_index_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: index_id not set.')
    if (not self.has_primary_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: primary_key not set.')
    elif not self.primary_key_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.index_id_)
    n += self.lengthString(self.primary_key_.ByteSize())
    # + 2 accounts for the tag bytes of the two required fields.
    return n + 2
  def ByteSizePartial(self):
    n = 0
    if (self.has_index_id_):
      n += 1
      n += self.lengthVarInt64(self.index_id_)
    if (self.has_primary_key_):
      n += 1
      n += self.lengthString(self.primary_key_.ByteSizePartial())
    return n
  def Clear(self):
    self.clear_index_id()
    self.clear_primary_key()
  def OutputUnchecked(self, out):
    out.putVarInt32(8)
    out.putVarInt64(self.index_id_)
    out.putVarInt32(18)
    out.putVarInt32(self.primary_key_.ByteSize())
    self.primary_key_.OutputUnchecked(out)
  def OutputPartial(self, out):
    if (self.has_index_id_):
      out.putVarInt32(8)
      out.putVarInt64(self.index_id_)
    if (self.has_primary_key_):
      out.putVarInt32(18)
      out.putVarInt32(self.primary_key_.ByteSizePartial())
      self.primary_key_.OutputPartial(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_index_id(d.getVarInt64())
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_primary_key().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
    if self.has_primary_key_:
      res+=prefix+"primary_key <\n"
      res+=self.primary_key_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kindex_id = 1
  kprimary_key = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "index_id",
    2: "primary_key",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.SearchIndexExternalId'
class IndexPostfix_IndexValue(ProtocolBuffer.ProtocolMessage):
  """Generated message: (required string property_name, required PropertyValue value).

  Generated protocol-buffer code - do not hand-edit the wire logic.
  """
  has_property_name_ = 0
  property_name_ = ""
  has_value_ = 0
  def __init__(self, contents=None):
    self.value_ = PropertyValue()
    if contents is not None: self.MergeFromString(contents)
  def property_name(self): return self.property_name_
  def set_property_name(self, x):
    self.has_property_name_ = 1
    self.property_name_ = x
  def clear_property_name(self):
    if self.has_property_name_:
      self.has_property_name_ = 0
      self.property_name_ = ""
  def has_property_name(self): return self.has_property_name_
  def value(self): return self.value_
  def mutable_value(self): self.has_value_ = 1; return self.value_
  def clear_value(self):self.has_value_ = 0; self.value_.Clear()
  def has_value(self): return self.has_value_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_property_name()): self.set_property_name(x.property_name())
    if (x.has_value()): self.mutable_value().MergeFrom(x.value())
  def Equals(self, x):
    if x is self: return 1
    if self.has_property_name_ != x.has_property_name_: return 0
    if self.has_property_name_ and self.property_name_ != x.property_name_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_property_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: property_name not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    elif not self.value_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.property_name_))
    n += self.lengthString(self.value_.ByteSize())
    # + 2 accounts for the tag bytes of the two required fields.
    return n + 2
  def ByteSizePartial(self):
    n = 0
    if (self.has_property_name_):
      n += 1
      n += self.lengthString(len(self.property_name_))
    if (self.has_value_):
      n += 1
      n += self.lengthString(self.value_.ByteSizePartial())
    return n
  def Clear(self):
    self.clear_property_name()
    self.clear_value()
  def OutputUnchecked(self, out):
    out.putVarInt32(10)
    out.putPrefixedString(self.property_name_)
    out.putVarInt32(18)
    out.putVarInt32(self.value_.ByteSize())
    self.value_.OutputUnchecked(out)
  def OutputPartial(self, out):
    if (self.has_property_name_):
      out.putVarInt32(10)
      out.putPrefixedString(self.property_name_)
    if (self.has_value_):
      out.putVarInt32(18)
      out.putVarInt32(self.value_.ByteSizePartial())
      self.value_.OutputPartial(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_property_name(d.getPrefixedString())
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_value().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_property_name_: res+=prefix+("property_name: %s\n" % self.DebugFormatString(self.property_name_))
    if self.has_value_:
      res+=prefix+"value <\n"
      res+=self.value_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kproperty_name = 1
  kvalue = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "property_name",
    2: "value",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.IndexPostfix_IndexValue'
class IndexPostfix(ProtocolBuffer.ProtocolMessage):
  """Generated message: repeated IndexValue index_value, optional Reference key,
  optional bool before (default 1), optional bool before_ascending.

  Generated protocol-buffer code - do not hand-edit the wire logic.
  """
  has_key_ = 0
  key_ = None
  has_before_ = 0
  before_ = 1
  has_before_ascending_ = 0
  before_ascending_ = 0
  def __init__(self, contents=None):
    self.index_value_ = []
    # key_ is lazily constructed; the lock guards first-touch initialization.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def index_value_size(self): return len(self.index_value_)
  def index_value_list(self): return self.index_value_
  def index_value(self, i):
    return self.index_value_[i]
  def mutable_index_value(self, i):
    return self.index_value_[i]
  def add_index_value(self):
    x = IndexPostfix_IndexValue()
    self.index_value_.append(x)
    return x
  def clear_index_value(self):
    self.index_value_ = []
  def key(self):
    # Double-checked lazy initialization of the sub-message.
    if self.key_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.key_ is None: self.key_ = Reference()
      finally:
        self.lazy_init_lock_.release()
    return self.key_
  def mutable_key(self): self.has_key_ = 1; return self.key()
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0;
      if self.key_ is not None: self.key_.Clear()
  def has_key(self): return self.has_key_
  def before(self): return self.before_
  def set_before(self, x):
    self.has_before_ = 1
    self.before_ = x
  def clear_before(self):
    if self.has_before_:
      self.has_before_ = 0
      self.before_ = 1
  def has_before(self): return self.has_before_
  def before_ascending(self): return self.before_ascending_
  def set_before_ascending(self, x):
    self.has_before_ascending_ = 1
    self.before_ascending_ = x
  def clear_before_ascending(self):
    if self.has_before_ascending_:
      self.has_before_ascending_ = 0
      self.before_ascending_ = 0
  def has_before_ascending(self): return self.has_before_ascending_
  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.index_value_size()): self.add_index_value().CopyFrom(x.index_value(i))
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_before()): self.set_before(x.before())
    if (x.has_before_ascending()): self.set_before_ascending(x.before_ascending())
  def Equals(self, x):
    if x is self: return 1
    if len(self.index_value_) != len(x.index_value_): return 0
    for e1, e2 in zip(self.index_value_, x.index_value_):
      if e1 != e2: return 0
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_before_ != x.has_before_: return 0
    if self.has_before_ and self.before_ != x.before_: return 0
    if self.has_before_ascending_ != x.has_before_ascending_: return 0
    if self.has_before_ascending_ and self.before_ascending_ != x.before_ascending_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.index_value_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    n += 1 * len(self.index_value_)
    for i in xrange(len(self.index_value_)): n += self.lengthString(self.index_value_[i].ByteSize())
    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
    if (self.has_before_): n += 2
    if (self.has_before_ascending_): n += 2
    return n
  def ByteSizePartial(self):
    n = 0
    n += 1 * len(self.index_value_)
    for i in xrange(len(self.index_value_)): n += self.lengthString(self.index_value_[i].ByteSizePartial())
    if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
    if (self.has_before_): n += 2
    if (self.has_before_ascending_): n += 2
    return n
  def Clear(self):
    self.clear_index_value()
    self.clear_key()
    self.clear_before()
    self.clear_before_ascending()
  def OutputUnchecked(self, out):
    for i in xrange(len(self.index_value_)):
      out.putVarInt32(10)
      out.putVarInt32(self.index_value_[i].ByteSize())
      self.index_value_[i].OutputUnchecked(out)
    if (self.has_key_):
      out.putVarInt32(18)
      out.putVarInt32(self.key_.ByteSize())
      self.key_.OutputUnchecked(out)
    if (self.has_before_):
      out.putVarInt32(24)
      out.putBoolean(self.before_)
    if (self.has_before_ascending_):
      out.putVarInt32(32)
      out.putBoolean(self.before_ascending_)
  def OutputPartial(self, out):
    for i in xrange(len(self.index_value_)):
      out.putVarInt32(10)
      out.putVarInt32(self.index_value_[i].ByteSizePartial())
      self.index_value_[i].OutputPartial(out)
    if (self.has_key_):
      out.putVarInt32(18)
      out.putVarInt32(self.key_.ByteSizePartial())
      self.key_.OutputPartial(out)
    if (self.has_before_):
      out.putVarInt32(24)
      out.putBoolean(self.before_)
    if (self.has_before_ascending_):
      out.putVarInt32(32)
      out.putBoolean(self.before_ascending_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_index_value().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_before(d.getBoolean())
        continue
      if tt == 32:
        self.set_before_ascending(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.index_value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("index_value%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_before_: res+=prefix+("before: %s\n" % self.DebugFormatBool(self.before_))
    if self.has_before_ascending_: res+=prefix+("before_ascending: %s\n" % self.DebugFormatBool(self.before_ascending_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kindex_value = 1
  kkey = 2
  kbefore = 3
  kbefore_ascending = 4
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "index_value",
    2: "key",
    3: "before",
    4: "before_ascending",
  }, 4)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.IndexPostfix'
class IndexPosition(ProtocolBuffer.ProtocolMessage):
  """Generated message: optional string key, optional bool before (default 1),
  optional bool before_ascending.

  Generated protocol-buffer code - do not hand-edit the wire logic.
  """
  has_key_ = 0
  key_ = ""
  has_before_ = 0
  before_ = 1
  has_before_ascending_ = 0
  before_ascending_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def before(self): return self.before_
  def set_before(self, x):
    self.has_before_ = 1
    self.before_ = x
  def clear_before(self):
    if self.has_before_:
      self.has_before_ = 0
      self.before_ = 1
  def has_before(self): return self.has_before_
  def before_ascending(self): return self.before_ascending_
  def set_before_ascending(self, x):
    self.has_before_ascending_ = 1
    self.before_ascending_ = x
  def clear_before_ascending(self):
    if self.has_before_ascending_:
      self.has_before_ascending_ = 0
      self.before_ascending_ = 0
  def has_before_ascending(self): return self.has_before_ascending_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_before()): self.set_before(x.before())
    if (x.has_before_ascending()): self.set_before_ascending(x.before_ascending())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_before_ != x.has_before_: return 0
    if self.has_before_ and self.before_ != x.before_: return 0
    if self.has_before_ascending_ != x.has_before_ascending_: return 0
    if self.has_before_ascending_ and self.before_ascending_ != x.before_ascending_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields, so the message is always initialized.
    initialized = 1
    return initialized
  def ByteSize(self):
    n = 0
    if (self.has_key_): n += 1 + self.lengthString(len(self.key_))
    if (self.has_before_): n += 2
    if (self.has_before_ascending_): n += 2
    return n
  def ByteSizePartial(self):
    n = 0
    if (self.has_key_): n += 1 + self.lengthString(len(self.key_))
    if (self.has_before_): n += 2
    if (self.has_before_ascending_): n += 2
    return n
  def Clear(self):
    self.clear_key()
    self.clear_before()
    self.clear_before_ascending()
  def OutputUnchecked(self, out):
    if (self.has_key_):
      out.putVarInt32(10)
      out.putPrefixedString(self.key_)
    if (self.has_before_):
      out.putVarInt32(16)
      out.putBoolean(self.before_)
    if (self.has_before_ascending_):
      out.putVarInt32(24)
      out.putBoolean(self.before_ascending_)
  def OutputPartial(self, out):
    if (self.has_key_):
      out.putVarInt32(10)
      out.putPrefixedString(self.key_)
    if (self.has_before_):
      out.putVarInt32(16)
      out.putBoolean(self.before_)
    if (self.has_before_ascending_):
      out.putVarInt32(24)
      out.putBoolean(self.before_ascending_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_before(d.getBoolean())
        continue
      if tt == 24:
        self.set_before_ascending(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_before_: res+=prefix+("before: %s\n" % self.DebugFormatBool(self.before_))
    if self.has_before_ascending_: res+=prefix+("before_ascending: %s\n" % self.DebugFormatBool(self.before_ascending_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  kkey = 1
  kbefore = 2
  kbefore_ascending = 3
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "before",
    3: "before_ascending",
  }, 3)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.IndexPosition'
# Hook point for runtime-specific pb extensions; a no-op in this build.
if _extension_runtime:
  pass
# Public API of this generated module.
__all__ = ['PropertyValue','PropertyValue_ReferenceValuePathElement','PropertyValue_PointValue','PropertyValue_UserValue','PropertyValue_ReferenceValue','Property','Path','Path_Element','Reference','User','EntityProto','CompositeProperty','Index','Index_Property','CompositeIndex','SearchIndexEntry','SearchIndexExternalId','IndexPostfix_IndexValue','IndexPostfix','IndexPosition']
| true | true |
f73d1e2ab377c5666108bb9937d32b45afad104a | 1,519 | py | Python | uppsell/util/serialize.py | upptalk/uppsell | 843dfe81dbbd7d0cc920e6c8256d27e27c3106fe | [
"MIT"
] | 2 | 2020-01-10T18:21:32.000Z | 2021-01-31T13:53:28.000Z | uppsell/util/serialize.py | gaybro8777/uppsell | 843dfe81dbbd7d0cc920e6c8256d27e27c3106fe | [
"MIT"
] | 4 | 2015-12-03T22:59:05.000Z | 2021-06-10T19:12:59.000Z | uppsell/util/serialize.py | gaybro8777/uppsell | 843dfe81dbbd7d0cc920e6c8256d27e27c3106fe | [
"MIT"
] | 1 | 2021-02-08T08:21:22.000Z | 2021-02-08T08:21:22.000Z | import json, uuid
from datetime import datetime, date
from decimal import Decimal
from werkzeug.http import http_date
from django.db.models import Model
from django.db.models.base import ModelBase
from django.db.models.query import QuerySet, ValuesQuerySet
from django.db.models.fields.related import ManyToManyField
from uppsell.models import Urn
def model_to_dict(instance):
    """Serialize a Django model instance to {field name: raw value}.

    Unlike django.forms.models.model_to_dict this includes every concrete
    field, editable or not; many-to-many fields become lists of related pks.
    """
    meta = instance._meta
    result = {}
    for field in meta.concrete_fields + meta.many_to_many:
        if not isinstance(field, ManyToManyField):
            result[field.name] = field.value_from_object(instance)
        elif instance.pk is None:
            # Unsaved instance: no m2m rows can exist yet.
            result[field.name] = []
        else:
            result[field.name] = list(
                field.value_from_object(instance).values_list('pk', flat=True))
    return result
class UppsellJSONEncoder(json.JSONEncoder):
    """JSON encoder that understands Django ORM objects plus datetime/date,
    Decimal, Urn and UUID values; everything else falls through to the base
    encoder (which raises TypeError)."""

    def default(self, obj):
        if isinstance(obj, (Model, ModelBase)):
            return model_to_dict(obj)
        if isinstance(obj, (QuerySet, ValuesQuerySet)):
            return [model_to_dict(member) for member in obj]
        if isinstance(obj, datetime):
            return obj.isoformat("T")
        if isinstance(obj, date):
            return obj.isoformat()
        if isinstance(obj, Decimal):
            return float(obj)
        if isinstance(obj, (Urn, uuid.UUID)):
            return str(obj)
        return json.JSONEncoder.default(self, obj)
| 37.04878 | 95 | 0.665569 | import json, uuid
from datetime import datetime, date
from decimal import Decimal
from werkzeug.http import http_date
from django.db.models import Model
from django.db.models.base import ModelBase
from django.db.models.query import QuerySet, ValuesQuerySet
from django.db.models.fields.related import ManyToManyField
from uppsell.models import Urn
def model_to_dict(instance):
    """Map every field of a Django model instance to its raw value,
    including non-editable fields; m2m fields map to lists of related pks."""
    data = {}
    meta = instance._meta
    for field in meta.concrete_fields + meta.many_to_many:
        if not isinstance(field, ManyToManyField):
            data[field.name] = field.value_from_object(instance)
            continue
        if instance.pk is None:
            # No primary key yet, so no m2m relations can be stored.
            data[field.name] = []
            continue
        related_pks = field.value_from_object(instance).values_list('pk', flat=True)
        data[field.name] = list(related_pks)
    return data
class UppsellJSONEncoder(json.JSONEncoder):
    """json.JSONEncoder subclass serializing Django models/querysets via
    model_to_dict, datetimes/dates via isoformat, Decimal as float and
    Urn/UUID as str."""

    def default(self, obj):
        if isinstance(obj, (Model, ModelBase)):
            return model_to_dict(obj)
        if isinstance(obj, (QuerySet, ValuesQuerySet)):
            return list(map(model_to_dict, obj))
        if isinstance(obj, datetime):
            return obj.isoformat("T")
        if isinstance(obj, date):
            return obj.isoformat()
        if isinstance(obj, Decimal):
            return float(obj)
        if isinstance(obj, Urn) or isinstance(obj, uuid.UUID):
            return str(obj)
        # Not a type we know: let the base class raise TypeError.
        return json.JSONEncoder.default(self, obj)
| true | true |
f73d1e44eab9cf821b354de9774a9018ed75b8a1 | 742 | py | Python | tests/test_authorize.py | dutkiewicz/adform-api | 5b670ea971c261565d1fe4cf7c18b2e109f8449d | [
"MIT"
] | null | null | null | tests/test_authorize.py | dutkiewicz/adform-api | 5b670ea971c261565d1fe4cf7c18b2e109f8449d | [
"MIT"
] | 6 | 2019-11-29T04:53:15.000Z | 2020-06-29T04:41:24.000Z | tests/test_authorize.py | dutkiewicz/adform-api | 5b670ea971c261565d1fe4cf7c18b2e109f8449d | [
"MIT"
] | null | null | null | import configparser
import unittest
import adform
class AuthorizeTestCase(unittest.TestCase):
    """Integration tests for adform.auth.Authorize using credentials
    read from config.ini."""

    def setUp(self):
        cfg = configparser.ConfigParser()
        cfg.read('config.ini')
        defaults = cfg['DEFAULT']
        self.client_id = defaults['CLIENT_ID']
        self.client_secret = defaults['CLIENT_SECRET']

    def test_token_repr_is_str(self):
        # A successfully issued token must have a string repr().
        token = adform.auth.Authorize(self.client_id, self.client_secret)
        self.assertIsInstance(repr(token), str)

    def test_raise_auth_exception(self):
        # Corrupting the client id must make authorization fail.
        with self.assertRaises(adform.exceptions.AuthorizeError):
            self.client_id += 'XXX'
            adform.auth.Authorize(self.client_id, self.client_secret)
# Allow running this module directly to execute the tests above.
if __name__ == '__main__':
    unittest.main()
| 27.481481 | 73 | 0.691375 | import configparser
import unittest
import adform
class AuthorizeTestCase(unittest.TestCase):
    """Exercise adform.auth.Authorize against the credentials stored in
    config.ini."""

    def setUp(self):
        conf = configparser.ConfigParser()
        conf.read('config.ini')
        section = conf['DEFAULT']
        self.client_id = section['CLIENT_ID']
        self.client_secret = section['CLIENT_SECRET']

    def test_token_repr_is_str(self):
        auth = adform.auth.Authorize(self.client_id, self.client_secret)
        text = repr(auth)
        self.assertIsInstance(text, str)

    def test_raise_auth_exception(self):
        with self.assertRaises(adform.exceptions.AuthorizeError):
            # An invalid client id should be rejected by the API.
            self.client_id += 'XXX'
            adform.auth.Authorize(self.client_id, self.client_secret)
# Entry point when the test module is executed directly.
if __name__ == '__main__':
    unittest.main()
| true | true |
f73d1e7b6ef3cb6a6ce2651fdcee3b1d5ed53155 | 83 | py | Python | Jour 3/myNumpy.py | bellash13/SmartAcademyPython | 44d0f6db0fcdcbbf1449a45b073a2b3182a19714 | [
"MIT"
] | null | null | null | Jour 3/myNumpy.py | bellash13/SmartAcademyPython | 44d0f6db0fcdcbbf1449a45b073a2b3182a19714 | [
"MIT"
] | null | null | null | Jour 3/myNumpy.py | bellash13/SmartAcademyPython | 44d0f6db0fcdcbbf1449a45b073a2b3182a19714 | [
"MIT"
] | null | null | null | import numpy as np
import numpy as np

# 3x4 demo matrix.  len() of a 2-D ndarray is the size of its first axis,
# so this script prints 3 (the number of rows).
# Fix: dataset-extraction garbling had fused CSV stat columns onto the print
# line, which made the script a syntax error; restored a clean script with a
# local numpy import so the block is self-contained.
a = np.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
print(len(a))
import numpy as np

# 3x4 demo matrix; len() counts along the first axis, so 3 is printed.
# Fix: extraction garbling had fused dataset columns ("| true | true |")
# onto the print line, breaking the syntax; restored a runnable script.
a = np.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
print(len(a))
f73d1ed062d1e63988ff94ecca50aa7e8e2f2a26 | 3,562 | py | Python | src/convmag/main.py | tgwoodcock/convmag | d1aec42b2735c66601652a679dac732ac944a888 | [
"MIT"
] | null | null | null | src/convmag/main.py | tgwoodcock/convmag | d1aec42b2735c66601652a679dac732ac944a888 | [
"MIT"
] | null | null | null | src/convmag/main.py | tgwoodcock/convmag | d1aec42b2735c66601652a679dac732ac944a888 | [
"MIT"
] | null | null | null | """
This module contains the code for the interactive conversion
of units at the command line.
@author: tgwoodcock
"""
from . import convmag_functions as cm
# interactive conversion
def main():
    """Run the interactive magnetic-unit conversion loop.

    Prompts on stdin until "q" is entered.  Recognised inputs:
      * "units" - list the available base units and prefactors
      * "conv"  - list the conversion factors between base units
      * "<value> <startunit> <endunit>" - convert, e.g. "6 T A/m"

    Conversions between muB/fu and T additionally prompt for the lattice
    parameters, the cell angle gamma (90 or 120 deg) and the number of
    formula units per unit cell.

    Fixes over the previous version: `vol` could be referenced unbound
    (UnboundLocalError) when gamma was neither "90" nor "120", and any
    malformed numeric input crashed the whole loop instead of re-prompting.
    """
    CONVERTING = True
    print("*****Conversion between magnetic units.*****")
    print("\nAt the 'Input:' prompt, enter:")
    print("[value startunit endunit] e.g. 6 T A/m,")
    print("[units] to list the available units,")
    print("[conv] to list the conversion factors or")
    print("[q] to quit.")
    while CONVERTING:
        r = input("\nInput: ").strip()
        if r == "q":
            CONVERTING = False
        elif r == "units":
            print("\nThe base units available for conversion are:")
            print("\n".join(cm.units)+"\nmuB/fu")
            print("\nThe prefactors available for any base unit are:",
                  ", ".join(cm.prefactors))
        elif r == "conv":
            lgst = max(map(len, cm.units))
            print("\nThe conversions between base units available are:")
            for k in list(cm.convmag.keys()):
                St, En = k.split("_")
                print(f"{St:>{lgst}} <-> {En:<{lgst}}: {cm.convmag[k]}")
            print(f"{'muB/fu':>{lgst}} <-> {'T':<{lgst}}: requires user input")
            print("\nINFO: the factors given above are for the forward conversion")
            print("INFO: permeability of free space, MU_0 = 4 * 3.14159 * 1e-7 H/m (== Vs/Am)")
            print("INFO: Bohr magneton, MU_B = 9.274015e-24 Am^2")
            print(" (muB is the unit string for conversions with Bohr magnetons)")
            print("INFO: prefactors available for any base unit:",
                  ", ".join(cm.prefactors))
        else:
            # Conversion request: "<value> <startunit> <endunit>".
            parts = r.split()
            if len(parts) != 3:
                print("Input not recognised: expected 'value startunit endunit'")
                continue
            try:
                val = float(parts[0])
            except ValueError:
                print(f"Could not interpret '{parts[0]}' as a number")
                continue
            startunit, endunit = parts[1], parts[2]
            if "muB/fu" in (startunit, endunit) and "T" in (startunit, endunit):
                print("\n***INFO: muB per formula unit <-> T***\n")
                print("Please enter lattice parameters: a b c in Angstrom")
                lp = input("a b c: ")
                try:
                    a, b, c = (float(x) for x in lp.split())
                except ValueError:
                    print("Could not parse the lattice parameters, aborting this conversion")
                    continue
                print("\nLimited to orthogonal or hexagonal unit cells:")
                gamma = input("Please enter gamma in deg. (90 or 120): ").strip()
                # Re-prompt until a supported angle is given; previously any
                # other reply left `vol` unbound and crashed below.
                while gamma not in ("90", "120"):
                    gamma = input("Please enter either 90 or 120: ").strip()
                if gamma == "120":
                    vol = cm.calculate_unitcell_volume(a, b, c, gamma=120)
                else:
                    vol = cm.calculate_unitcell_volume(a, b, c)
                vol = vol * (1E-10)**3  # to get m^3 from A^3
                print("Please enter the number of formula units per unit cell:")
                try:
                    num_fu = int(input("f.u./unit cell: "))
                except ValueError:
                    print("Could not parse the number of formula units, aborting this conversion")
                    continue
                if startunit == "muB/fu":
                    Tesla = cm.muB_per_fu_to_Tesla(val, num_fu, vol)
                    s1 = f"\n{val} muB per f.u. = {Tesla:.5f} T"
                    s2 = f" ({num_fu:d} f.u./unit cell, "
                    s3 = f"cell volume = {vol:.3e} m^3)"
                    print("".join([s1, s2, s3]))
                elif startunit == "T":
                    muB_fu = cm.Tesla_to_muB_per_fu(val, num_fu, vol)
                    s1 = f"\n{val} T = {muB_fu:.5f} muB per f.u."
                    s2 = f" ({num_fu:d} f.u./unit cell, "
                    s3 = f"cell volume = {vol:.3e} m^3)"
                    print("".join([s1, s2, s3]))
            else:
                cm.convert_unit(val, startunit, endunit, verbose=True)
| 45.088608 | 95 | 0.49607 |
from . import convmag_functions as cm
def main():
    """Interactive command-line loop converting between magnetic units.

    Accepts "units", "conv", "q" or "<value> <startunit> <endunit>".
    muB/fu <-> T conversions additionally prompt for lattice parameters,
    gamma (90 or 120) and formula units per cell.
    """
    CONVERTING = True
    print("*****Conversion between magnetic units.*****")
    # TODO: "promt" -> "prompt" in the user-facing text below.
    print("\nAt the 'Input:' promt, enter:")
    print("[value startunit endunit] e.g. 6 T A/m,")
    print("[units] to list the available units,")
    print("[conv] to list the conversion factors or")
    print("[q] to quit.")
    while CONVERTING:
        r = input("\nInput: ")
        if r == "q":
            CONVERTING = False
        elif r == "units":
            print("\nThe base units available for conversion are:")
            print("\n".join(cm.units)+"\nmuB/fu")
            print("\nThe prefactors available for any base unit are:",
                  ", ".join(cm.prefactors))
        elif r == "conv":
            # Width of the longest unit name, for aligned output.
            lgst = max(map(len, cm.units))
            print("\nThe conversions between base units available are:")
            for k in list(cm.convmag.keys()):
                St, En = k.split("_")
                print(f"{St:>{lgst}} <-> {En:<{lgst}}: {cm.convmag[k]}")
            print(f"{'muB/fu':>{lgst}} <-> {'T':<{lgst}}: requires user input")
            print("\nINFO: the factors given above are for the forward conversion")
            print("INFO: permeability of free space, MU_0 = 4 * 3.14159 * 1e-7 H/m (== Vs/Am)")
            print("INFO: Bohr magneton, MU_B = 9.274015e-24 Am^2")
            print(" (muB is the unit string for conversions with Bohr magnetons)")
            print("INFO: prefactors available for any base unit:",
                  ", ".join(cm.prefactors))
        else:
            # NOTE(review): malformed input ("6", "6 T") raises
            # IndexError/ValueError here and kills the loop - needs a guard.
            val = float(r.split(" ")[0])
            startunit = r.split(" ")[1]
            endunit = r.split(" ")[2]
            if "muB/fu" in [startunit, endunit] and "T" in [startunit, endunit]:
                print("\n***INFO: muB per formula unit <-> T***\n")
                print("Please enter lattice parameters: a b c in Angstrom")
                lp = input("a b c: ")
                a = float(lp.split(" ")[0])
                b = float(lp.split(" ")[1])
                c = float(lp.split(" ")[2])
                print("\nLimited to orthogonal or hexagonal unit cells:")
                gamma = input("Please enter gamma in deg. (90 or 120): ")
                # NOTE(review): if the reply is neither "90" nor "120", vol is
                # never bound and the line below raises UnboundLocalError.
                if gamma == "120":
                    vol = cm.calculate_unitcell_volume(a, b, c, gamma=120)
                elif gamma == "90":
                    vol = cm.calculate_unitcell_volume(a, b, c)
                vol = vol * (1E-10)**3  # to get m^3 from A^3
                print("Please enter the number of formula units per unit cell:")
                num_fu = int(input("f.u./unit cell: "))
                if startunit == "muB/fu":
                    Tesla = cm.muB_per_fu_to_Tesla(val, num_fu, vol)
                    s1 = f"\n{val} muB per f.u. = {Tesla:.5f} T"
                    s2 = f" ({num_fu:d} f.u./unit cell, "
                    s3 = f"cell volume = {vol:.3e} m^3)"
                    print("".join([s1, s2, s3]))
                elif startunit == "T":
                    muB_fu = cm.Tesla_to_muB_per_fu(val, num_fu, vol)
                    s1 = f"\n{val} T = {muB_fu:.5f} muB per f.u."
                    s2 = f" ({num_fu:d} f.u./unit cell, "
                    s3 = f"cell volume = {vol:.3e} m^3)"
                    print("".join([s1, s2, s3]))
            else:
                cm.convert_unit(val, startunit, endunit, verbose=True)
| true | true |
f73d1eebbf537262d9995df1b48005f623211cd7 | 3,703 | py | Python | tools/filelife.py | figozhang/bcc | 0abd93e5f908a930f87007937f4028aecc13975b | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2017-01-07T12:10:31.000Z | 2017-01-07T12:10:31.000Z | tools/filelife.py | Agares/bcc | 0abd93e5f908a930f87007937f4028aecc13975b | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tools/filelife.py | Agares/bcc | 0abd93e5f908a930f87007937f4028aecc13975b | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2018-10-30T07:57:13.000Z | 2021-06-15T02:29:44.000Z | #!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
# filelife Trace the lifespan of short-lived files.
# For Linux, uses BCC, eBPF. Embedded C.
#
# This traces the creation and deletion of files, providing information
# on who deleted the file, the file age, and the file name. The intent is to
# provide information on short-lived files, for debugging or performance
# analysis.
#
# USAGE: filelife [-h] [-p PID]
#
# Copyright 2016 Netflix, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 08-Feb-2015 Brendan Gregg Created this.
# 17-Feb-2016 Allan McAleavy updated for BPF_PERF_OUTPUT
from __future__ import print_function
from bcc import BPF
import argparse
from time import strftime
import ctypes as ct
# arguments
examples = """examples:
./filelife # trace all stat() syscalls
./filelife -p 181 # only trace PID 181
"""
parser = argparse.ArgumentParser(
description="Trace stat() syscalls",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("-p", "--pid",
help="trace this PID only")
args = parser.parse_args()
debug = 0
# define BPF program
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <linux/fs.h>
#include <linux/sched.h>
struct data_t {
u32 pid;
u64 delta;
char comm[TASK_COMM_LEN];
char fname[DNAME_INLINE_LEN];
};
BPF_HASH(birth, struct dentry *);
BPF_PERF_OUTPUT(events);
// trace file creation time
int trace_create(struct pt_regs *ctx, struct inode *dir, struct dentry *dentry)
{
u32 pid = bpf_get_current_pid_tgid();
FILTER
u64 ts = bpf_ktime_get_ns();
birth.update(&dentry, &ts);
return 0;
};
// trace file deletion and output details
int trace_unlink(struct pt_regs *ctx, struct inode *dir, struct dentry *dentry)
{
struct data_t data = {};
u32 pid = bpf_get_current_pid_tgid();
FILTER
u64 *tsp, delta;
tsp = birth.lookup(&dentry);
if (tsp == 0) {
return 0; // missed create
}
delta = (bpf_ktime_get_ns() - *tsp) / 1000000;
birth.delete(&dentry);
if (dentry->d_name.len == 0)
return 0;
if (bpf_get_current_comm(&data.comm, sizeof(data.comm)) == 0) {
data.pid = pid;
data.delta = delta;
bpf_probe_read(&data.fname, sizeof(data.fname),
(void *)dentry->d_name.name);
}
events.perf_submit(ctx, &data, sizeof(data));
return 0;
}
"""
TASK_COMM_LEN = 16 # linux/sched.h
DNAME_INLINE_LEN = 255 # linux/dcache.h
class Data(ct.Structure):
    """ctypes mirror of the BPF `struct data_t` perf-event record.

    Declaration order must match the C layout exactly (ctypes lays fields
    out in order).  NOTE(review): the C side declares `pid` as u32; reading
    it as c_ulonglong relies on the 4 alignment-padding bytes before
    `delta` being zero (the struct is zero-initialized in the probe) —
    confirm if the C struct changes.
    """
    _fields_ = [
        ("pid", ct.c_ulonglong),
        ("delta", ct.c_ulonglong),  # file lifetime in milliseconds
        ("comm", ct.c_char * TASK_COMM_LEN),  # deleting task's command name
        ("fname", ct.c_char * DNAME_INLINE_LEN)  # deleted file's name
    ]
# substitute the PID filter into the C source before it is compiled:
# either an early-return check for the requested PID, or nothing at all
if args.pid:
    bpf_text = bpf_text.replace('FILTER',
        'if (pid != %s) { return 0; }' % args.pid)
else:
    bpf_text = bpf_text.replace('FILTER', '')
if debug:
    print(bpf_text)
# initialize BPF
# compile the program and attach the create/unlink kprobes
b = BPF(text=bpf_text)
b.attach_kprobe(event="vfs_create", fn_name="trace_create")
# newer kernels (say, 4.8) may don't fire vfs_create, so record (or overwrite)
# the timestamp in security_inode_create():
b.attach_kprobe(event="security_inode_create", fn_name="trace_create")
b.attach_kprobe(event="vfs_unlink", fn_name="trace_unlink")
# header
print("%-8s %-6s %-16s %-7s %s" % ("TIME", "PID", "COMM", "AGE(s)", "FILE"))
# process event
def print_event(cpu, data, size):
    """Perf-buffer callback: decode one Data record and print a table row."""
    record = ct.cast(data, ct.POINTER(Data)).contents
    timestamp = strftime("%H:%M:%S")
    age_seconds = float(record.delta) / 1000
    print("%-8s %-6d %-16s %-7.2f %s" % (timestamp, record.pid,
        record.comm, age_seconds, record.fname))
# register the callback and poll the perf ring buffer forever;
# print_event fires once per submitted record
b["events"].open_perf_buffer(print_event)
while 1:
    b.kprobe_poll()
| 26.45 | 79 | 0.660546 |
from __future__ import print_function
from bcc import BPF
import argparse
from time import strftime
import ctypes as ct
examples = """examples:
./filelife # trace all stat() syscalls
./filelife -p 181 # only trace PID 181
"""
parser = argparse.ArgumentParser(
description="Trace stat() syscalls",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("-p", "--pid",
help="trace this PID only")
args = parser.parse_args()
debug = 0
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <linux/fs.h>
#include <linux/sched.h>
struct data_t {
u32 pid;
u64 delta;
char comm[TASK_COMM_LEN];
char fname[DNAME_INLINE_LEN];
};
BPF_HASH(birth, struct dentry *);
BPF_PERF_OUTPUT(events);
// trace file creation time
int trace_create(struct pt_regs *ctx, struct inode *dir, struct dentry *dentry)
{
u32 pid = bpf_get_current_pid_tgid();
FILTER
u64 ts = bpf_ktime_get_ns();
birth.update(&dentry, &ts);
return 0;
};
// trace file deletion and output details
int trace_unlink(struct pt_regs *ctx, struct inode *dir, struct dentry *dentry)
{
struct data_t data = {};
u32 pid = bpf_get_current_pid_tgid();
FILTER
u64 *tsp, delta;
tsp = birth.lookup(&dentry);
if (tsp == 0) {
return 0; // missed create
}
delta = (bpf_ktime_get_ns() - *tsp) / 1000000;
birth.delete(&dentry);
if (dentry->d_name.len == 0)
return 0;
if (bpf_get_current_comm(&data.comm, sizeof(data.comm)) == 0) {
data.pid = pid;
data.delta = delta;
bpf_probe_read(&data.fname, sizeof(data.fname),
(void *)dentry->d_name.name);
}
events.perf_submit(ctx, &data, sizeof(data));
return 0;
}
"""
TASK_COMM_LEN = 16
DNAME_INLINE_LEN = 255
class Data(ct.Structure):
_fields_ = [
("pid", ct.c_ulonglong),
("delta", ct.c_ulonglong),
("comm", ct.c_char * TASK_COMM_LEN),
("fname", ct.c_char * DNAME_INLINE_LEN)
]
if args.pid:
bpf_text = bpf_text.replace('FILTER',
'if (pid != %s) { return 0; }' % args.pid)
else:
bpf_text = bpf_text.replace('FILTER', '')
if debug:
print(bpf_text)
b = BPF(text=bpf_text)
b.attach_kprobe(event="vfs_create", fn_name="trace_create")
# the timestamp in security_inode_create():
b.attach_kprobe(event="security_inode_create", fn_name="trace_create")
b.attach_kprobe(event="vfs_unlink", fn_name="trace_unlink")
# header
print("%-8s %-6s %-16s %-7s %s" % ("TIME", "PID", "COMM", "AGE(s)", "FILE"))
# process event
def print_event(cpu, data, size):
event = ct.cast(data, ct.POINTER(Data)).contents
print("%-8s %-6d %-16s %-7.2f %s" % (strftime("%H:%M:%S"), event.pid,
event.comm, float(event.delta) / 1000, event.fname))
b["events"].open_perf_buffer(print_event)
while 1:
b.kprobe_poll()
| true | true |
f73d1f9c8efe120b86007ce12c1b4e79557e3ee1 | 631 | py | Python | wildlifecompliance/migrations/0368_auto_20200107_1627.py | preranaandure/wildlifecompliance | bc19575f7bccf7e19adadbbaf5d3eda1d1aee4b5 | [
"Apache-2.0"
] | 1 | 2020-12-07T17:12:40.000Z | 2020-12-07T17:12:40.000Z | wildlifecompliance/migrations/0368_auto_20200107_1627.py | preranaandure/wildlifecompliance | bc19575f7bccf7e19adadbbaf5d3eda1d1aee4b5 | [
"Apache-2.0"
] | 14 | 2020-01-08T08:08:26.000Z | 2021-03-19T22:59:46.000Z | wildlifecompliance/migrations/0368_auto_20200107_1627.py | preranaandure/wildlifecompliance | bc19575f7bccf7e19adadbbaf5d3eda1d1aee4b5 | [
"Apache-2.0"
] | 15 | 2020-01-08T08:02:28.000Z | 2021-11-03T06:48:32.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2020-01-07 08:27
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: re-declare GlobalSettings.key with the two
    disposal-period setting choices."""
    dependencies = [
        ('wildlifecompliance', '0367_globalsettings'),
    ]
    operations = [
        migrations.AlterField(
            model_name='globalsettings',
            name='key',
            # choices constrain settings keys to the two disposal periods
            field=models.CharField(choices=[('document_object_disposal_period', 'Document Object Disposal Period'), ('physical_object_disposal_period', 'Physical Object Disposal Period')], max_length=255, unique=True),
        ),
    ]
| 30.047619 | 218 | 0.675119 |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wildlifecompliance', '0367_globalsettings'),
]
operations = [
migrations.AlterField(
model_name='globalsettings',
name='key',
field=models.CharField(choices=[('document_object_disposal_period', 'Document Object Disposal Period'), ('physical_object_disposal_period', 'Physical Object Disposal Period')], max_length=255, unique=True),
),
]
| true | true |
f73d1fd0b75425e47bd6ece71b944ad623e88d7a | 9,869 | py | Python | samples/VendorReadWriteProperty.py | cbergmiller/bacpypes | 7b1f2e989787c2c1f807680fee5ee7a71b3689ab | [
"MIT"
] | 1 | 2018-01-11T13:10:15.000Z | 2018-01-11T13:10:15.000Z | samples/VendorReadWriteProperty.py | cbergmiller/bacpypes | 7b1f2e989787c2c1f807680fee5ee7a71b3689ab | [
"MIT"
] | null | null | null | samples/VendorReadWriteProperty.py | cbergmiller/bacpypes | 7b1f2e989787c2c1f807680fee5ee7a71b3689ab | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
This application presents a 'console' prompt to the user asking for commands.
For 'read' commands it will create ReadPropertyRequest PDUs, then lines up the
coorresponding ReadPropertyACK and prints the value. For 'write' commands it
will create WritePropertyRequst PDUs and prints out a simple acknowledgement.
"""
import sys
from bacpypes.debugging import bacpypes_debugging, ModuleLogger
from bacpypes.consolelogging import ConfigArgumentParser
from bacpypes.consolecmd import ConsoleCmd
from bacpypes.core import run, enable_sleeping
from bacpypes.iocb import IOCB
from bacpypes.pdu import Address
from bacpypes.object import get_object_class, get_datatype
from bacpypes.apdu import Error, AbortPDU, SimpleAckPDU, \
ReadPropertyRequest, ReadPropertyACK, WritePropertyRequest
from bacpypes.primitivedata import Tag, Null, Atomic, Integer, Unsigned, Real
from bacpypes.constructeddata import Array, Any
from bacpypes.app import BIPSimpleApplication
from bacpypes.local.device import LocalDeviceObject
import VendorAVObject
# some debugging
_debug = 0
_log = ModuleLogger(globals())
# globals
this_application = None
#
# ReadWritePropertyConsoleCmd
#
@bacpypes_debugging
class ReadWritePropertyConsoleCmd(ConsoleCmd):
    # Console with two commands: `read` sends a ReadPropertyRequest and
    # prints the decoded value; `write` sends a WritePropertyRequest and
    # prints an "ack".  The do_* docstrings double as the cmd-module help
    # text, so they are left exactly as shown to the user.
    def do_read(self, args):
        """read <addr> <type> <inst> <prop> [ <indx> ]"""
        args = args.split()
        if _debug: ReadWritePropertyConsoleCmd._debug("do_read %r", args)
        try:
            addr, obj_type, obj_inst, prop_id = args[:4]
            # object type may be numeric or a name registered for this vendor
            if obj_type.isdigit():
                obj_type = int(obj_type)
            elif not get_object_class(obj_type, VendorAVObject.vendor_id):
                raise ValueError("unknown object type")
            if _debug: ReadWritePropertyConsoleCmd._debug(" - obj_type: %r", obj_type)
            obj_inst = int(obj_inst)
            if _debug: ReadWritePropertyConsoleCmd._debug(" - obj_inst: %r", obj_inst)
            if prop_id.isdigit():
                prop_id = int(prop_id)
            if _debug: ReadWritePropertyConsoleCmd._debug(" - prop_id: %r", prop_id)
            # validate the property against the vendor's object definitions
            datatype = get_datatype(obj_type, prop_id, VendorAVObject.vendor_id)
            if not datatype:
                raise ValueError("invalid property for object type")
            # build a request
            request = ReadPropertyRequest(
                objectIdentifier=(obj_type, obj_inst),
                propertyIdentifier=prop_id,
                )
            request.pduDestination = Address(addr)
            # fifth argument, if given, is an array index
            if len(args) == 5:
                request.propertyArrayIndex = int(args[4])
            if _debug: ReadWritePropertyConsoleCmd._debug(" - request: %r", request)
            # make an IOCB
            iocb = IOCB(request)
            if _debug: ReadWritePropertyConsoleCmd._debug(" - iocb: %r", iocb)
            # give it to the application
            this_application.request_io(iocb)
            # wait for it to complete
            iocb.wait()
            # do something for success
            if iocb.ioResponse:
                apdu = iocb.ioResponse
                # peek at the value tag
                value_tag = apdu.propertyValue.tagList.Peek()
                if _debug: ReadWritePropertyConsoleCmd._debug(" - value_tag: %r", value_tag)
                # make sure that it is application tagged
                if value_tag.tagClass != Tag.applicationTagClass:
                    sys.stdout.write("value is not application encoded\n")
                else:
                    # find the datatype
                    datatype = Tag._app_tag_class[value_tag.tagNumber]
                    if _debug: ReadWritePropertyConsoleCmd._debug(" - datatype: %r", datatype)
                    if not datatype:
                        raise TypeError("unknown datatype")
                    # cast out the value
                    value = apdu.propertyValue.cast_out(datatype)
                    if _debug: ReadWritePropertyConsoleCmd._debug(" - value: %r", value)
                    sys.stdout.write("%s (%s)\n" % (value, datatype))
                sys.stdout.flush()
            # do something for error/reject/abort
            if iocb.ioError:
                sys.stdout.write(str(iocb.ioError) + '\n')
        except Exception as error:
            # any parse or protocol failure is logged, never crashes the console
            ReadWritePropertyConsoleCmd._exception("exception: %r", error)
    def do_write(self, args):
        """write <addr> <type> <inst> <prop> <value> [ <indx> ] [ <priority> ]"""
        args = args.split()
        ReadWritePropertyConsoleCmd._debug("do_write %r", args)
        try:
            addr, obj_type, obj_inst, prop_id = args[:4]
            # object type may be numeric or a name registered for this vendor
            if obj_type.isdigit():
                obj_type = int(obj_type)
            elif not get_object_class(obj_type, VendorAVObject.vendor_id):
                raise ValueError("unknown object type")
            if _debug: ReadWritePropertyConsoleCmd._debug(" - obj_type: %r", obj_type)
            obj_inst = int(obj_inst)
            if _debug: ReadWritePropertyConsoleCmd._debug(" - obj_inst: %r", obj_inst)
            if prop_id.isdigit():
                prop_id = int(prop_id)
            if _debug: ReadWritePropertyConsoleCmd._debug(" - prop_id: %r", prop_id)
            value = args[4]
            # "-" in the index slot means "no index, but a priority follows"
            indx = None
            if len(args) >= 6:
                if args[5] != "-":
                    indx = int(args[5])
            if _debug: ReadWritePropertyConsoleCmd._debug(" - indx: %r", indx)
            priority = None
            if len(args) >= 7:
                priority = int(args[6])
            if _debug: ReadWritePropertyConsoleCmd._debug(" - priority: %r", priority)
            # get the datatype
            datatype = get_datatype(obj_type, prop_id, VendorAVObject.vendor_id)
            if _debug: ReadWritePropertyConsoleCmd._debug(" - datatype: %r", datatype)
            # change atomic values into something encodeable, null is a special case
            if (value == 'null'):
                value = Null()
            elif issubclass(datatype, Atomic):
                if datatype is Integer:
                    value = int(value)
                elif datatype is Real:
                    value = float(value)
                elif datatype is Unsigned:
                    value = int(value)
                value = datatype(value)
            elif issubclass(datatype, Array) and (indx is not None):
                # index 0 addresses the array length, which is always Unsigned/Integer
                if indx == 0:
                    value = Integer(value)
                elif issubclass(datatype.subtype, Atomic):
                    value = datatype.subtype(value)
                elif not isinstance(value, datatype.subtype):
                    raise TypeError("invalid result datatype, expecting %s" % (datatype.subtype.__name__,))
            elif not isinstance(value, datatype):
                raise TypeError("invalid result datatype, expecting %s" % (datatype.__name__,))
            if _debug: ReadWritePropertyConsoleCmd._debug(" - encodeable value: %r %s", value, type(value))
            # build a request
            request = WritePropertyRequest(
                objectIdentifier=(obj_type, obj_inst),
                propertyIdentifier=prop_id
                )
            request.pduDestination = Address(addr)
            # save the value
            request.propertyValue = Any()
            try:
                request.propertyValue.cast_in(value)
            except Exception as error:
                # NOTE(review): a cast failure is only logged here and the
                # request is still sent with an empty propertyValue — confirm
                # whether this should abort the write instead
                ReadWritePropertyConsoleCmd._exception("WriteProperty cast error: %r", error)
            # optional array index
            if indx is not None:
                request.propertyArrayIndex = indx
            # optional priority
            if priority is not None:
                request.priority = priority
            if _debug: ReadWritePropertyConsoleCmd._debug(" - request: %r", request)
            # make an IOCB
            iocb = IOCB(request)
            if _debug: ReadWritePropertyConsoleCmd._debug(" - iocb: %r", iocb)
            # give it to the application
            this_application.request_io(iocb)
            # wait for it to complete
            iocb.wait()
            # do something for success
            if iocb.ioResponse:
                sys.stdout.write("ack\n")
            # do something for error/reject/abort
            if iocb.ioError:
                sys.stdout.write(str(iocb.ioError) + '\n')
        except Exception as error:
            # any parse or protocol failure is logged, never crashes the console
            ReadWritePropertyConsoleCmd._exception("exception: %r", error)
#
# main
#
def main():
    """Build the local device and BIP application, then run the console
    until the user exits.  Assigns the module-global `this_application`
    used by the console commands."""
    global this_application
    # parse the command line arguments
    args = ConfigArgumentParser(description=__doc__).parse_args()
    if _debug: _log.debug("initialization")
    if _debug: _log.debug(" - args: %r", args)
    # make a device object
    this_device = LocalDeviceObject(
        objectName=args.ini.objectname,
        objectIdentifier=int(args.ini.objectidentifier),
        maxApduLengthAccepted=int(args.ini.maxapdulengthaccepted),
        segmentationSupported=args.ini.segmentationsupported,
        vendorIdentifier=int(args.ini.vendoridentifier),
        )
    # make a simple application
    this_application = BIPSimpleApplication(this_device, args.ini.address)
    # get the services supported
    services_supported = this_application.get_services_supported()
    if _debug: _log.debug(" - services_supported: %r", services_supported)
    # let the device object know
    this_device.protocolServicesSupported = services_supported.value
    # make a console
    this_console = ReadWritePropertyConsoleCmd()
    if _debug: _log.debug(" - this_console: %r", this_console)
    # enable sleeping will help with threads
    enable_sleeping()
    # run() blocks until the core loop is stopped
    _log.debug("running")
    run()
    _log.debug("fini")
if __name__ == '__main__':
main()
| 35.37276 | 110 | 0.602594 |
import sys
from bacpypes.debugging import bacpypes_debugging, ModuleLogger
from bacpypes.consolelogging import ConfigArgumentParser
from bacpypes.consolecmd import ConsoleCmd
from bacpypes.core import run, enable_sleeping
from bacpypes.iocb import IOCB
from bacpypes.pdu import Address
from bacpypes.object import get_object_class, get_datatype
from bacpypes.apdu import Error, AbortPDU, SimpleAckPDU, \
ReadPropertyRequest, ReadPropertyACK, WritePropertyRequest
from bacpypes.primitivedata import Tag, Null, Atomic, Integer, Unsigned, Real
from bacpypes.constructeddata import Array, Any
from bacpypes.app import BIPSimpleApplication
from bacpypes.local.device import LocalDeviceObject
import VendorAVObject
_debug = 0
_log = ModuleLogger(globals())
this_application = None
@bacpypes_debugging
class ReadWritePropertyConsoleCmd(ConsoleCmd):
def do_read(self, args):
args = args.split()
if _debug: ReadWritePropertyConsoleCmd._debug("do_read %r", args)
try:
addr, obj_type, obj_inst, prop_id = args[:4]
if obj_type.isdigit():
obj_type = int(obj_type)
elif not get_object_class(obj_type, VendorAVObject.vendor_id):
raise ValueError("unknown object type")
if _debug: ReadWritePropertyConsoleCmd._debug(" - obj_type: %r", obj_type)
obj_inst = int(obj_inst)
if _debug: ReadWritePropertyConsoleCmd._debug(" - obj_inst: %r", obj_inst)
if prop_id.isdigit():
prop_id = int(prop_id)
if _debug: ReadWritePropertyConsoleCmd._debug(" - prop_id: %r", prop_id)
datatype = get_datatype(obj_type, prop_id, VendorAVObject.vendor_id)
if not datatype:
raise ValueError("invalid property for object type")
request = ReadPropertyRequest(
objectIdentifier=(obj_type, obj_inst),
propertyIdentifier=prop_id,
)
request.pduDestination = Address(addr)
if len(args) == 5:
request.propertyArrayIndex = int(args[4])
if _debug: ReadWritePropertyConsoleCmd._debug(" - request: %r", request)
iocb = IOCB(request)
if _debug: ReadWritePropertyConsoleCmd._debug(" - iocb: %r", iocb)
this_application.request_io(iocb)
iocb.wait()
if iocb.ioResponse:
apdu = iocb.ioResponse
value_tag = apdu.propertyValue.tagList.Peek()
if _debug: ReadWritePropertyConsoleCmd._debug(" - value_tag: %r", value_tag)
if value_tag.tagClass != Tag.applicationTagClass:
sys.stdout.write("value is not application encoded\n")
else:
datatype = Tag._app_tag_class[value_tag.tagNumber]
if _debug: ReadWritePropertyConsoleCmd._debug(" - datatype: %r", datatype)
if not datatype:
raise TypeError("unknown datatype")
value = apdu.propertyValue.cast_out(datatype)
if _debug: ReadWritePropertyConsoleCmd._debug(" - value: %r", value)
sys.stdout.write("%s (%s)\n" % (value, datatype))
sys.stdout.flush()
if iocb.ioError:
sys.stdout.write(str(iocb.ioError) + '\n')
except Exception as error:
ReadWritePropertyConsoleCmd._exception("exception: %r", error)
def do_write(self, args):
args = args.split()
ReadWritePropertyConsoleCmd._debug("do_write %r", args)
try:
addr, obj_type, obj_inst, prop_id = args[:4]
if obj_type.isdigit():
obj_type = int(obj_type)
elif not get_object_class(obj_type, VendorAVObject.vendor_id):
raise ValueError("unknown object type")
if _debug: ReadWritePropertyConsoleCmd._debug(" - obj_type: %r", obj_type)
obj_inst = int(obj_inst)
if _debug: ReadWritePropertyConsoleCmd._debug(" - obj_inst: %r", obj_inst)
if prop_id.isdigit():
prop_id = int(prop_id)
if _debug: ReadWritePropertyConsoleCmd._debug(" - prop_id: %r", prop_id)
value = args[4]
indx = None
if len(args) >= 6:
if args[5] != "-":
indx = int(args[5])
if _debug: ReadWritePropertyConsoleCmd._debug(" - indx: %r", indx)
priority = None
if len(args) >= 7:
priority = int(args[6])
if _debug: ReadWritePropertyConsoleCmd._debug(" - priority: %r", priority)
datatype = get_datatype(obj_type, prop_id, VendorAVObject.vendor_id)
if _debug: ReadWritePropertyConsoleCmd._debug(" - datatype: %r", datatype)
if (value == 'null'):
value = Null()
elif issubclass(datatype, Atomic):
if datatype is Integer:
value = int(value)
elif datatype is Real:
value = float(value)
elif datatype is Unsigned:
value = int(value)
value = datatype(value)
elif issubclass(datatype, Array) and (indx is not None):
if indx == 0:
value = Integer(value)
elif issubclass(datatype.subtype, Atomic):
value = datatype.subtype(value)
elif not isinstance(value, datatype.subtype):
raise TypeError("invalid result datatype, expecting %s" % (datatype.subtype.__name__,))
elif not isinstance(value, datatype):
raise TypeError("invalid result datatype, expecting %s" % (datatype.__name__,))
if _debug: ReadWritePropertyConsoleCmd._debug(" - encodeable value: %r %s", value, type(value))
request = WritePropertyRequest(
objectIdentifier=(obj_type, obj_inst),
propertyIdentifier=prop_id
)
request.pduDestination = Address(addr)
request.propertyValue = Any()
try:
request.propertyValue.cast_in(value)
except Exception as error:
ReadWritePropertyConsoleCmd._exception("WriteProperty cast error: %r", error)
if indx is not None:
request.propertyArrayIndex = indx
if priority is not None:
request.priority = priority
if _debug: ReadWritePropertyConsoleCmd._debug(" - request: %r", request)
iocb = IOCB(request)
if _debug: ReadWritePropertyConsoleCmd._debug(" - iocb: %r", iocb)
this_application.request_io(iocb)
iocb.wait()
if iocb.ioResponse:
sys.stdout.write("ack\n")
if iocb.ioError:
sys.stdout.write(str(iocb.ioError) + '\n')
except Exception as error:
ReadWritePropertyConsoleCmd._exception("exception: %r", error)
def main():
global this_application
args = ConfigArgumentParser(description=__doc__).parse_args()
if _debug: _log.debug("initialization")
if _debug: _log.debug(" - args: %r", args)
this_device = LocalDeviceObject(
objectName=args.ini.objectname,
objectIdentifier=int(args.ini.objectidentifier),
maxApduLengthAccepted=int(args.ini.maxapdulengthaccepted),
segmentationSupported=args.ini.segmentationsupported,
vendorIdentifier=int(args.ini.vendoridentifier),
)
this_application = BIPSimpleApplication(this_device, args.ini.address)
services_supported = this_application.get_services_supported()
if _debug: _log.debug(" - services_supported: %r", services_supported)
this_device.protocolServicesSupported = services_supported.value
this_console = ReadWritePropertyConsoleCmd()
if _debug: _log.debug(" - this_console: %r", this_console)
enable_sleeping()
_log.debug("running")
run()
_log.debug("fini")
if __name__ == '__main__':
main()
| true | true |
f73d207bfe6d808270ec68670e0429f84853f359 | 47 | py | Python | optirocket/library/__init__.py | Keith-Maxwell/OptiRocket | d99ac8d2b868b60a2bbf32f5a8a31ecdcaeea5b0 | [
"MIT"
] | null | null | null | optirocket/library/__init__.py | Keith-Maxwell/OptiRocket | d99ac8d2b868b60a2bbf32f5a8a31ecdcaeea5b0 | [
"MIT"
] | 3 | 2021-01-14T15:09:51.000Z | 2021-02-12T17:05:18.000Z | optirocket/library/__init__.py | Keith-Maxwell/OptiRocket | d99ac8d2b868b60a2bbf32f5a8a31ecdcaeea5b0 | [
"MIT"
] | 1 | 2021-01-11T02:34:29.000Z | 2021-01-11T02:34:29.000Z | from . import constants, orbit_lib, rich_print
| 23.5 | 46 | 0.808511 | from . import constants, orbit_lib, rich_print
| true | true |
f73d21ec3854b7b19b2c5e02a0cc60320e1d97fa | 3,715 | py | Python | triple_agent/parsing/timeline/parse_full_timeline.py | andrewzwicky/TripleAgent | 8d056df5c53a3d264dc778bad6771a0a2f62e7e7 | [
"MIT"
] | 3 | 2020-04-25T11:42:03.000Z | 2020-07-08T16:38:26.000Z | triple_agent/parsing/timeline/parse_full_timeline.py | andrewzwicky/TripleAgent | 8d056df5c53a3d264dc778bad6771a0a2f62e7e7 | [
"MIT"
] | 17 | 2019-08-11T19:09:55.000Z | 2021-03-30T17:12:28.000Z | triple_agent/parsing/timeline/parse_full_timeline.py | andrewzwicky/TripleAgent | 8d056df5c53a3d264dc778bad6771a0a2f62e7e7 | [
"MIT"
] | null | null | null | import itertools
import logging
import time
from typing import List, Callable
from pathlib import Path
from triple_agent.constants.paths import DEBUG_CAPTURES
from triple_agent.classes.capture_debug_pictures import capture_debug_picture
from triple_agent.classes.timeline import TimelineCoherency
from triple_agent.parsing.timeline.parse_timeline import (
TimelineParseException,
TimelineOddNumberScreenshots,
TimelineMismatchedElapsedScreenshots,
parse_screenshot,
remove_overlap,
)
from triple_agent.classes.game import Game
from triple_agent.classes.timeline import Timeline, TimelineEvent
logger = logging.getLogger("triple_agent")
def merge_elapsed_screenshots(
    events: List[List[TimelineEvent]],
) -> List[List[TimelineEvent]]:
    """Pair alternating remaining/elapsed screenshot parses and merge them.

    Screenshots arrive as [remaining, elapsed, remaining, elapsed, ...].
    After validating that each pair describes the same events, the elapsed
    times are copied onto the remaining-time events, which are returned.
    """
    if len(events) % 2 != 0:
        # an unpaired screenshot means remaining/elapsed cannot be matched up
        logger.warning("TimelineParseException odd number of screenshots supplied")
        raise TimelineOddNumberScreenshots("Odd number of screenshots supplied")
    remaining_halves = events[::2]
    elapsed_halves = events[1::2]
    # validation pass: every paired event must describe the same happening.
    # NOTE(review): `and` binds tighter than `or` here, exactly as in the
    # original — the two time checks only qualify the cast_name comparison;
    # confirm whether parentheses around the three != tests were intended.
    for remaining_events, elapsed_events in zip(remaining_halves, elapsed_halves):
        for rem_event, ela_event in zip(remaining_events, elapsed_events):
            mismatched = (
                rem_event.event != ela_event.event
                or rem_event.actor != ela_event.actor
                or rem_event.cast_name != ela_event.cast_name
                and rem_event.elapsed_time is None
                and ela_event.time is None
            )
            if mismatched:
                logger.warning(
                    "TimelineParseException mismatch between remaining and elapsed screenshots"
                )
                raise TimelineMismatchedElapsedScreenshots(
                    "Mismatch between remaining and elapsed screenshots"
                )
    # merge pass: copy elapsed times onto the remaining-time events
    for remaining_events, elapsed_events in zip(remaining_halves, elapsed_halves):
        for rem_event, ela_event in zip(remaining_events, elapsed_events):
            rem_event.elapsed_time = ela_event.elapsed_time
    return remaining_halves
def parse_full_timeline(
    games: List[Game],
    screenshot_iterator: Callable,
    pickle_folder: Path,
    json_folder: Path,
):
    """Interactively parse timeline screenshots for each game.

    Waits for the user, then accumulates parsed screenshots per game; when
    the last screenshot of a game arrives (and the count matches), merges
    remaining/elapsed pairs into a Timeline, checks coherency, and persists
    coherent games to pickle and JSON.  Returns the (mutated) games list.
    """
    collected_events = []
    input("Hit Enter when ready, parsing begins 10 seconds later\n")
    time.sleep(10)
    for game_index, ss_index, screenshot, is_last in screenshot_iterator(games):
        try:
            collected_events.append(parse_screenshot(screenshot))
            if not is_last:
                continue
            # NOTE(review): on a count mismatch nothing is reset here, so
            # events carry over into the next game — preserved as-is.
            if len(collected_events) == ss_index:
                merged = merge_elapsed_screenshots(collected_events)
                all_lines = itertools.chain.from_iterable(merged)
                game = games[game_index]
                game.timeline = Timeline(remove_overlap(all_lines))
                coherency = game.is_timeline_coherent()
                game.add_start_clock_seconds()
                if coherency != TimelineCoherency.Coherent:
                    logger.error(
                        "INCOHERENT TIMELINE: %s %s\n",
                        game.uuid,
                        coherency,
                    )
                else:
                    # only coherent games are persisted
                    game.pickle(pickle_folder)
                    game.serialize_to_json(json_folder)
                collected_events = []
        except TimelineParseException:
            # save the offending screenshot for offline debugging, then
            # drop everything collected for this game
            capture_debug_picture(
                DEBUG_CAPTURES.joinpath("overall_failures"),
                screenshot,
                filename=f"{games[game_index].uuid}_{ss_index}.png",
            )
            collected_events = []
    return games
| 36.421569 | 95 | 0.62638 | import itertools
import logging
import time
from typing import List, Callable
from pathlib import Path
from triple_agent.constants.paths import DEBUG_CAPTURES
from triple_agent.classes.capture_debug_pictures import capture_debug_picture
from triple_agent.classes.timeline import TimelineCoherency
from triple_agent.parsing.timeline.parse_timeline import (
TimelineParseException,
TimelineOddNumberScreenshots,
TimelineMismatchedElapsedScreenshots,
parse_screenshot,
remove_overlap,
)
from triple_agent.classes.game import Game
from triple_agent.classes.timeline import Timeline, TimelineEvent
logger = logging.getLogger("triple_agent")
def merge_elapsed_screenshots(
events: List[List[TimelineEvent]],
) -> List[List[TimelineEvent]]:
if len(events) % 2 != 0:
logger.warning("TimelineParseException odd number of screenshots supplied")
raise TimelineOddNumberScreenshots("Odd number of screenshots supplied")
possible_remaining = events[::2]
possible_elapsed = events[1::2]
for rem, elapse in zip(possible_remaining, possible_elapsed):
for _r, _e in zip(rem, elapse):
if (
_r.event != _e.event
or _r.actor != _e.actor
or _r.cast_name != _e.cast_name
and _r.elapsed_time is None
and _e.time is None
):
logger.warning(
"TimelineParseException mismatch between remaining and elapsed screenshots"
)
raise TimelineMismatchedElapsedScreenshots(
"Mismatch between remaining and elapsed screenshots"
)
for rem, elapse in zip(possible_remaining, possible_elapsed):
for _r, _e in zip(rem, elapse):
_r.elapsed_time = _e.elapsed_time
return possible_remaining
def parse_full_timeline(
games: List[Game],
screenshot_iterator: Callable,
pickle_folder: Path,
json_folder: Path,
):
this_game_events = []
input("Hit Enter when ready, parsing begins 10 seconds later\n")
time.sleep(10)
for game_index, ss_index, screenshot, is_last in screenshot_iterator(games):
try:
this_game_events.append(parse_screenshot(screenshot))
if is_last:
if len(this_game_events) == ss_index:
this_game_events = merge_elapsed_screenshots(this_game_events)
flattened_lines = itertools.chain.from_iterable(this_game_events)
events = remove_overlap(flattened_lines)
timeline = Timeline(events)
games[game_index].timeline = timeline
coherency = games[game_index].is_timeline_coherent()
games[game_index].add_start_clock_seconds()
if coherency != TimelineCoherency.Coherent:
logger.error(
"INCOHERENT TIMELINE: %s %s\n",
games[game_index].uuid,
coherency,
)
else:
games[game_index].pickle(pickle_folder)
games[game_index].serialize_to_json(json_folder)
this_game_events = []
except TimelineParseException:
capture_debug_picture(
DEBUG_CAPTURES.joinpath("overall_failures"),
screenshot,
filename=f"{games[game_index].uuid}_{ss_index}.png",
)
this_game_events = []
return games
| true | true |
f73d225872243a0bfe5ee1078f0b8cdbc4f8c113 | 1,087 | py | Python | metaci/testresults/migrations/0006_testresultasset.py | sfdc-qbranch/MetaCI | 78ac0d2bccd2db381998321ebd71029dd5d9ab39 | [
"BSD-3-Clause"
] | 48 | 2018-10-24T14:52:06.000Z | 2022-03-25T21:14:50.000Z | metaci/testresults/migrations/0006_testresultasset.py | sfdc-qbranch/MetaCI | 78ac0d2bccd2db381998321ebd71029dd5d9ab39 | [
"BSD-3-Clause"
] | 2,034 | 2018-10-31T20:59:16.000Z | 2022-03-22T21:38:03.000Z | metaci/testresults/migrations/0006_testresultasset.py | sfdc-qbranch/MetaCI | 78ac0d2bccd2db381998321ebd71029dd5d9ab39 | [
"BSD-3-Clause"
] | 27 | 2018-12-24T18:16:23.000Z | 2021-12-15T17:57:27.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-08-24 14:34
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the TestResultAsset model, a file attachment
    linked to a TestResult (reverse accessor `result.assets`)."""
    dependencies = [("testresults", "0005_testclass_test_type")]
    operations = [
        migrations.CreateModel(
            name="TestResultAsset",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                # NOTE(review): upload_to=b"" stores files at MEDIA_ROOT
                # directly — confirm this is intended
                ("asset", models.FileField(upload_to=b"")),
                (
                    "result",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="assets",
                        to="testresults.TestResult",
                    ),
                ),
            ],
        )
    ]
| 28.605263 | 68 | 0.448022 |
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("testresults", "0005_testclass_test_type")]
operations = [
migrations.CreateModel(
name="TestResultAsset",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("asset", models.FileField(upload_to=b"")),
(
"result",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="assets",
to="testresults.TestResult",
),
),
],
)
]
| true | true |
f73d22af71eb0aa4999ea66aaf3dcb8e08d60436 | 354 | py | Python | other/dingding/dingtalk/api/rest/OapiAttendanceShiftSearchRequest.py | hth945/pytest | 83e2aada82a2c6a0fdd1721320e5bf8b8fd59abc | [
"Apache-2.0"
] | null | null | null | other/dingding/dingtalk/api/rest/OapiAttendanceShiftSearchRequest.py | hth945/pytest | 83e2aada82a2c6a0fdd1721320e5bf8b8fd59abc | [
"Apache-2.0"
] | null | null | null | other/dingding/dingtalk/api/rest/OapiAttendanceShiftSearchRequest.py | hth945/pytest | 83e2aada82a2c6a0fdd1721320e5bf8b8fd59abc | [
"Apache-2.0"
] | null | null | null | '''
Created by auto_sdk on 2019.07.31
'''
from dingtalk.api.base import RestApi
class OapiAttendanceShiftSearchRequest(RestApi):
	"""Auto-generated SDK request for `dingtalk.oapi.attendance.shift.search`
	(POST).  Set the request parameters before executing."""
	def __init__(self,url=None):
		RestApi.__init__(self,url)
		# request parameters, filled in by the caller before execution
		self.op_user_id = None
		self.shift_name = None
	def getHttpMethod(self):
		return 'POST'
	def getapiname(self):
		return 'dingtalk.oapi.attendance.shift.search'
| 22.125 | 48 | 0.759887 | from dingtalk.api.base import RestApi
class OapiAttendanceShiftSearchRequest(RestApi):
def __init__(self,url=None):
RestApi.__init__(self,url)
self.op_user_id = None
self.shift_name = None
def getHttpMethod(self):
return 'POST'
def getapiname(self):
return 'dingtalk.oapi.attendance.shift.search'
| true | true |
f73d22f4908132b0a65462efa00808da6bb3e936 | 5,628 | py | Python | seathru_mono_e2e.py | benelot/sea-thru | e143b75eca7dcb6887a4ecc98691816e6b9d3b69 | [
"MIT"
] | 1 | 2021-06-17T07:16:28.000Z | 2021-06-17T07:16:28.000Z | seathru_mono_e2e.py | benelot/sea-thru | e143b75eca7dcb6887a4ecc98691816e6b9d3b69 | [
"MIT"
] | null | null | null | seathru_mono_e2e.py | benelot/sea-thru | e143b75eca7dcb6887a4ecc98691816e6b9d3b69 | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function
import os
import argparse
import PIL.Image as pil
import rawpy
import torch
from torchvision import transforms
import numpy as np
import deps.monodepth2.networks as networks
from deps.monodepth2.utils import download_model_if_doesnt_exist
from seathru import preprocess_monodepth_depth_map, run_pipeline, estimate_sigma, denoise_tv_chambolle
def run(args):
    """Predict a depth map for a single image and run the Sea-thru pipeline on it.

    Loads the pretrained monodepth2 encoder/decoder named by ``args.model_name``,
    predicts disparity for ``args.image``, normalizes it into a depth map, and
    feeds image + depths through ``run_pipeline``.  The denoised result is saved
    as a PNG to ``args.output`` (formatted with the input file's stem).
    """
    assert args.model_name is not None, "You must specify the --model_name parameter; see README.md for an example"

    # Prefer CUDA unless explicitly disabled.  getattr keeps this working even
    # when the argument namespace has no `no_cuda` attribute: the original code
    # read args.no_cuda directly and crashed with AttributeError on CUDA
    # machines because the flag was never defined by the parser.
    if torch.cuda.is_available() and not getattr(args, "no_cuda", False):
        device = torch.device("cuda")
    else:
        device = torch.device("cpu")

    download_model_if_doesnt_exist(args.model_name)
    model_path = os.path.join("models", args.model_name)
    print("-> Loading model from ", model_path)
    encoder_path = os.path.join(model_path, "encoder.pth")
    depth_decoder_path = os.path.join(model_path, "depth.pth")

    # LOADING PRETRAINED MODEL
    print("   Loading pretrained encoder")
    encoder = networks.ResnetEncoder(18, False)
    loaded_dict_enc = torch.load(encoder_path, map_location=device)

    # extract the height and width of image that this model was trained with
    feed_height = loaded_dict_enc['height']
    feed_width = loaded_dict_enc['width']
    filtered_dict_enc = {k: v for k, v in loaded_dict_enc.items() if k in encoder.state_dict()}
    encoder.load_state_dict(filtered_dict_enc)
    encoder.to(device)
    encoder.eval()

    print("   Loading pretrained decoder")
    depth_decoder = networks.DepthDecoder(
        num_ch_enc=encoder.num_ch_enc, scales=range(4))

    loaded_dict = torch.load(depth_decoder_path, map_location=device)
    depth_decoder.load_state_dict(loaded_dict)
    depth_decoder.to(device)
    depth_decoder.eval()

    # Load image and preprocess (RAW files are demosaiced via rawpy first).
    img = pil.fromarray(rawpy.imread(args.image).postprocess()) if args.raw else pil.open(args.image).convert('RGB')
    # LANCZOS replaces the deprecated ANTIALIAS alias (same filter, removed in
    # Pillow 10); the resize below already used pil.LANCZOS.
    img.thumbnail((args.size, args.size), pil.LANCZOS)
    original_width, original_height = img.size
    input_image = img.resize((feed_width, feed_height), pil.LANCZOS)
    input_image = transforms.ToTensor()(input_image).unsqueeze(0)
    print('Preprocessed image', flush=True)

    # PREDICTION
    input_image = input_image.to(device)
    features = encoder(input_image)
    outputs = depth_decoder(features)

    disp = outputs[("disp", 0)]
    disp_resized = torch.nn.functional.interpolate(
        disp, (original_height, original_width), mode="bilinear", align_corners=False)

    # Normalize the disparity map to [0, 1] before converting it to depths.
    disp_resized_np = disp_resized.squeeze().cpu().detach().numpy()
    mapped_im_depths = ((disp_resized_np - np.min(disp_resized_np)) / (
        np.max(disp_resized_np) - np.min(disp_resized_np))).astype(np.float32)
    print("Processed image", flush=True)
    print('Loading image...', flush=True)
    depths = preprocess_monodepth_depth_map(mapped_im_depths, args.monodepth_add_depth,
                                            args.monodepth_multiply_depth)
    recovered = run_pipeline(np.array(img) / 255.0, depths, args)
    # Light total-variation denoising pass; sigma is damped to preserve detail.
    sigma_est = estimate_sigma(recovered, multichannel=True, average_sigmas=True) / 10.0
    recovered = denoise_tv_chambolle(recovered, sigma_est, multichannel=True)
    im = pil.fromarray((np.round(recovered * 255.0)).astype(np.uint8))

    from pathlib import Path
    p = Path(args.image)
    im.save(args.output.format(p.stem), format='png')
    print('Done.')
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--image', required=True, help='Input image')
    parser.add_argument('--output', default='outputs/{0}.png', help='Output filename')
    parser.add_argument('--f', type=float, default=2.0, help='f value (controls brightness)')
    parser.add_argument('--l', type=float, default=0.5, help='l value (controls balance of attenuation constants)')
    parser.add_argument('--p', type=float, default=0.01, help='p value (controls locality of illuminant map)')
    parser.add_argument('--min-depth', type=float, default=0.0,
                        help='Minimum depth value to use in estimations (range 0-1)')
    parser.add_argument('--max-depth', type=float, default=1.0,
                        help='Replacement depth percentile value for invalid depths (range 0-1)')
    parser.add_argument('--spread-data-fraction', type=float, default=0.05,
                        help='Require data to be this fraction of depth range away from each other in attenuation estimations')
    parser.add_argument('--size', type=int, default=320, help='Size to output')
    parser.add_argument('--monodepth-add-depth', type=float, default=2.0, help='Additive value for monodepth map')
    parser.add_argument('--monodepth-multiply-depth', type=float, default=10.0,
                        help='Multiplicative value for monodepth map')
    parser.add_argument('--model-name', type=str, default="mono_1024x320",
                        help='monodepth model name')
    parser.add_argument('--output-graphs', action='store_true', help='Output graphs')
    parser.add_argument('--result-graphs', action='store_true', help='Result graphs')
    parser.add_argument('--raw', action='store_true', help='RAW image')
    # Bug fix: run() reads args.no_cuda, but no such flag was ever defined,
    # which raised AttributeError whenever CUDA was available.
    parser.add_argument('--no-cuda', action='store_true',
                        help='Force CPU execution even if CUDA is available')
    args = parser.parse_args()
    run(args)
| 46.9 | 127 | 0.706645 | from __future__ import absolute_import, division, print_function
import os
import argparse
import PIL.Image as pil
import rawpy
import torch
from torchvision import transforms
import numpy as np
import deps.monodepth2.networks as networks
from deps.monodepth2.utils import download_model_if_doesnt_exist
from seathru import preprocess_monodepth_depth_map, run_pipeline, estimate_sigma, denoise_tv_chambolle
def run(args):
assert args.model_name is not None, "You must specify the --model_name parameter; see README.md for an example"
if torch.cuda.is_available() and not args.no_cuda:
device = torch.device("cuda")
else:
device = torch.device("cpu")
download_model_if_doesnt_exist(args.model_name)
model_path = os.path.join("models", args.model_name)
print("-> Loading model from ", model_path)
encoder_path = os.path.join(model_path, "encoder.pth")
depth_decoder_path = os.path.join(model_path, "depth.pth")
print(" Loading pretrained encoder")
encoder = networks.ResnetEncoder(18, False)
loaded_dict_enc = torch.load(encoder_path, map_location=device)
feed_height = loaded_dict_enc['height']
feed_width = loaded_dict_enc['width']
filtered_dict_enc = {k: v for k, v in loaded_dict_enc.items() if k in encoder.state_dict()}
encoder.load_state_dict(filtered_dict_enc)
encoder.to(device)
encoder.eval()
print(" Loading pretrained decoder")
depth_decoder = networks.DepthDecoder(
num_ch_enc=encoder.num_ch_enc, scales=range(4))
loaded_dict = torch.load(depth_decoder_path, map_location=device)
depth_decoder.load_state_dict(loaded_dict)
depth_decoder.to(device)
depth_decoder.eval()
img = pil.fromarray(rawpy.imread(args.image).postprocess()) if args.raw else pil.open(args.image).convert('RGB')
img.thumbnail((args.size, args.size), pil.ANTIALIAS)
original_width, original_height = img.size
input_image = img.resize((feed_width, feed_height), pil.LANCZOS)
input_image = transforms.ToTensor()(input_image).unsqueeze(0)
print('Preprocessed image', flush=True)
input_image = input_image.to(device)
features = encoder(input_image)
outputs = depth_decoder(features)
disp = outputs[("disp", 0)]
disp_resized = torch.nn.functional.interpolate(
disp, (original_height, original_width), mode="bilinear", align_corners=False)
disp_resized_np = disp_resized.squeeze().cpu().detach().numpy()
mapped_im_depths = ((disp_resized_np - np.min(disp_resized_np)) / (
np.max(disp_resized_np) - np.min(disp_resized_np))).astype(np.float32)
print("Processed image", flush=True)
print('Loading image...', flush=True)
depths = preprocess_monodepth_depth_map(mapped_im_depths, args.monodepth_add_depth,
args.monodepth_multiply_depth)
recovered = run_pipeline(np.array(img) / 255.0, depths, args)
sigma_est = estimate_sigma(recovered, multichannel=True, average_sigmas=True) / 10.0
recovered = denoise_tv_chambolle(recovered, sigma_est, multichannel=True)
im = pil.fromarray((np.round(recovered * 255.0)).astype(np.uint8))
from pathlib import Path
p = Path(args.image)
im.save(args.output.format(p.stem), format='png')
print('Done.')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--image', required=True, help='Input image')
parser.add_argument('--output', default='outputs/{0}.png', help='Output filename')
parser.add_argument('--f', type=float, default=2.0, help='f value (controls brightness)')
parser.add_argument('--l', type=float, default=0.5, help='l value (controls balance of attenuation constants)')
parser.add_argument('--p', type=float, default=0.01, help='p value (controls locality of illuminant map)')
parser.add_argument('--min-depth', type=float, default=0.0,
help='Minimum depth value to use in estimations (range 0-1)')
parser.add_argument('--max-depth', type=float, default=1.0,
help='Replacement depth percentile value for invalid depths (range 0-1)')
parser.add_argument('--spread-data-fraction', type=float, default=0.05,
help='Require data to be this fraction of depth range away from each other in attenuation estimations')
parser.add_argument('--size', type=int, default=320, help='Size to output')
parser.add_argument('--monodepth-add-depth', type=float, default=2.0, help='Additive value for monodepth map')
parser.add_argument('--monodepth-multiply-depth', type=float, default=10.0,
help='Multiplicative value for monodepth map')
parser.add_argument('--model-name', type=str, default="mono_1024x320",
help='monodepth model name')
parser.add_argument('--output-graphs', action='store_true', help='Output graphs')
parser.add_argument('--result-graphs', action='store_true', help='Result graphs')
parser.add_argument('--raw', action='store_true', help='RAW image')
args = parser.parse_args()
run(args)
| true | true |
f73d23c151e6c4812ac5f37754dfe25ad79c6e32 | 1,526 | py | Python | tests/test_fileio.py | richardt94/landshark | e4f347857a750d050d2cd568c6bcbd8f4a6c1f7f | [
"Apache-2.0"
] | 10 | 2019-03-05T23:53:58.000Z | 2021-12-17T08:27:05.000Z | tests/test_fileio.py | richardt94/landshark | e4f347857a750d050d2cd568c6bcbd8f4a6c1f7f | [
"Apache-2.0"
] | 7 | 2019-03-05T05:39:02.000Z | 2020-02-03T01:10:40.000Z | tests/test_fileio.py | richardt94/landshark | e4f347857a750d050d2cd568c6bcbd8f4a6c1f7f | [
"Apache-2.0"
] | 8 | 2019-03-23T22:55:25.000Z | 2021-01-12T05:14:31.000Z | """Test fileio modeule."""
# Copyright 2019 CSIRO (Data61)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from landshark.fileio import tifnames
# Every .tif fixture in the integration data set: 2 categorical rasters,
# 2 continuous rasters, then 4 rasters without missing values.
tifs = [
    "./integration/data/categorical/PM_Lithology_Unit_Type_missing.tif",
    "./integration/data/categorical/Soil_ASC_missing.tif",
    "./integration/data/continuous/SirSamualAusGravity2011_ll_missing.tif",
    "./integration/data/continuous/SirSamualAusMagV6_rtp_ll_missing.tif",
    "./integration/data/nonmissing/PM_Lithology_Unit_Type.tif",
    "./integration/data/nonmissing/SirSamualAusGravity2011_ll.tif",
    "./integration/data/nonmissing/SirSamualAusMagV6_rtp_ll.tif",
    "./integration/data/nonmissing/Soil_ASC.tif",
]

# (directories to scan, expected tif files) pairs used to parametrize the test;
# the slices rely on the grouping order of `tifs` above.
dirs_tifs = [
    (["./integration/data"], tifs),
    (["./integration/data/categorical/"], tifs[:2]),
    (["./integration/data/continuous/",
      "./integration/data/categorical/"], tifs[:4]),
]
@pytest.mark.parametrize("dirs,tifs", dirs_tifs)
def test_tifnames(dirs, tifs):
    """tifnames must discover exactly the expected .tif files (order-insensitive)."""
    found = set(tifnames(dirs))
    assert found == set(tifs)
| 35.488372 | 75 | 0.739843 |
import pytest
from landshark.fileio import tifnames
tifs = [
"./integration/data/categorical/PM_Lithology_Unit_Type_missing.tif",
"./integration/data/categorical/Soil_ASC_missing.tif",
"./integration/data/continuous/SirSamualAusGravity2011_ll_missing.tif",
"./integration/data/continuous/SirSamualAusMagV6_rtp_ll_missing.tif",
"./integration/data/nonmissing/PM_Lithology_Unit_Type.tif",
"./integration/data/nonmissing/SirSamualAusGravity2011_ll.tif",
"./integration/data/nonmissing/SirSamualAusMagV6_rtp_ll.tif",
"./integration/data/nonmissing/Soil_ASC.tif",
]
dirs_tifs = [
(["./integration/data"], tifs),
(["./integration/data/categorical/"], tifs[:2]),
(["./integration/data/continuous/",
"./integration/data/categorical/"], tifs[:4]),
]
@pytest.mark.parametrize("dirs,tifs", dirs_tifs)
def test_tifnames(dirs, tifs):
assert set(tifnames(dirs)) == set(tifs)
| true | true |
f73d241507e9e9cfd2e55c24bb01c66368138271 | 4,826 | py | Python | bc4py/user/api/ep_system.py | namuyan/bc4py | 6484d356096261d0d57e9e1f5ffeae1f9a9865f3 | [
"MIT"
] | 12 | 2018-09-19T14:02:09.000Z | 2020-01-27T16:20:14.000Z | bc4py/user/api/ep_system.py | kumacoinproject/bc4py | 6484d356096261d0d57e9e1f5ffeae1f9a9865f3 | [
"MIT"
] | 1 | 2020-03-19T16:57:30.000Z | 2020-03-19T16:57:30.000Z | bc4py/user/api/ep_system.py | namuyan/bc4py | 6484d356096261d0d57e9e1f5ffeae1f9a9865f3 | [
"MIT"
] | 6 | 2018-11-13T17:20:14.000Z | 2020-02-15T11:46:52.000Z | from bc4py import __version__, __chain_version__, __message__
from bc4py.config import C, V, P
from bc4py.chain.utils import GompertzCurve, DEFAULT_TARGET
from bc4py.chain.difficulty import get_bits_by_hash, get_bias_by_hash
from bc4py.database import obj
from bc4py.user.api.utils import error_response, local_address
from bc4py.user.generate import generating_threads
from time import time
import p2p_python.config
import p2p_python
# 2**256 - 1 as a float: the upper bound of the hash space, used to turn a
# PoW target into an expected hashrate in chain_info().
MAX_256_FLOAT = float(0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff)
# Process start time in unix seconds; reported by system_info().
start_time = int(time())
F_ADD_CACHE_INFO = False  # expose lru_cache stats in chain_info() to help tune cache sizes
__api_version__ = '0.0.2'  # REST API version reported to clients
async def chain_info():
    """
    This end-point shows the blockchain status of the node:
    best block, per-consensus mining difficulty, checkpoint and coin supply.
    """
    try:
        best_height = obj.chain_builder.best_block.height
        best_block = obj.chain_builder.best_block
        best_block_info = best_block.getinfo()
        best_block_info['hex'] = best_block.b.hex()
        # Checkpoint: the block immediately below the root of the in-memory best chain.
        old_block_height = obj.chain_builder.best_chain[0].height - 1
        old_block_hash = obj.chain_builder.get_block_hash(old_block_height).hex()
        data = {'best': best_block_info}
        difficulty = dict()
        # One difficulty entry per enabled consensus algorithm.
        for consensus, ratio in V.BLOCK_CONSENSUSES.items():
            name = C.consensus2name[consensus]
            bits, target = get_bits_by_hash(previous_hash=best_block.hash, consensus=consensus)
            target = float(target)
            # `ratio` is presumably this algorithm's percentage share of blocks,
            # so its effective block interval scales inversely -- TODO confirm.
            block_time = round(V.BLOCK_TIME_SPAN / ratio * 100)
            diff = round(DEFAULT_TARGET / target, 8)
            bias = get_bias_by_hash(previous_hash=best_block.previous_hash, consensus=consensus)
            difficulty[name] = {
                'number': consensus,
                'bits': bits.to_bytes(4, 'big').hex(),
                'diff': round(diff, 8),
                'bias': round(bias, 8),
                'fixed_diff': round(diff / bias, 8),
                # expected rate (kilo-hashes/s) needed to hit `target` once per interval
                'hashrate(kh/s)': round((MAX_256_FLOAT/target) / block_time / 1000, 3)
            }
        data['mining'] = difficulty
        data['size'] = best_block.size
        data['checkpoint'] = {'height': old_block_height, 'blockhash': old_block_hash}
        data['money_supply'] = GompertzCurve.calc_total_supply(best_height)
        data['total_supply'] = GompertzCurve.k
        if F_ADD_CACHE_INFO:
            # Debug-only: expose lru_cache statistics to tune cache sizes.
            data['cache'] = {
                'get_bits_by_hash': str(get_bits_by_hash.cache_info()),
                'get_bias_by_hash': str(get_bias_by_hash.cache_info())
            }
        return data
    except Exception:
        return error_response()
async def chain_fork_info():
    """
    This end-point shows the fork state: canonical chain, orphan blocks and root.
    """
    try:
        canonical = obj.chain_builder.best_chain
        main_chain = [blk.getinfo() for blk in canonical]
        orphans = [blk.getinfo() for blk in obj.chain_builder.chain.values() if blk not in canonical]
        orphans.sort(key=lambda info: info['height'])
        return {
            'main': main_chain,
            'orphan': orphans,
            'root': obj.chain_builder.root_block.getinfo(),
        }
    except Exception:
        return error_response()
async def system_info():
    """
    This end-point shows public system info (versions, uptime, peer count).
    """
    # Key order is kept identical to the original so the JSON layout is stable.
    return dict(
        network_ver=p2p_python.config.V.NETWORK_VER,
        system_ver=__version__,
        api_ver=__api_version__,
        chain_ver=__chain_version__,
        p2p_ver=p2p_python.__version__,
        message=__message__,
        booting=P.F_NOW_BOOTING,
        connections=len(V.P2P_OBJ.core.user),
        access_time=int(time()),
        start_time=start_time,
        genesis_time=V.BLOCK_GENESIS_TIME,
    )
async def system_private_info():
    """
    This end-point shows private system info (paths, mempool, key material).
    """
    try:
        return dict(
            branch=V.BRANCH_NAME,
            source_hash=V.SOURCE_HASH,
            directory=V.DB_HOME_DIR,
            unconfirmed=[h.hex() for h in obj.tx_builder.memory_pool.list_all_hash()],
            generate_threads=[str(t) for t in generating_threads],
            local_address=list(local_address),
            prefetch_address=len(obj.account_builder.pre_fetch_addr),
            extended_key=repr(V.EXTENDED_KEY_OBJ),
        )
    except Exception:
        return error_response()
async def network_info():
    """
    This end-point shows network connection info (our header plus one entry per peer).
    """
    try:
        info = V.P2P_OBJ.core.get_my_user_header()
        info['peers'] = [user.getinfo() for user in V.P2P_OBJ.core.user]
        return info
    except Exception:
        return error_response()
# Public names exported by `from ... ep_system import *`.
__all__ = [
    "__api_version__",
    "chain_info",
    "chain_fork_info",
    "system_info",
    "system_private_info",
    "network_info",
]
| 34.471429 | 113 | 0.63448 | from bc4py import __version__, __chain_version__, __message__
from bc4py.config import C, V, P
from bc4py.chain.utils import GompertzCurve, DEFAULT_TARGET
from bc4py.chain.difficulty import get_bits_by_hash, get_bias_by_hash
from bc4py.database import obj
from bc4py.user.api.utils import error_response, local_address
from bc4py.user.generate import generating_threads
from time import time
import p2p_python.config
import p2p_python
MAX_256_FLOAT = float(0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff)
start_time = int(time())
F_ADD_CACHE_INFO = False
__api_version__ = '0.0.2'
async def chain_info():
try:
best_height = obj.chain_builder.best_block.height
best_block = obj.chain_builder.best_block
best_block_info = best_block.getinfo()
best_block_info['hex'] = best_block.b.hex()
old_block_height = obj.chain_builder.best_chain[0].height - 1
old_block_hash = obj.chain_builder.get_block_hash(old_block_height).hex()
data = {'best': best_block_info}
difficulty = dict()
for consensus, ratio in V.BLOCK_CONSENSUSES.items():
name = C.consensus2name[consensus]
bits, target = get_bits_by_hash(previous_hash=best_block.hash, consensus=consensus)
target = float(target)
block_time = round(V.BLOCK_TIME_SPAN / ratio * 100)
diff = round(DEFAULT_TARGET / target, 8)
bias = get_bias_by_hash(previous_hash=best_block.previous_hash, consensus=consensus)
difficulty[name] = {
'number': consensus,
'bits': bits.to_bytes(4, 'big').hex(),
'diff': round(diff, 8),
'bias': round(bias, 8),
'fixed_diff': round(diff / bias, 8),
'hashrate(kh/s)': round((MAX_256_FLOAT/target) / block_time / 1000, 3)
}
data['mining'] = difficulty
data['size'] = best_block.size
data['checkpoint'] = {'height': old_block_height, 'blockhash': old_block_hash}
data['money_supply'] = GompertzCurve.calc_total_supply(best_height)
data['total_supply'] = GompertzCurve.k
if F_ADD_CACHE_INFO:
data['cache'] = {
'get_bits_by_hash': str(get_bits_by_hash.cache_info()),
'get_bias_by_hash': str(get_bias_by_hash.cache_info())
}
return data
except Exception:
return error_response()
async def chain_fork_info():
try:
best_chain = obj.chain_builder.best_chain
main_chain = [block.getinfo() for block in best_chain]
orphan_chain = [block.getinfo() for block in obj.chain_builder.chain.values() if block not in best_chain]
return {
'main': main_chain,
'orphan': sorted(orphan_chain, key=lambda x: x['height']),
'root': obj.chain_builder.root_block.getinfo(),
}
except Exception:
return error_response()
async def system_info():
return {
'network_ver': p2p_python.config.V.NETWORK_VER,
'system_ver': __version__,
'api_ver': __api_version__,
'chain_ver': __chain_version__,
'p2p_ver': p2p_python.__version__,
'message': __message__,
'booting': P.F_NOW_BOOTING,
'connections': len(V.P2P_OBJ.core.user),
'access_time': int(time()),
'start_time': start_time,
'genesis_time': V.BLOCK_GENESIS_TIME,
}
async def system_private_info():
try:
return {
'branch': V.BRANCH_NAME,
'source_hash': V.SOURCE_HASH,
'directory': V.DB_HOME_DIR,
'unconfirmed': [txhash.hex() for txhash in obj.tx_builder.memory_pool.list_all_hash()],
'generate_threads': [str(s) for s in generating_threads],
'local_address': list(local_address),
'prefetch_address': len(obj.account_builder.pre_fetch_addr),
'extended_key': repr(V.EXTENDED_KEY_OBJ),
}
except Exception:
return error_response()
async def network_info():
try:
peers = list()
data = V.P2P_OBJ.core.get_my_user_header()
for user in V.P2P_OBJ.core.user:
peers.append(user.getinfo())
data['peers'] = peers
return data
except Exception:
return error_response()
__all__ = [
"__api_version__",
"chain_info",
"chain_fork_info",
"system_info",
"system_private_info",
"network_info",
]
| true | true |
f73d246dbd7ffe28388c48dca6f1393c92607979 | 835 | py | Python | schemas/emergency_contact.py | whiletrace/dwellinglybackend | e766b3d612b4c92fd337b82498ab8ef68bd95e1f | [
"MIT"
] | 15 | 2020-07-09T20:51:09.000Z | 2021-11-28T21:59:02.000Z | schemas/emergency_contact.py | codeforpdx/dwellinglybackend | 92fee6d19a68ae00750927b8700eaa7195b57668 | [
"MIT"
] | 148 | 2020-03-28T22:10:30.000Z | 2021-12-19T09:22:59.000Z | schemas/emergency_contact.py | whiletrace/dwellinglybackend | e766b3d612b4c92fd337b82498ab8ef68bd95e1f | [
"MIT"
] | 30 | 2020-03-12T02:31:27.000Z | 2021-07-29T02:40:36.000Z | from ma import ma
from models.emergency_contact import EmergencyContactModel
from marshmallow import fields, validates, ValidationError
from schemas.contact_number import ContactNumberSchema
class EmergencyContactSchema(ma.SQLAlchemyAutoSchema):
    """Schema for EmergencyContactModel with uniqueness and non-empty checks."""

    class Meta:
        model = EmergencyContactModel

    contact_numbers = fields.List(fields.Nested(ContactNumberSchema), required=True)

    @validates("name")
    def validate_name(self, name):
        """Reject a name that already belongs to an emergency contact."""
        if EmergencyContactModel.find_by_name(name):
            raise ValidationError(f"{name} is already an emergency contact")

    @validates("contact_numbers")
    def validate_contact_numbers(self, numbers):
        """Require at least one contact number."""
        if not numbers:
            raise ValidationError(
                "Emergency contacts must have at least one contact number."
            )
| 33.4 | 84 | 0.731737 | from ma import ma
from models.emergency_contact import EmergencyContactModel
from marshmallow import fields, validates, ValidationError
from schemas.contact_number import ContactNumberSchema
class EmergencyContactSchema(ma.SQLAlchemyAutoSchema):
class Meta:
model = EmergencyContactModel
contact_numbers = fields.List(fields.Nested(ContactNumberSchema), required=True)
@validates("name")
def validate_name(self, value):
if EmergencyContactModel.find_by_name(value):
raise ValidationError(f"{value} is already an emergency contact")
@validates("contact_numbers")
def validate_contact_numbers(self, contact_numbers):
if len(contact_numbers) < 1:
raise ValidationError(
"Emergency contacts must have at least one contact number."
)
| true | true |
f73d267f5f627caa7450779c2a8f2fceb1113360 | 1,664 | py | Python | ch04/gradient_2d.py | Yuansurex/deep_learning | b3597b414fb6fed2fbb7af8c8ac0b329c9e77417 | [
"MIT"
] | null | null | null | ch04/gradient_2d.py | Yuansurex/deep_learning | b3597b414fb6fed2fbb7af8c8ac0b329c9e77417 | [
"MIT"
] | null | null | null | ch04/gradient_2d.py | Yuansurex/deep_learning | b3597b414fb6fed2fbb7af8c8ac0b329c9e77417 | [
"MIT"
] | null | null | null | # coding: utf-8
# cf.http://d.hatena.ne.jp/white_wheels/20100327/p3
import numpy as np
import matplotlib.pylab as plt
from mpl_toolkits.mplot3d import Axes3D
def _numerical_gradient_no_batch(f, x):
    """Central-difference gradient of ``f`` at the 1-D point ``x``.

    ``x`` is perturbed in place during evaluation and restored afterwards.
    """
    h = 1e-4  # finite-difference step
    grad = np.zeros_like(x)
    for i in range(x.size):
        saved = x[i]
        x[i] = float(saved) + h
        fwd = f(x)  # f(x + h * e_i)
        x[i] = saved - h
        bwd = f(x)  # f(x - h * e_i)
        grad[i] = (fwd - bwd) / (2 * h)
        x[i] = saved  # restore the original coordinate
    return grad


def numerical_gradient(f, X):
    """Gradient of ``f`` at ``X``; a 2-D input is treated as a batch of rows."""
    if X.ndim == 1:
        return _numerical_gradient_no_batch(f, X)
    grad = np.zeros_like(X)
    for row_idx, row in enumerate(X):
        grad[row_idx] = _numerical_gradient_no_batch(f, row)
    return grad
def function_2(x):
    """Sum of squares; summed per row when ``x`` is a 2-D batch."""
    axis = None if x.ndim == 1 else 1
    return np.sum(x ** 2, axis=axis)
def tangent_line(f, x):
    """Return ``t -> f'(x) * t + (f(x) - f'(x) * x)``, the tangent of ``f`` at ``x``.

    The numerically estimated slope is also printed for inspection.
    """
    slope = numerical_gradient(f, x)
    print(slope)
    intercept = f(x) - slope * x
    return lambda t: slope * t + intercept
if __name__ == '__main__':
    # Sample the square [-2, 2.25) on both axes at 0.25 steps (18 points each).
    x0 = np.arange(-2, 2.5, 0.25)
    x1 = np.arange(-2, 2.5, 0.25)
    X, Y = np.meshgrid(x0, x1)  # 18x18 grid of sample points (324 in total)
    X = X.flatten()  # shape (324,)
    Y = Y.flatten()  # shape (324,)
    # Stack the coordinates into one (2, 324) array and evaluate the gradient
    # of f(x0, x1) = x0**2 + x1**2; each partial derivative is 2 * coordinate.
    grad = numerical_gradient(function_2, np.array([X, Y]))
    plt.figure()
    # Negative gradient field: arrows point towards the minimum at the origin.
    plt.quiver(X, Y, -grad[0], -grad[1], angles="xy",color="#666666")  # alt params: headwidth=10,scale=40,color="#444444"
    plt.xlim([-2, 2])
    plt.ylim([-2, 2])
    plt.xlabel('x0')
    plt.ylabel('x1')
    plt.grid()
    plt.legend()  # NOTE(review): no labeled artists, so this emits a warning -- harmless
    plt.draw()
plt.show() | 24.115942 | 121 | 0.538462 |
import numpy as np
import matplotlib.pylab as plt
from mpl_toolkits.mplot3d import Axes3D
def _numerical_gradient_no_batch(f, x):
h = 1e-4
grad = np.zeros_like(x)
for idx in range(x.size):
tmp_val = x[idx]
x[idx] = float(tmp_val) + h
fxh1 = f(x)
x[idx] = tmp_val - h
fxh2 = f(x)
grad[idx] = (fxh1 - fxh2) / (2*h)
x[idx] = tmp_val
return grad
def numerical_gradient(f, X):
if X.ndim == 1:
return _numerical_gradient_no_batch(f, X)
else:
grad = np.zeros_like(X)
for idx, x in enumerate(X):
grad[idx] = _numerical_gradient_no_batch(f, x)
return grad
def function_2(x):
if x.ndim == 1:
return np.sum(x**2)
else:
return np.sum(x**2, axis=1)
def tangent_line(f, x):
d = numerical_gradient(f, x)
print(d)
y = f(x) - d*x
return lambda t: d*t + y
if __name__ == '__main__':
x0 = np.arange(-2, 2.5, 0.25)
x1 = np.arange(-2, 2.5, 0.25)
X, Y = np.meshgrid(x0, x1)
X = X.flatten()
Y = Y.flatten()
grad = numerical_gradient(function_2, np.array([X, Y]))
plt.figure()
plt.quiver(X, Y, -grad[0], -grad[1], angles="xy",color="#666666")
plt.xlim([-2, 2])
plt.ylim([-2, 2])
plt.xlabel('x0')
plt.ylabel('x1')
plt.grid()
plt.legend()
plt.draw()
plt.show() | true | true |
f73d268b360ce2bfb09eb113e5b444ab9cac137f | 756 | py | Python | python-algorithm/leetcode/problem_1765.py | isudox/leetcode-solution | 60085e64deaf396a171367affc94b18114565c43 | [
"MIT"
] | 5 | 2017-06-11T09:19:34.000Z | 2019-01-16T16:58:31.000Z | python-algorithm/leetcode/problem_1765.py | isudox/leetcode-solution | 60085e64deaf396a171367affc94b18114565c43 | [
"MIT"
] | null | null | null | python-algorithm/leetcode/problem_1765.py | isudox/leetcode-solution | 60085e64deaf396a171367affc94b18114565c43 | [
"MIT"
] | 1 | 2019-03-02T15:50:43.000Z | 2019-03-02T15:50:43.000Z | """1765. Map of Highest Peak
https://leetcode.com/problems/map-of-highest-peak/
"""
from typing import List
class Solution:
    def highest_peak(self, is_water: List[List[int]]) -> List[List[int]]:
        """Assign each cell the highest possible height such that water cells
        are 0 and adjacent cells differ by at most 1 (multi-source BFS).

        Returns a new matrix of heights; ``is_water`` is not modified.
        """
        from collections import deque  # O(1) popleft; list.pop(0) made the original O(V**2)

        rows, cols = len(is_water), len(is_water[0])
        heights = [[-1] * cols for _ in range(rows)]  # -1 marks "not visited yet"
        frontier = deque()
        # Seed the queue with every water cell at height 0.
        for r in range(rows):
            for c in range(cols):
                if is_water[r][c]:
                    heights[r][c] = 0
                    frontier.append((r, c))
        # Expand outward one ring at a time; FIFO order guarantees each land
        # cell is first reached at its minimum distance from water.
        while frontier:
            r, c = frontier.popleft()
            for nr, nc in ((r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)):
                if 0 <= nr < rows and 0 <= nc < cols and heights[nr][nc] == -1:
                    heights[nr][nc] = heights[r][c] + 1
                    frontier.append((nr, nc))
        return heights
| 31.5 | 73 | 0.419312 | from typing import List
class Solution:
def highest_peak(self, is_water: List[List[int]]) -> List[List[int]]:
m, n = len(is_water), len(is_water[0])
ans = [[-1] * n for _ in range(m)]
q = []
for i in range(m):
for j in range(n):
if is_water[i][j]:
ans[i][j] = 0
q.append([i, j])
while q:
i, j = q.pop(0)
for x, y in [[i - 1, j], [i + 1, j], [i, j - 1], [i, j + 1]]:
if 0 <= x < m and 0 <= y < n and ans[x][y] == -1:
ans[x][y] = ans[i][j] + 1
q.append([x, y])
return ans
| true | true |
f73d2b3339e86cb727a6a149211b3c8ff0e9dfcb | 621 | py | Python | divide.py | rharyadi/cryptophrenia | 35fa4b0e4c24079909d73c11ee2fd5398a1ba931 | [
"WTFPL"
] | null | null | null | divide.py | rharyadi/cryptophrenia | 35fa4b0e4c24079909d73c11ee2fd5398a1ba931 | [
"WTFPL"
] | null | null | null | divide.py | rharyadi/cryptophrenia | 35fa4b0e4c24079909d73c11ee2fd5398a1ba931 | [
"WTFPL"
] | null | null | null | #!/usr/bin/env python3
"""
Dividing integer numbers and
returning list [q, r]. Which are quotient and remainder, respectively.
"""
from sys import argv
from errorhandler import zerodiv, inputerror
def divide(a,b):
try:
q = a // b
except ZeroDivisionError:
zerodiv("b")
r = a - b*q
return [q,r]
if __name__ == "__main__":
if len(argv) != 3:
inputerror(2)
try:
a, b = int(argv[1]), int(argv[2])
except ValueError:
inputerror(2)
[q,r] = divide(a,b)
print("""
a = %d, b = %d, q = %d, r = %d
satisfy a = b*q + r
"""
%(a,b,q,r))
| 18.264706 | 70 | 0.542673 |
from sys import argv
from errorhandler import zerodiv, inputerror
def divide(a,b):
try:
q = a // b
except ZeroDivisionError:
zerodiv("b")
r = a - b*q
return [q,r]
if __name__ == "__main__":
if len(argv) != 3:
inputerror(2)
try:
a, b = int(argv[1]), int(argv[2])
except ValueError:
inputerror(2)
[q,r] = divide(a,b)
print("""
a = %d, b = %d, q = %d, r = %d
satisfy a = b*q + r
"""
%(a,b,q,r))
| true | true |
f73d2c35e2773ea1a9629cb3506e18df9ed0bb46 | 464 | py | Python | data/scripts/templates/object/tangible/lair/base/shared_poi_all_lair_warren_large_fog_mustard.py | obi-two/GameServer | 7d37024e2291a97d49522610cd8f1dbe5666afc2 | [
"MIT"
] | 20 | 2015-02-23T15:11:56.000Z | 2022-03-18T20:56:48.000Z | data/scripts/templates/object/tangible/lair/base/shared_poi_all_lair_warren_large_fog_mustard.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | null | null | null | data/scripts/templates/object/tangible/lair/base/shared_poi_all_lair_warren_large_fog_mustard.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | 20 | 2015-04-04T16:35:59.000Z | 2022-03-24T14:54:37.000Z | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
	"""Build the large mustard-fog Warren lair POI tangible object."""
	result = Tangible()
	result.template = "object/tangible/lair/base/shared_poi_all_lair_warren_large_fog_mustard.iff"
	result.attribute_template_id = -1
	# String-table name reference -- presumably (stf file, key) -- TODO confirm.
	result.stfName("lair_n","warren")
	#### BEGIN MODIFICATIONS ####
	#### END MODIFICATIONS ####
return result | 27.294118 | 95 | 0.734914 | true | true | |
f73d2d574ee01a343eb3a8f97c059592f8cd1389 | 4,004 | py | Python | doc/display/e-Paper-master/RaspberryPi_JetsonNano/python/examples/epd_7in5bc_test.py | bartoszp1992/Tacho2 | b8bf0928775c648b6191b7d90890d09bd87799f0 | [
"MIT"
] | null | null | null | doc/display/e-Paper-master/RaspberryPi_JetsonNano/python/examples/epd_7in5bc_test.py | bartoszp1992/Tacho2 | b8bf0928775c648b6191b7d90890d09bd87799f0 | [
"MIT"
] | null | null | null | doc/display/e-Paper-master/RaspberryPi_JetsonNano/python/examples/epd_7in5bc_test.py | bartoszp1992/Tacho2 | b8bf0928775c648b6191b7d90890d09bd87799f0 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding:utf-8 -*-
import sys
import os
# Resource folders live two directory levels above this example script.
picdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'pic')
libdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'lib')
# Make the bundled waveshare_epd driver importable when run from the source tree.
if os.path.exists(libdir):
    sys.path.append(libdir)
import logging
from waveshare_epd import epd7in5bc
import time
from PIL import Image,ImageDraw,ImageFont
import traceback
logging.basicConfig(level=logging.DEBUG)
try:
logging.info("epd7in5bc Demo")
epd = epd7in5bc.EPD()
logging.info("init and Clear")
epd.init()
epd.Clear()
time.sleep(1)
# Drawing on the image
logging.info("Drawing")
font24 = ImageFont.truetype(os.path.join(picdir, 'Font.ttc'), 24)
font18 = ImageFont.truetype(os.path.join(picdir, 'Font.ttc'), 18)
# Drawing on the Horizontal image
logging.info("1.Drawing on the Horizontal image...")
HBlackimage = Image.new('1', (epd.width, epd.height), 255) # 298*126
HRYimage = Image.new('1', (epd.width, epd.height), 255) # 298*126 ryimage: red or yellow image
drawblack = ImageDraw.Draw(HBlackimage)
drawry = ImageDraw.Draw(HRYimage)
drawblack.text((10, 0), 'hello world', font = font24, fill = 0)
drawblack.text((10, 20), '7.5inch e-Paper bc', font = font24, fill = 0)
drawblack.text((150, 0), u'微雪电子', font = font24, fill = 0)
drawblack.line((20, 50, 70, 100), fill = 0)
drawblack.line((70, 50, 20, 100), fill = 0)
drawblack.rectangle((20, 50, 70, 100), outline = 0)
drawry.line((165, 50, 165, 100), fill = 0)
drawry.line((140, 75, 190, 75), fill = 0)
drawry.arc((140, 50, 190, 100), 0, 360, fill = 0)
drawry.rectangle((80, 50, 130, 100), fill = 0)
drawry.chord((200, 50, 250, 100), 0, 360, fill = 0)
epd.display(epd.getbuffer(HBlackimage), epd.getbuffer(HRYimage))
time.sleep(2)
# Drawing on the Vertical image
logging.info("2.Drawing on the Vertical image...")
LBlackimage = Image.new('1', (epd.height, epd.width), 255) # 126*298
LRYimage = Image.new('1', (epd.height, epd.width), 255) # 126*298
drawblack = ImageDraw.Draw(LBlackimage)
drawry = ImageDraw.Draw(LRYimage)
drawblack.text((2, 0), 'hello world', font = font18, fill = 0)
drawblack.text((2, 20), '7.5inch epd bc', font = font18, fill = 0)
drawblack.text((20, 50), u'微雪电子', font = font18, fill = 0)
drawblack.line((10, 90, 60, 140), fill = 0)
drawblack.line((60, 90, 10, 140), fill = 0)
drawblack.rectangle((10, 90, 60, 140), outline = 0)
drawry.line((95, 90, 95, 140), fill = 0)
drawry.line((70, 115, 120, 115), fill = 0)
drawry.arc((70, 90, 120, 140), 0, 360, fill = 0)
drawry.rectangle((10, 150, 60, 200), fill = 0)
drawry.chord((70, 150, 120, 200), 0, 360, fill = 0)
epd.display(epd.getbuffer(LBlackimage), epd.getbuffer(LRYimage))
time.sleep(2)
logging.info("3.read bmp file")
HBlackimage = Image.open(os.path.join(picdir, '7in5b-b.bmp'))
HRYimage = Image.open(os.path.join(picdir, '7in5b-r.bmp'))
# HBlackimage = Image.open(os.path.join(picdir, '7in5c-b.bmp'))
# HRYimage = Image.open(os.path.join(picdir, '7in5c-r.bmp'))
epd.display(epd.getbuffer(HBlackimage), epd.getbuffer(HRYimage))
time.sleep(2)
logging.info("4.read bmp file on window")
blackimage1 = Image.new('1', (epd.width, epd.height), 255) # 298*126
redimage1 = Image.new('1', (epd.width, epd.height), 255) # 298*126
newimage = Image.open(os.path.join(picdir, '100x100.bmp'))
blackimage1.paste(newimage, (50,10))
epd.display(epd.getbuffer(blackimage1), epd.getbuffer(redimage1))
logging.info("Clear...")
epd.init()
epd.Clear()
logging.info("Goto Sleep...")
epd.sleep()
time.sleep(3)
epd.Dev_exit()
except IOError as e:
logging.info(e)
except KeyboardInterrupt:
logging.info("ctrl + c:")
epd7in5bc.epdconfig.module_exit()
exit()
| 37.773585 | 102 | 0.633367 |
import sys
import os
picdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'pic')
libdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'lib')
if os.path.exists(libdir):
sys.path.append(libdir)
import logging
from waveshare_epd import epd7in5bc
import time
from PIL import Image,ImageDraw,ImageFont
import traceback
logging.basicConfig(level=logging.DEBUG)
try:
logging.info("epd7in5bc Demo")
epd = epd7in5bc.EPD()
logging.info("init and Clear")
epd.init()
epd.Clear()
time.sleep(1)
logging.info("Drawing")
font24 = ImageFont.truetype(os.path.join(picdir, 'Font.ttc'), 24)
font18 = ImageFont.truetype(os.path.join(picdir, 'Font.ttc'), 18)
logging.info("1.Drawing on the Horizontal image...")
HBlackimage = Image.new('1', (epd.width, epd.height), 255)
HRYimage = Image.new('1', (epd.width, epd.height), 255)
drawblack = ImageDraw.Draw(HBlackimage)
drawry = ImageDraw.Draw(HRYimage)
drawblack.text((10, 0), 'hello world', font = font24, fill = 0)
drawblack.text((10, 20), '7.5inch e-Paper bc', font = font24, fill = 0)
drawblack.text((150, 0), u'微雪电子', font = font24, fill = 0)
drawblack.line((20, 50, 70, 100), fill = 0)
drawblack.line((70, 50, 20, 100), fill = 0)
drawblack.rectangle((20, 50, 70, 100), outline = 0)
drawry.line((165, 50, 165, 100), fill = 0)
drawry.line((140, 75, 190, 75), fill = 0)
drawry.arc((140, 50, 190, 100), 0, 360, fill = 0)
drawry.rectangle((80, 50, 130, 100), fill = 0)
drawry.chord((200, 50, 250, 100), 0, 360, fill = 0)
epd.display(epd.getbuffer(HBlackimage), epd.getbuffer(HRYimage))
time.sleep(2)
logging.info("2.Drawing on the Vertical image...")
LBlackimage = Image.new('1', (epd.height, epd.width), 255)
LRYimage = Image.new('1', (epd.height, epd.width), 255)
drawblack = ImageDraw.Draw(LBlackimage)
drawry = ImageDraw.Draw(LRYimage)
drawblack.text((2, 0), 'hello world', font = font18, fill = 0)
drawblack.text((2, 20), '7.5inch epd bc', font = font18, fill = 0)
drawblack.text((20, 50), u'微雪电子', font = font18, fill = 0)
drawblack.line((10, 90, 60, 140), fill = 0)
drawblack.line((60, 90, 10, 140), fill = 0)
drawblack.rectangle((10, 90, 60, 140), outline = 0)
drawry.line((95, 90, 95, 140), fill = 0)
drawry.line((70, 115, 120, 115), fill = 0)
drawry.arc((70, 90, 120, 140), 0, 360, fill = 0)
drawry.rectangle((10, 150, 60, 200), fill = 0)
drawry.chord((70, 150, 120, 200), 0, 360, fill = 0)
epd.display(epd.getbuffer(LBlackimage), epd.getbuffer(LRYimage))
time.sleep(2)
logging.info("3.read bmp file")
HBlackimage = Image.open(os.path.join(picdir, '7in5b-b.bmp'))
HRYimage = Image.open(os.path.join(picdir, '7in5b-r.bmp'))
epd.display(epd.getbuffer(HBlackimage), epd.getbuffer(HRYimage))
time.sleep(2)
logging.info("4.read bmp file on window")
blackimage1 = Image.new('1', (epd.width, epd.height), 255)
redimage1 = Image.new('1', (epd.width, epd.height), 255)
newimage = Image.open(os.path.join(picdir, '100x100.bmp'))
blackimage1.paste(newimage, (50,10))
epd.display(epd.getbuffer(blackimage1), epd.getbuffer(redimage1))
logging.info("Clear...")
epd.init()
epd.Clear()
logging.info("Goto Sleep...")
epd.sleep()
time.sleep(3)
epd.Dev_exit()
except IOError as e:
logging.info(e)
except KeyboardInterrupt:
logging.info("ctrl + c:")
epd7in5bc.epdconfig.module_exit()
exit()
| true | true |
f73d2e33146abf68afe3b33d48590d307b789078 | 972 | py | Python | app/config.py | ryomahan/read-tracardi-api | d0a012fb097ca81daf046b314000301eb54bfad8 | [
"MIT"
] | null | null | null | app/config.py | ryomahan/read-tracardi-api | d0a012fb097ca81daf046b314000301eb54bfad8 | [
"MIT"
] | null | null | null | app/config.py | ryomahan/read-tracardi-api | d0a012fb097ca81daf046b314000301eb54bfad8 | [
"MIT"
] | null | null | null | import os
class ServerConfig:
    """Server runtime configuration sourced from environment variables.

    Each attribute falls back to a sane default when its variable is absent.
    Numeric settings are coerced to numbers so callers can rely on the
    attribute type regardless of whether the value came from the environment
    (where values are always strings) or from the default.
    """

    def __init__(self, env):
        # Refresh plugins at startup unless explicitly disabled.
        self.update_plugins_on_start_up = env[
            'UPDATE_PLUGINS_ON_STARTUP'] if 'UPDATE_PLUGINS_ON_STARTUP' in env else True
        # Artificial response delay in seconds, for debugging; 0 disables it.
        self.make_slower_responses = float(
            env['DEBUG_MAKE_SLOWER_RESPONSES']) if 'DEBUG_MAKE_SLOWER_RESPONSES' in env else 0
        # Cast to int: os.environ values are strings, and previously a value
        # set via the environment stayed a string while the default was an
        # int (inconsistent with page_size / make_slower_responses).
        self.heartbeat_every = int(env['RUN_HEARTBEAT_EVERY']) if 'RUN_HEARTBEAT_EVERY' in env else 5 * 60
        self.tasks_every = int(env['RUN_TASKS_EVERY']) if 'RUN_TASKS_EVERY' in env else 1
        self.page_size = int(env['AUTOLOAD_PAGE_SIZE']) if 'AUTOLOAD_PAGE_SIZE' in env else 25
        # Boolean flags are the literal strings "yes"/"no", case-insensitive.
        self.expose_gui_api = (env['EXPOSE_GUI_API'].lower() == "yes") if 'EXPOSE_GUI_API' in env else True
        self.reset_plugins = (env['RESET_PLUGINS'].lower() == "yes") if 'RESET_PLUGINS' in env else False
        # Header used to recover the client IP behind a proxy; None = disabled.
        self.x_forwarded_ip_header = env['USE_X_FORWARDED_IP'] if 'USE_X_FORWARDED_IP' in env else None
server = ServerConfig(os.environ)
| 51.157895 | 107 | 0.709877 | import os
class ServerConfig:
def __init__(self, env):
self.update_plugins_on_start_up = env[
'UPDATE_PLUGINS_ON_STARTUP'] if 'UPDATE_PLUGINS_ON_STARTUP' in env else True
self.make_slower_responses = float(
env['DEBUG_MAKE_SLOWER_RESPONSES']) if 'DEBUG_MAKE_SLOWER_RESPONSES' in env else 0
self.heartbeat_every = env['RUN_HEARTBEAT_EVERY'] if 'RUN_HEARTBEAT_EVERY' in env else 5 * 60
self.tasks_every = env['RUN_TASKS_EVERY'] if 'RUN_TASKS_EVERY' in env else 1
self.page_size = int(env['AUTOLOAD_PAGE_SIZE']) if 'AUTOLOAD_PAGE_SIZE' in env else 25
self.expose_gui_api = (env['EXPOSE_GUI_API'].lower() == "yes") if 'EXPOSE_GUI_API' in env else True
self.reset_plugins = (env['RESET_PLUGINS'].lower() == "yes") if 'RESET_PLUGINS' in env else False
self.x_forwarded_ip_header = env['USE_X_FORWARDED_IP'] if 'USE_X_FORWARDED_IP' in env else None
server = ServerConfig(os.environ)
| true | true |
f73d2e58f295701cf73dc09621f6476763b761f2 | 8,240 | py | Python | luna16/src/candidates.py | syagev/kaggle_dsb | 4927f9dee59092f513cbdc02cfc5954c4fb5e7eb | [
"Apache-2.0"
] | 9 | 2017-04-24T02:37:22.000Z | 2022-03-04T11:10:09.000Z | luna16/src/candidates.py | syagev/kaggle_dsb | 4927f9dee59092f513cbdc02cfc5954c4fb5e7eb | [
"Apache-2.0"
] | 1 | 2019-03-30T18:16:30.000Z | 2019-03-30T18:16:30.000Z | luna16/src/candidates.py | syagev/kaggle_dsb | 4927f9dee59092f513cbdc02cfc5954c4fb5e7eb | [
"Apache-2.0"
] | 8 | 2017-05-28T11:15:41.000Z | 2022-03-04T11:10:12.000Z | from __future__ import division
import numpy as np
from scipy.spatial.distance import pdist, squareform
from scipy.sparse.csgraph import connected_components
import pandas as pd
from tqdm import tqdm
import blob
import pickle
import glob
import os
import sys
import scipy.misc
from skimage.io import imread
from skimage import morphology
from scipy import ndimage
import cv2
import pickle
import matplotlib.pyplot as plt
from scipy.ndimage.measurements import center_of_mass
import subprocess
# import ptvsd
from image_read_write import load_itk_image
CANDIDATES_COLUMNS = ['seriesuid','coordX','coordY','coordZ','label']
THRESHOLD = 225
def unet_candidates():
    """Convert U-Net probability-map PNGs into a candidates CSV.

    Reads every PNG under ``sys.argv[1]`` (one file per axial slice, named
    ``<seriesuid?>_<slice>...``), thresholds and erodes each map, labels the
    remaining connected blobs, and records one candidate row per blob centre
    in ``<work_dir>/candidates.csv`` (module-level ``work_dir`` is set in
    ``__main__``).

    NOTE(review): written for Python 2 (``xrange``); ``slice`` shadows the
    builtin.  Coordinates are written in voxel space -- the world-coordinate
    conversion is commented out below.
    """
    cands = glob.glob('%s/*.png' % sys.argv[1]) #"/razberry/workspace/dsb_nodule_detection.109fd54/*.png
    #df = pd.DataFrame(columns=['seriesuid','coordX','coordY','coordZ','class'])
    data = []
    imname = ""
    origin = []
    spacing = []
    nrimages = 0
    for name in tqdm(cands):
        #image = imread(name)
        image_t = imread(name)
        image_t = image_t.transpose()
        # Thresholding: binarize the probability map at THRESHOLD.
        image_t[image_t<THRESHOLD] = 0
        image_t[image_t>0] = 1
        # Erosion with a disk of radius 1 to drop single-pixel speckle.
        selem = morphology.disk(1)
        image_eroded = image_t
        image_eroded = morphology.binary_erosion(image_t,selem=selem)
        # Label the surviving connected components (blobs).
        label_im, nb_labels = ndimage.label(image_eroded)
        # Filename encodes "<seriesuid+1char>_<slice index>".
        imname3 = os.path.split(name)[1].replace('.png','')
        splitted = imname3.split("_")
        slice = splitted[1]
        imname2 = splitted[0][:-1]
        centers = []
        # Labels start at 1; 0 is background.
        for i in xrange(1,nb_labels+1):
            blob_i = np.where(label_im==i,1,0)
            mass = center_of_mass(blob_i)
            # center_of_mass returns (row, col); store as (x, y).
            centers.append([mass[1],mass[0]])
        if imname2 != imname:
            # New scan: load its cached origin/spacing (if present) once.
            if os.path.isfile("../data/1_1_1mm_512_x_512_annotation_masks/spacings/{0}.pickle".format(imname2)):
                with open("../data/1_1_1mm_512_x_512_annotation_masks/spacings/{0}.pickle".format(imname2), 'rb') as handle:
                    dic = pickle.load(handle)
                    origin = dic["origin"]
                    spacing = dic["spacing"]
            imname = imname2
            nrimages +=1
        for center in centers:
            # coords = voxel_2_world([int(slice),center[1]+(512-324)*0.5,center[0]+(512-324)*0.5],origin,spacing)
            coords = [int(slice),center[1],center[0]]
            # Row layout follows CANDIDATES_COLUMNS; label is unknown ('?').
            data.append([imname2,coords[2],coords[1],coords[0],'?'])
        #if nrimages == 5:
        #    break
    df = pd.DataFrame(data,columns=CANDIDATES_COLUMNS)
    save_candidates('%s/candidates.csv' % work_dir, df)
def candidates_to_image(cands,radius):
    """Save a JPEG patch around every candidate for visual inspection.

    For each row of the candidates DataFrame ``cands``, locates the scan's
    .mhd file across the ten LUNA16 subset directories, converts the world
    coordinates to voxel space, extracts a (2*radius)^2 patch, and writes it
    to ../data/samples/ named ``<seriesuid>_<n>_<true|false>.jpg`` using the
    row's class flag.  Returns ``images`` (always empty -- the append is
    commented out).

    NOTE(review): Python 2 code (``xrange``); ``scipy.misc.imsave`` was
    removed in SciPy 1.2 -- requires an old SciPy.
    """
    # Index scan names per subset so each scan is located without re-globbing.
    image_names = []
    for subset in xrange(0,10):
        subset_names = glob.glob("../data/subset{0}/*.mhd".format(subset))
        names = [os.path.split(x)[1].replace('.mhd','') for x in subset_names]
        image_names.append(names)
    previous_candidate = ""
    images = []
    image = []
    origin = []
    spacing = []
    number = 0
    for candidate in tqdm(cands.values):
        # Rows are assumed grouped by scan: only reload the volume when the
        # seriesuid changes -- TODO confirm input ordering.
        if candidate[0] != previous_candidate:
            number = 0
            previous_candidate = candidate[0]
            for image_subset in xrange(0,10):
                if candidate[0] in image_names[image_subset]:
                    image,origin,spacing = load_itk_image("../data/subset{0}/{1}.mhd".format(image_subset,candidate[0]))
                    break
        # Candidate columns are (seriesuid, X, Y, Z, label); world_2_voxel
        # takes (Z, Y, X).
        coords = world_2_voxel([candidate[3],candidate[2],candidate[1]],origin,spacing)
        im = image_part_from_candidate(image,coords,radius)
        #images.append(im)
        if candidate[4]:
            label = "true"
        else:
            label = "false"
        scipy.misc.imsave('../data/samples/{0}_{1}_{2}.jpg'.format(candidate[0],number,label), im)
        number += 1
    return images
def image_part_from_candidate(image,coords,radius):
    """Extract a (2*radius, 2*radius) axial patch centred on a candidate.

    Parameters
    ----------
    image : 3-D array, indexed as [z, y, x] -- assumed; verify against caller.
    coords : (z, y, x) voxel coordinates of the patch centre (may be floats,
        e.g. the output of ``world_2_voxel``; they are truncated to ints).
    radius : half-size of the square patch.

    Out-of-bounds pixels are filled with -1000 (air in Hounsfield units).

    Fixes over the original: ``xrange`` replaced with ``range`` (works on
    both Python 2 and 3); the bare ``except:`` is gone -- it used to mask a
    TypeError/IndexError from float indices (making every patch -1000) and,
    worse, negative indices silently wrapped to the opposite edge of the
    volume instead of being treated as out of bounds.
    """
    im = np.full((radius * 2, radius * 2), -1000.0)
    z = int(coords[0])
    if not 0 <= z < image.shape[0]:
        return im
    for x in range(-radius, radius):
        row = int(coords[1]) + x
        if not 0 <= row < image.shape[1]:
            continue  # whole patch row is outside the volume
        for y in range(-radius, radius):
            col = int(coords[2]) + y
            if 0 <= col < image.shape[2]:
                im[x + radius, y + radius] = image[z, row, col]
    return im
def merge_candidates_scan(candidates, seriesuid, distance=5.):
    """Merge nearby candidates of a single scan into cluster centroids.

    Candidates whose pairwise Euclidean distance is at most ``distance``
    (transitively, via connected components) are treated as one finding and
    replaced by their mean coordinate.  Returns a DataFrame laid out per
    CANDIDATES_COLUMNS, with every row carrying ``seriesuid`` and class 0.
    """
    # Pairwise distances -> binary adjacency -> connected components.
    pairwise = squareform(pdist(candidates, metric='euclidean'))
    adjacency = np.where(pairwise <= distance, 1, 0)
    n_clusters, membership = connected_components(adjacency)

    # Centroid of each connected component.
    centroids = np.zeros((n_clusters, 3))
    for component in range(n_clusters):
        members = candidates[np.where(membership == component)]
        centroids[component, :] = np.mean(members, axis=0)

    rows = zip([seriesuid] * n_clusters,
               centroids[:, 0],
               centroids[:, 1],
               centroids[:, 2],
               [0] * n_clusters)
    return pd.DataFrame(rows, columns=CANDIDATES_COLUMNS)
def merge_candidates(df_candidates, distance=5.):
    """Merge nearby candidates within each scan of ``df_candidates``.

    Groups the DataFrame by ``seriesuid``, stacks each scan's (X, Y, Z)
    coordinates into an (N, 3) array, and delegates the actual clustering
    to ``merge_candidates_scan``.  Returns the concatenated result.

    NOTE(review): ``DataFrame.append`` was removed in pandas 2.0 -- this
    function requires pandas < 2 (or a switch to ``pd.concat``).
    """
    new_candidates = pd.DataFrame()
    for scan_name in tqdm(df_candidates['seriesuid'].unique()):
        #print "Merging scan", scan_name
        df = df_candidates[df_candidates['seriesuid']==scan_name]
        x = df['coordX']
        y = df['coordY']
        z = df['coordZ']
        # Pack the scan's coordinates into an (N, 3) array of columns X, Y, Z.
        shape = (len(x),3)
        candidates = np.zeros(shape)
        candidates[:,0]=x
        candidates[:,1]=y
        candidates[:,2]=z
        new = merge_candidates_scan(candidates,seriesuid=scan_name,distance=distance)
        new_candidates = new_candidates.append(new)
    #print new_candidates
    return new_candidates
def world_2_voxel(worldCoord, origin, spacing):
    """Map world (mm) coordinates to voxel indices for the given scan geometry."""
    offset = np.absolute(worldCoord - origin)
    return offset / spacing
def voxel_2_world(voxelCoord, origin, spacing):
    """Map voxel indices back to world (mm) coordinates."""
    return voxelCoord * spacing + origin
def load_candidates(filename, as_coords=False):
    """Read a candidates CSV into a DataFrame.

    NOTE(review): ``as_coords`` is accepted but currently ignored --
    presumably a planned option to return a raw coordinate array; confirm
    with callers before removing.
    """
    candidates = pd.read_csv(filename)
    return candidates
# Save candidates given filename and pandas dataframe
# Dataframe with columns:
# seriesuid, coordX, coordY, coordZ, class
# class seems to be 0 always
def save_candidates(filename, df_candidates):
    """Write the candidates DataFrame to ``filename`` as CSV, omitting the index."""
    df_candidates.to_csv(filename, index=False)
def coords_to_candidates(coords, seriesuid):
    """Wrap an (N, 3) coordinate array into a candidates DataFrame.

    Each row is tagged with ``seriesuid`` and class 0, matching the
    CANDIDATES_COLUMNS layout used throughout this module.
    """
    n = len(coords)
    rows = zip([seriesuid] * n,
               coords[:, 0],
               coords[:, 1],
               coords[:, 2],
               [0] * n)
    return pd.DataFrame(rows, columns=CANDIDATES_COLUMNS)
if __name__ == "__main__":
    # CLI: python candidates.py <unet_png_dir> <work_dir>
    # Pipeline: PNG masks -> candidates.csv -> merged candidates_merged.csv.
    # ptvsd.enable_attach(None, address = ('0.0.0.0', 3001))
    # ptvsd.wait_for_attach()
    work_dir = sys.argv[2]
    if not os.path.exists(work_dir):
        os.makedirs(work_dir)
    # Step 1: extract blob centres from the U-Net output (writes candidates.csv).
    unet_candidates()
    # quit()
    # Step 2: merge candidates closer than 5 mm/voxels into single findings.
    df = load_candidates('%s/candidates.csv' % work_dir)
    # images = candidates_to_image(df,15)
    new_candidates = merge_candidates(df)
    save_candidates('%s/candidates_merged.csv' % work_dir, new_candidates)
    # Legacy experiment with blob detection on a single scan, kept for reference:
    # #coords = blob.blob_image('../data/hoi.mhd')
    # #with open('../data/hoi_coords.pkl','w') as f:
    # #    pickle.dump(coords, f)
    # with open('../data/hoi_coords.pkl','r') as f:
    #     candidates = pickle.load(f)
    # coords = []
    # #coords = [y for y in [x for x in candidates]]
    # #slice, blob, xyz
    # for slice in candidates:
    #     #print slice
    #     for blob in slice:
    #         coords.append(blob)
    # #print coords
    # image, origin, spacing = load_itk_image('../data/hoi.mhd')
    # world_coords = np.array([voxel_2_world(y,origin,spacing) for y in coords])
    # #print world_coords
    # candidates = coords_to_candidates(world_coords, '1.3.6.1.4.1.14519.5.2.1.6279.6001.105756658031515062000744821260')
    # print len(candidates)
    # candidates = merge_candidates(candidates)
    # print len(candidates)
    # save_candidates('../data/hoi_candidates.csv', candidates)
| 31.450382 | 124 | 0.641383 | from __future__ import division
import numpy as np
from scipy.spatial.distance import pdist, squareform
from scipy.sparse.csgraph import connected_components
import pandas as pd
from tqdm import tqdm
import blob
import pickle
import glob
import os
import sys
import scipy.misc
from skimage.io import imread
from skimage import morphology
from scipy import ndimage
import cv2
import pickle
import matplotlib.pyplot as plt
from scipy.ndimage.measurements import center_of_mass
import subprocess
from image_read_write import load_itk_image
CANDIDATES_COLUMNS = ['seriesuid','coordX','coordY','coordZ','label']
THRESHOLD = 225
def unet_candidates():
cands = glob.glob('%s/*.png' % sys.argv[1])
#df = pd.DataFrame(columns=['seriesuid','coordX','coordY','coordZ','class'])
data = []
imname = ""
origin = []
spacing = []
nrimages = 0
for name in tqdm(cands):
#image = imread(name)
image_t = imread(name)
image_t = image_t.transpose()
#Thresholding
image_t[image_t<THRESHOLD] = 0
image_t[image_t>0] = 1
#erosion
selem = morphology.disk(1)
image_eroded = image_t
image_eroded = morphology.binary_erosion(image_t,selem=selem)
label_im, nb_labels = ndimage.label(image_eroded)
imname3 = os.path.split(name)[1].replace('.png','')
splitted = imname3.split("_")
slice = splitted[1]
imname2 = splitted[0][:-1]
centers = []
for i in xrange(1,nb_labels+1):
blob_i = np.where(label_im==i,1,0)
mass = center_of_mass(blob_i)
centers.append([mass[1],mass[0]])
if imname2 != imname:
if os.path.isfile("../data/1_1_1mm_512_x_512_annotation_masks/spacings/{0}.pickle".format(imname2)):
with open("../data/1_1_1mm_512_x_512_annotation_masks/spacings/{0}.pickle".format(imname2), 'rb') as handle:
dic = pickle.load(handle)
origin = dic["origin"]
spacing = dic["spacing"]
imname = imname2
nrimages +=1
for center in centers:
# coords = voxel_2_world([int(slice),center[1]+(512-324)*0.5,center[0]+(512-324)*0.5],origin,spacing)
coords = [int(slice),center[1],center[0]]
data.append([imname2,coords[2],coords[1],coords[0],'?'])
#if nrimages == 5:
# break
df = pd.DataFrame(data,columns=CANDIDATES_COLUMNS)
save_candidates('%s/candidates.csv' % work_dir, df)
def candidates_to_image(cands,radius):
image_names = []
for subset in xrange(0,10):
subset_names = glob.glob("../data/subset{0}/*.mhd".format(subset))
names = [os.path.split(x)[1].replace('.mhd','') for x in subset_names]
image_names.append(names)
previous_candidate = ""
images = []
image = []
origin = []
spacing = []
number = 0
for candidate in tqdm(cands.values):
if candidate[0] != previous_candidate:
number = 0
previous_candidate = candidate[0]
for image_subset in xrange(0,10):
if candidate[0] in image_names[image_subset]:
image,origin,spacing = load_itk_image("../data/subset{0}/{1}.mhd".format(image_subset,candidate[0]))
break
coords = world_2_voxel([candidate[3],candidate[2],candidate[1]],origin,spacing)
im = image_part_from_candidate(image,coords,radius)
#images.append(im)
if candidate[4]:
label = "true"
else:
label = "false"
scipy.misc.imsave('../data/samples/{0}_{1}_{2}.jpg'.format(candidate[0],number,label), im)
number += 1
return images
def image_part_from_candidate(image,coords,radius):
im = np.zeros((radius*2,radius*2))
for x in xrange(-radius,radius):
for y in xrange(-radius,radius):
try:
im[x+radius,y+radius]=image[coords[0],coords[1]+x,coords[2]+y]
except:
im[x+radius,y+radius]=-1000
return im
#Merge candidates of a single scan
def merge_candidates_scan(candidates, seriesuid, distance=5.):
distances = pdist(candidates, metric='euclidean')
adjacency_matrix = squareform(distances)
# Determine nodes within distance, replace by 1 (=adjacency matrix)
adjacency_matrix = np.where(adjacency_matrix<=distance,1,0)
# Determine all connected components in the graph
n, labels = connected_components(adjacency_matrix)
new_candidates = np.zeros((n,3))
# Take the mean for these connected components
for cluster_i in range(n):
points = candidates[np.where(labels==cluster_i)]
center = np.mean(points,axis=0)
new_candidates[cluster_i,:] = center
x = new_candidates[:,0]
y = new_candidates[:,1]
z = new_candidates[:,2]
labels = [seriesuid]*len(x)
class_name = [0]*len(x)
data= zip(labels,x,y,z,class_name)
new_candidates = pd.DataFrame(data,columns=CANDIDATES_COLUMNS)
return new_candidates
def merge_candidates(df_candidates, distance=5.):
new_candidates = pd.DataFrame()
for scan_name in tqdm(df_candidates['seriesuid'].unique()):
#print "Merging scan", scan_name
df = df_candidates[df_candidates['seriesuid']==scan_name]
x = df['coordX']
y = df['coordY']
z = df['coordZ']
shape = (len(x),3)
candidates = np.zeros(shape)
candidates[:,0]=x
candidates[:,1]=y
candidates[:,2]=z
new = merge_candidates_scan(candidates,seriesuid=scan_name,distance=distance)
new_candidates = new_candidates.append(new)
#print new_candidates
return new_candidates
def world_2_voxel(worldCoord, origin, spacing):
stretchedVoxelCoord = np.absolute(worldCoord - origin)
voxelCoord = stretchedVoxelCoord / spacing
return voxelCoord
def voxel_2_world(voxelCoord, origin, spacing):
stretchedVoxelCoord = voxelCoord * spacing
worldCoord = stretchedVoxelCoord + origin
return worldCoord
def load_candidates(filename, as_coords=False):
candidates = pd.read_csv(filename)
return candidates
# Save candidates given filename and pandas dataframe
# Dataframe with columns:
# seriesuid, coordX, coordY, coordZ, class
# class seems to be 0 always
def save_candidates(filename, df_candidates):
df_candidates.to_csv(filename, index=False)
def coords_to_candidates(coords, seriesuid):
x = coords[:,0]
y = coords[:,1]
z = coords[:,2]
names = [seriesuid]*len(x)
class_name = [0]*len(x)
data = zip(names,x,y,z,class_name)
candidates = pd.DataFrame(data,columns=CANDIDATES_COLUMNS)
return candidates
if __name__ == "__main__":
# ptvsd.enable_attach(None, address = ('0.0.0.0', 3001))
# ptvsd.wait_for_attach()
work_dir = sys.argv[2]
if not os.path.exists(work_dir):
os.makedirs(work_dir)
unet_candidates()
# quit()
df = load_candidates('%s/candidates.csv' % work_dir)
# images = candidates_to_image(df,15)
new_candidates = merge_candidates(df)
save_candidates('%s/candidates_merged.csv' % work_dir, new_candidates)
# #coords = blob.blob_image('../data/hoi.mhd')
# #with open('../data/hoi_coords.pkl','w') as f:
# # pickle.dump(coords, f)
# with open('../data/hoi_coords.pkl','r') as f:
# candidates = pickle.load(f)
# coords = []
# #coords = [y for y in [x for x in candidates]]
# #slice, blob, xyz
# for slice in candidates:
# #print slice
# for blob in slice:
# coords.append(blob)
# #print coords
# image, origin, spacing = load_itk_image('../data/hoi.mhd')
# world_coords = np.array([voxel_2_world(y,origin,spacing) for y in coords])
# #print world_coords
# candidates = coords_to_candidates(world_coords, '1.3.6.1.4.1.14519.5.2.1.6279.6001.105756658031515062000744821260')
# print len(candidates)
# candidates = merge_candidates(candidates)
# print len(candidates)
# save_candidates('../data/hoi_candidates.csv', candidates)
| true | true |
f73d2f3b56ee16c0c4343aebabe529cb0d21a8aa | 2,328 | py | Python | code/ARAX/NodeSynonymizer/dump_kg2_equivalencies.py | finnagin/RTX | 698fab0d5ac507e92b5190878922f9fa5b72d9a7 | [
"MIT"
] | null | null | null | code/ARAX/NodeSynonymizer/dump_kg2_equivalencies.py | finnagin/RTX | 698fab0d5ac507e92b5190878922f9fa5b72d9a7 | [
"MIT"
] | null | null | null | code/ARAX/NodeSynonymizer/dump_kg2_equivalencies.py | finnagin/RTX | 698fab0d5ac507e92b5190878922f9fa5b72d9a7 | [
"MIT"
] | null | null | null | #!/bin/env python3
"""
This script creates a TSV of node pairs linked by an 'equivalent_to'/'same_as' relationship in KG2. The TSV file is
created in the same directory the script is run from. Example of rows in the output file:
UMLS:C0027358 UMLS:C0014563
UMLS:C0878440 UMLS:C0014563
Usage: python dump_kg2_equivalencies.py
"""
import csv
import os
import sys
import traceback
from typing import List, Dict
from neo4j import GraphDatabase
sys.path.append(os.path.dirname(os.path.abspath(__file__))+"/../../") # code directory
from RTXConfiguration import RTXConfiguration
def _run_cypher_query(cypher_query: str, kg="KG2") -> List[Dict[str, any]]:
    # This function sends a cypher query to neo4j (either KG1 or KG2) and returns results
    """Run ``cypher_query`` against the configured neo4j instance.

    Returns the list of result records (one dict per row), or an empty list
    if anything goes wrong while talking to neo4j (the error is printed,
    never raised).

    NOTE(review): the annotation uses the builtin ``any`` where
    ``typing.Any`` was presumably intended; ``error_type, error`` from
    sys.exc_info() are assigned but unused.
    """
    rtxc = RTXConfiguration()
    if kg == "KG2":
        rtxc.live = "KG2"  # switch the config to the KG2 neo4j endpoint
    try:
        driver = GraphDatabase.driver(rtxc.neo4j_bolt, auth=(rtxc.neo4j_username, rtxc.neo4j_password))
        with driver.session() as session:
            print(f"Sending cypher query to {kg} neo4j")
            query_results = session.run(cypher_query).data()
            print(f"Got {len(query_results)} results back from neo4j")
            driver.close()
    except Exception:
        tb = traceback.format_exc()
        error_type, error, _ = sys.exc_info()
        print(f"Encountered an error interacting with {kg} neo4j. {tb}")
        return []
    else:
        return query_results
def dump_kg2_equivalencies():
    """Dump KG2 node pairs linked by 'equivalent_to'/'same_as' to a TSV.

    Writes ``kg2_equivalencies.tsv`` in the current directory with one
    column per returned field (n1.id, n2.id).  If the query returns no rows
    (including on a neo4j error), prints a message and writes nothing.
    """
    cypher_query = "match (n1)-[:equivalent_to|:same_as]->(n2) return distinct n1.id, n2.id"
    equivalent_node_pairs = _run_cypher_query(cypher_query)
    if equivalent_node_pairs:
        column_headers = equivalent_node_pairs[0].keys()
        file_name = "kg2_equivalencies.tsv"
        # newline="" lets the csv module control line endings itself (avoids
        # doubled blank lines on Windows); plain "w" suffices -- the old "w+"
        # read/write mode was never used for reading.
        with open(file_name, "w", newline="") as output_file:
            dict_writer = csv.DictWriter(output_file, column_headers, delimiter='\t')
            dict_writer.writeheader()
            dict_writer.writerows(equivalent_node_pairs)
        print(f"Successfully created file '{file_name}' containing results")
    else:
        print("Sorry, couldn't get equivalency data. No file created.")
def main():
    """Script entry point: dump the KG2 equivalency pairs to a TSV."""
    dump_kg2_equivalencies()
if __name__ == "__main__":
    main()
| 36.375 | 115 | 0.695017 |
import csv
import os
import sys
import traceback
from typing import List, Dict
from neo4j import GraphDatabase
sys.path.append(os.path.dirname(os.path.abspath(__file__))+"/../../")
from RTXConfiguration import RTXConfiguration
def _run_cypher_query(cypher_query: str, kg="KG2") -> List[Dict[str, any]]:
rtxc = RTXConfiguration()
if kg == "KG2":
rtxc.live = "KG2"
try:
driver = GraphDatabase.driver(rtxc.neo4j_bolt, auth=(rtxc.neo4j_username, rtxc.neo4j_password))
with driver.session() as session:
print(f"Sending cypher query to {kg} neo4j")
query_results = session.run(cypher_query).data()
print(f"Got {len(query_results)} results back from neo4j")
driver.close()
except Exception:
tb = traceback.format_exc()
error_type, error, _ = sys.exc_info()
print(f"Encountered an error interacting with {kg} neo4j. {tb}")
return []
else:
return query_results
def dump_kg2_equivalencies():
cypher_query = f"match (n1)-[:equivalent_to|:same_as]->(n2) return distinct n1.id, n2.id"
equivalent_node_pairs = _run_cypher_query(cypher_query)
if equivalent_node_pairs:
column_headers = equivalent_node_pairs[0].keys()
file_name = "kg2_equivalencies.tsv"
with open(file_name, "w+") as output_file:
dict_writer = csv.DictWriter(output_file, column_headers, delimiter='\t')
dict_writer.writeheader()
dict_writer.writerows(equivalent_node_pairs)
print(f"Successfully created file '{file_name}' containing results")
else:
print(f"Sorry, couldn't get equivalency data. No file created.")
def main():
dump_kg2_equivalencies()
if __name__ == "__main__":
main()
| true | true |
f73d3014ce3deda6465ef36663afe41548aa8143 | 1,822 | py | Python | tests/test_diffeq/test_odefilter/test_odefilter_cases.py | fxbriol/probnum | 7e0e94cf9146aaa2b730b02c6d75a022cd629b5c | [
"MIT"
] | null | null | null | tests/test_diffeq/test_odefilter/test_odefilter_cases.py | fxbriol/probnum | 7e0e94cf9146aaa2b730b02c6d75a022cd629b5c | [
"MIT"
] | null | null | null | tests/test_diffeq/test_odefilter/test_odefilter_cases.py | fxbriol/probnum | 7e0e94cf9146aaa2b730b02c6d75a022cd629b5c | [
"MIT"
] | null | null | null | """Test-cases for ODE filters."""
import pytest_cases
from probnum import diffeq, randprocs
import probnum.problems.zoo.diffeq as diffeq_zoo
# pytest-cases case functions for the ODE-filter test suite.  Each function
# builds one test fixture; the ("numpy",) / ("jax",) tags select which
# backend a case is compatible with.
# logistic.rhs is implemented backend-agnostic,
# thus it works for both numpy and jax
@pytest_cases.case(tags=("numpy", "jax"))
def problem_logistic():
    """Logistic-growth initial value problem from the diffeq zoo."""
    return diffeq_zoo.logistic()
# --- Step-size rules ---
def steprule_constant():
    """Fixed step size of 0.5."""
    return diffeq.stepsize.ConstantSteps(0.5)
def steprule_adaptive():
    """Error-controlled adaptive steps (loose tolerances keep tests fast)."""
    return diffeq.stepsize.AdaptiveSteps(firststep=0.5, atol=0.2, rtol=0.2)
# --- Diffusion (process-noise calibration) models ---
def diffusion_constant():
    return randprocs.markov.continuous.ConstantDiffusion()
def diffusion_piecewise_constant():
    return randprocs.markov.continuous.PiecewiseConstantDiffusion(t0=0.0)
# --- Initialization routines available on the numpy backend ---
@pytest_cases.case(tags=("numpy",))
def init_non_prob_fit():
    return diffeq.odefilter.init_routines.NonProbabilisticFit()
@pytest_cases.case(tags=("numpy",))
def init_non_prob_fit_with_jacobian():
    return diffeq.odefilter.init_routines.NonProbabilisticFitWithJacobian()
@pytest_cases.case(tags=("numpy",))
def init_stack():
    return diffeq.odefilter.init_routines.Stack()
@pytest_cases.case(tags=("numpy",))
def init_stack_with_jacobian():
    return diffeq.odefilter.init_routines.StackWithJacobian()
# --- Initialization routines that need jax autodiff ---
@pytest_cases.case(tags=("jax",))
def init_forward():
    return diffeq.odefilter.init_routines.ForwardMode()
@pytest_cases.case(tags=("jax",))
def init_forward_jvp():
    return diffeq.odefilter.init_routines.ForwardModeJVP()
@pytest_cases.case(tags=("jax",))
def init_reverse():
    return diffeq.odefilter.init_routines.ReverseMode()
@pytest_cases.case(tags=("jax",))
def init_taylor():
    return diffeq.odefilter.init_routines.TaylorMode()
# --- Linearization / approximation strategies ---
def approx_ek0():
    return diffeq.odefilter.approx_strategies.EK0()
def approx_ek1():
    return diffeq.odefilter.approx_strategies.EK1()
| 23.063291 | 75 | 0.763996 |
import pytest_cases
from probnum import diffeq, randprocs
import probnum.problems.zoo.diffeq as diffeq_zoo
@pytest_cases.case(tags=("numpy", "jax"))
def problem_logistic():
return diffeq_zoo.logistic()
def steprule_constant():
return diffeq.stepsize.ConstantSteps(0.5)
def steprule_adaptive():
return diffeq.stepsize.AdaptiveSteps(firststep=0.5, atol=0.2, rtol=0.2)
def diffusion_constant():
return randprocs.markov.continuous.ConstantDiffusion()
def diffusion_piecewise_constant():
return randprocs.markov.continuous.PiecewiseConstantDiffusion(t0=0.0)
@pytest_cases.case(tags=("numpy",))
def init_non_prob_fit():
return diffeq.odefilter.init_routines.NonProbabilisticFit()
@pytest_cases.case(tags=("numpy",))
def init_non_prob_fit_with_jacobian():
return diffeq.odefilter.init_routines.NonProbabilisticFitWithJacobian()
@pytest_cases.case(tags=("numpy",))
def init_stack():
return diffeq.odefilter.init_routines.Stack()
@pytest_cases.case(tags=("numpy",))
def init_stack_with_jacobian():
return diffeq.odefilter.init_routines.StackWithJacobian()
@pytest_cases.case(tags=("jax",))
def init_forward():
return diffeq.odefilter.init_routines.ForwardMode()
@pytest_cases.case(tags=("jax",))
def init_forward_jvp():
return diffeq.odefilter.init_routines.ForwardModeJVP()
@pytest_cases.case(tags=("jax",))
def init_reverse():
return diffeq.odefilter.init_routines.ReverseMode()
@pytest_cases.case(tags=("jax",))
def init_taylor():
return diffeq.odefilter.init_routines.TaylorMode()
def approx_ek0():
return diffeq.odefilter.approx_strategies.EK0()
def approx_ek1():
return diffeq.odefilter.approx_strategies.EK1()
| true | true |
f73d305b487bdcf6bdafdf47ab2a0f5839a18a9e | 91 | py | Python | pardakht/apps.py | riozo-h/django-pardakht | 8ec4455440926d05dd971ef571f11c1ef974fe4f | [
"MIT"
] | 10 | 2018-04-03T07:37:52.000Z | 2019-07-19T21:22:16.000Z | pardakht/apps.py | riozo-h/django-pardakht | 8ec4455440926d05dd971ef571f11c1ef974fe4f | [
"MIT"
] | 4 | 2018-05-26T08:28:41.000Z | 2019-10-26T01:30:00.000Z | pardakht/apps.py | riozo-h/django-pardakht | 8ec4455440926d05dd971ef571f11c1ef974fe4f | [
"MIT"
] | 7 | 2018-04-24T03:14:58.000Z | 2019-12-15T19:12:16.000Z | from django.apps import AppConfig
class PardakhtConfig(AppConfig):
    """Django app registry configuration for the 'pardakht' payments app."""
    # Dotted label Django uses to register and refer to this application.
    name = 'pardakht'
| 15.166667 | 33 | 0.758242 | from django.apps import AppConfig
class PardakhtConfig(AppConfig):
name = 'pardakht'
| true | true |
f73d311168e8bb8d2be3cc630c2a8d9ae4b33f82 | 3,875 | py | Python | troposphere/licensemanager.py | filipepmo/troposphere | b1590f58ed8cc86ba18a19ed93fc9380d6f7306b | [
"BSD-2-Clause"
] | null | null | null | troposphere/licensemanager.py | filipepmo/troposphere | b1590f58ed8cc86ba18a19ed93fc9380d6f7306b | [
"BSD-2-Clause"
] | null | null | null | troposphere/licensemanager.py | filipepmo/troposphere | b1590f58ed8cc86ba18a19ed93fc9380d6f7306b | [
"BSD-2-Clause"
] | null | null | null | # Copyright (c) 2012-2022, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 51.0.0
from . import AWSObject, AWSProperty, PropsDictType
from .validators import boolean, integer
class Grant(AWSObject):
    """
    `Grant <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-licensemanager-grant.html>`__
    """
    resource_type = "AWS::LicenseManager::Grant"
    # Maps each CloudFormation property name to (expected type, required flag).
    props: PropsDictType = {
        "AllowedOperations": ([str], False),
        "GrantName": (str, False),
        "HomeRegion": (str, False),
        "LicenseArn": (str, False),
        "Principals": ([str], False),
        "Status": (str, False),
    }
class BorrowConfiguration(AWSProperty):
    """
    `BorrowConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-licensemanager-license-borrowconfiguration.html>`__
    """
    # Maps each CloudFormation property name to (expected type, required flag).
    props: PropsDictType = {
        "AllowEarlyCheckIn": (boolean, True),
        "MaxTimeToLiveInMinutes": (integer, True),
    }
class ProvisionalConfiguration(AWSProperty):
    """
    `ProvisionalConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-licensemanager-license-provisionalconfiguration.html>`__
    """
    # Maps each CloudFormation property name to (expected type, required flag).
    props: PropsDictType = {
        "MaxTimeToLiveInMinutes": (integer, True),
    }
class ConsumptionConfiguration(AWSProperty):
    """
    `ConsumptionConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-licensemanager-license-consumptionconfiguration.html>`__
    """
    # Maps each CloudFormation property name to (expected type, required flag).
    props: PropsDictType = {
        "BorrowConfiguration": (BorrowConfiguration, False),
        "ProvisionalConfiguration": (ProvisionalConfiguration, False),
        "RenewType": (str, False),
    }
class Entitlement(AWSProperty):
    """
    `Entitlement <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-licensemanager-license-entitlement.html>`__
    """
    # Maps each CloudFormation property name to (expected type, required flag).
    props: PropsDictType = {
        "AllowCheckIn": (boolean, False),
        "MaxCount": (integer, False),
        "Name": (str, True),
        "Overage": (boolean, False),
        "Unit": (str, True),
        "Value": (str, False),
    }
class IssuerData(AWSProperty):
    """
    `IssuerData <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-licensemanager-license-issuerdata.html>`__
    """
    # Maps each CloudFormation property name to (expected type, required flag).
    props: PropsDictType = {
        "Name": (str, True),
        "SignKey": (str, False),
    }
class Metadata(AWSProperty):
    """
    `Metadata <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-licensemanager-license-metadata.html>`__
    """
    # Maps each CloudFormation property name to (expected type, required flag).
    props: PropsDictType = {
        "Name": (str, True),
        "Value": (str, True),
    }
class ValidityDateFormat(AWSProperty):
    """
    `ValidityDateFormat <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-licensemanager-license-validitydateformat.html>`__
    """
    # Maps each CloudFormation property name to (expected type, required flag).
    props: PropsDictType = {
        "Begin": (str, True),
        "End": (str, True),
    }
class License(AWSObject):
    """
    `License <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-licensemanager-license.html>`__
    """
    resource_type = "AWS::LicenseManager::License"
    # Maps each CloudFormation property name to (expected type, required flag).
    props: PropsDictType = {
        "Beneficiary": (str, False),
        "ConsumptionConfiguration": (ConsumptionConfiguration, True),
        "Entitlements": ([Entitlement], True),
        "HomeRegion": (str, True),
        "Issuer": (IssuerData, True),
        "LicenseMetadata": ([Metadata], False),
        "LicenseName": (str, True),
        "ProductName": (str, True),
        "ProductSKU": (str, False),
        "Status": (str, False),
        "Validity": (ValidityDateFormat, True),
    }
| 29.356061 | 164 | 0.653935 |
from . import AWSObject, AWSProperty, PropsDictType
from .validators import boolean, integer
class Grant(AWSObject):
resource_type = "AWS::LicenseManager::Grant"
props: PropsDictType = {
"AllowedOperations": ([str], False),
"GrantName": (str, False),
"HomeRegion": (str, False),
"LicenseArn": (str, False),
"Principals": ([str], False),
"Status": (str, False),
}
class BorrowConfiguration(AWSProperty):
props: PropsDictType = {
"AllowEarlyCheckIn": (boolean, True),
"MaxTimeToLiveInMinutes": (integer, True),
}
class ProvisionalConfiguration(AWSProperty):
props: PropsDictType = {
"MaxTimeToLiveInMinutes": (integer, True),
}
class ConsumptionConfiguration(AWSProperty):
props: PropsDictType = {
"BorrowConfiguration": (BorrowConfiguration, False),
"ProvisionalConfiguration": (ProvisionalConfiguration, False),
"RenewType": (str, False),
}
class Entitlement(AWSProperty):
props: PropsDictType = {
"AllowCheckIn": (boolean, False),
"MaxCount": (integer, False),
"Name": (str, True),
"Overage": (boolean, False),
"Unit": (str, True),
"Value": (str, False),
}
class IssuerData(AWSProperty):
props: PropsDictType = {
"Name": (str, True),
"SignKey": (str, False),
}
class Metadata(AWSProperty):
props: PropsDictType = {
"Name": (str, True),
"Value": (str, True),
}
class ValidityDateFormat(AWSProperty):
props: PropsDictType = {
"Begin": (str, True),
"End": (str, True),
}
class License(AWSObject):
resource_type = "AWS::LicenseManager::License"
props: PropsDictType = {
"Beneficiary": (str, False),
"ConsumptionConfiguration": (ConsumptionConfiguration, True),
"Entitlements": ([Entitlement], True),
"HomeRegion": (str, True),
"Issuer": (IssuerData, True),
"LicenseMetadata": ([Metadata], False),
"LicenseName": (str, True),
"ProductName": (str, True),
"ProductSKU": (str, False),
"Status": (str, False),
"Validity": (ValidityDateFormat, True),
}
| true | true |
f73d31b6906aad78335358a191cb79d885c8c887 | 27,081 | py | Python | pieces/protocol.py | SteBry/TDDE35-Building-a-BitTorrent-streaming-client | 35ad1707adcec9c7ef9df6900c3434380566828a | [
"Apache-2.0"
] | 1 | 2019-04-11T08:59:29.000Z | 2019-04-11T08:59:29.000Z | pieces/protocol.py | SteBry/TDDE35-Building-a-BitTorrent-streaming-client | 35ad1707adcec9c7ef9df6900c3434380566828a | [
"Apache-2.0"
] | null | null | null | pieces/protocol.py | SteBry/TDDE35-Building-a-BitTorrent-streaming-client | 35ad1707adcec9c7ef9df6900c3434380566828a | [
"Apache-2.0"
] | null | null | null | #
# pieces - An experimental BitTorrent client
#
# Copyright 2016 markus.eliasson@gmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import logging
import struct
from asyncio import Queue
from concurrent.futures import CancelledError
import datetime
import bitstring
# The default request size for blocks of pieces is 2^14 bytes.
#
# NOTE: The official specification states that 2^15 is the default request
# size - but in reality all implementations use 2^14. See the
# unofficial specification for more details on this matter.
#
# https://wiki.theory.org/BitTorrentSpecification
#
REQUEST_SIZE = 2**14
HANDSHAKE_TIMEOUT = 4
class ProtocolError(BaseException):
    """Raised when the peer wire protocol is violated (e.g. a bad handshake)."""
    # NOTE(review): derives from BaseException rather than Exception, so broad
    # `except Exception` handlers will not catch it — confirm this is
    # intentional before changing the base class.
    pass
class PeerConnection:
    """
    A peer connection used to download and upload pieces.
    The peer connection will consume one available peer from the given queue.
    Based on the peer details the PeerConnection will try to open a connection
    and perform a BitTorrent handshake.
    After a successful handshake, the PeerConnection will be in a *choked*
    state, not allowed to request any data from the remote peer. After sending
    an interested message the PeerConnection will be waiting to get *unchoked*.
    Once the remote peer unchoked us, we can start requesting pieces.
    The PeerConnection will continue to request pieces for as long as there are
    pieces left to request, or until the remote peer disconnects.
    If the connection with a remote peer drops, the PeerConnection will consume
    the next available peer from off the queue and try to connect to that one
    instead.
    """
    def __init__(self, queue: Queue, info_hash,
                 peer_id, piece_manager, on_block_cb=None):
        """
        Constructs a PeerConnection and add it to the asyncio event-loop.
        Use `stop` to abort this connection and any subsequent connection
        attempts
        :param queue: The async Queue containing available peers
        :param info_hash: The SHA1 hash for the meta-data's info
        :param peer_id: Our peer ID used to to identify ourselves
        :param piece_manager: The manager responsible to determine which pieces
                              to request
        :param on_block_cb: The callback function to call when a block is
                            received from the remote peer
        """
        self.my_state = []
        self.peer_state = []
        self.queue = queue
        self.info_hash = info_hash
        self.peer_id = peer_id
        self.remote_id = None
        self.writer = None
        self.reader = None
        self.piece_manager = piece_manager
        self.on_block_cb = on_block_cb
        self.future = asyncio.ensure_future(self._start())  # Start this worker
    async def _start(self):
        while 'stopped' not in self.my_state:
            ip, port = await self.queue.get()
            logging.info('Got assigned peer with: {ip}'.format(ip=ip))
            try:
                # TODO For some reason it does not seem to work to open a new
                # connection if the first one drops (i.e. second loop).
                self.reader, self.writer = await asyncio.open_connection(
                    ip, port)
                logging.info('Connection open to peer: {ip}'.format(ip=ip))
                # It's our responsibility to initiate the handshake.
                buffer = await self._handshake()
                # TODO Add support for sending data
                # Sending BitField is optional and not needed when client does
                # not have any pieces. Thus we do not send any bitfield message
                # The default state for a connection is that peer is not
                # interested and we are choked
                self.my_state.append('choked')
                # Let the peer know we're interested in downloading pieces
                await self._send_interested()
                self.my_state.append('interested')
                # Start reading responses as a stream of messages for as
                # long as the connection is open and data is transmitted
                async for message in PeerStreamIterator(self.reader, buffer):
                    if 'stopped' in self.my_state:
                        break
                    if type(message) is BitField:
                        self.piece_manager.add_peer(self.remote_id,
                                                    message.bitfield)
                    elif type(message) is Interested:
                        self.peer_state.append('interested')
                    elif type(message) is NotInterested:
                        if 'interested' in self.peer_state:
                            self.peer_state.remove('interested')
                    elif type(message) is Choke:
                        self.my_state.append('choked')
                        # Written/modified by Stefan Brynielsson, May 2019:
                        # being choked invalidates any outstanding request.
                        if "pending_request" in self.my_state:
                            self.my_state.remove('pending_request')
                    elif type(message) is Unchoke:
                        if 'choked' in self.my_state:
                            self.my_state.remove('choked')
                        # Written/modified by Stefan Brynielsson, May 2019:
                        # drop any stale pending request so a new one is sent.
                        if "pending_request" in self.my_state:
                            self.my_state.remove('pending_request')
                    elif type(message) is Have:
                        self.piece_manager.update_peer(self.remote_id,
                                                       message.index)
                    elif type(message) is KeepAlive:
                        # Written/modified by Stefan Brynielsson, May 2019.
                        # NOTE(review): this drops the peer on a keep-alive;
                        # the wire protocol normally keeps the connection
                        # open on keep-alives — confirm this is intended.
                        break
                    elif type(message) is Piece:
                        self.my_state.remove('pending_request')
                        self.on_block_cb(
                            peer_id=self.remote_id,
                            piece_index=message.index,
                            block_offset=message.begin,
                            data=message.block)
                    elif type(message) is Request:
                        # TODO Add support for sending data
                        logging.info('Ignoring the received Request message.')
                    elif type(message) is Cancel:
                        # TODO Add support for sending data
                        logging.info('Ignoring the received Cancel message.')
                    # Send block request to remote peer if we're interested
                    if 'choked' not in self.my_state:
                        if 'interested' in self.my_state:
                            if 'pending_request' not in self.my_state:
                                self.my_state.append('pending_request')
                                await self._request_piece()
            except ProtocolError as e:
                logging.exception('Protocol error')
            except (ConnectionRefusedError, TimeoutError):
                logging.warning('Unable to connect to peer')
            except (ConnectionResetError, CancelledError):
                logging.warning('Connection closed')
            except Exception as e:
                logging.exception('An error occurred')
                self.cancel()
                raise e
            self.cancel()
    def cancel(self):
        """
        Sends the cancel message to the remote peer and closes the connection.
        """
        logging.info('Closing peer {id}'.format(id=self.remote_id))
        if not self.future.done():
            self.future.cancel()
        if self.writer:
            self.writer.close()
        self.queue.task_done()
    def stop(self):
        """
        Stop this connection from the current peer (if a connection exist) and
        from connecting to any new peer.
        """
        # Set state to stopped and cancel our future to break out of the loop.
        # The rest of the cleanup will eventually be managed by loop calling
        # `cancel`.
        self.my_state.append('stopped')
        if not self.future.done():
            self.future.cancel()
    async def _request_piece(self):
        # Ask the piece manager for the next block to fetch from this peer
        # and send the corresponding Request message on the wire.
        block = self.piece_manager.next_request(self.remote_id)
        if block:
            message = Request(block.piece, block.offset, block.length).encode()
            logging.debug('Requesting block {block} for piece {piece} '
                          'of {length} bytes from peer {peer}'.format(
                              piece=block.piece,
                              block=block.offset,
                              length=block.length,
                              peer=self.remote_id))
            self.writer.write(message)
            await self.writer.drain()
    async def _handshake(self):
        """
        Send the initial handshake to the remote peer and wait for the peer
        to respond with its handshake.
        """
        self.writer.write(Handshake(self.info_hash, self.peer_id).encode())
        # Written/modified by Stefan Brynielsson, May 2019:
        # remember when the handshake started so it can be timed out.
        time = datetime.datetime.now()
        await self.writer.drain()
        buf = b''
        while len(buf) < Handshake.length:
            # BUGFIX: accumulate into buf instead of overwriting it, so a
            # handshake that arrives split across multiple reads is not lost.
            buf += await self.reader.read(PeerStreamIterator.CHUNK_SIZE)
            # End the handshake if it takes longer than HANDSHAKE_TIMEOUT seconds
            if (datetime.datetime.now() - time).total_seconds() > HANDSHAKE_TIMEOUT and len(buf) < Handshake.length:
                raise TimeoutError('NO handshake response')
        response = Handshake.decode(buf[:Handshake.length])
        if not response:
            raise ProtocolError('Unable receive and parse a handshake')
        if not response.info_hash == self.info_hash:
            raise ProtocolError('Handshake with invalid info_hash')
        # TODO: According to spec we should validate that the peer_id received
        # from the peer match the peer_id received from the tracker.
        self.remote_id = response.peer_id
        logging.info('Handshake with peer was successful')
        # We need to return the remaining buffer data, since we might have
        # read more bytes then the size of the handshake message and we need
        # those bytes to parse the next message.
        return buf[Handshake.length:]
    async def _send_interested(self):
        # Tell the remote peer we want to download pieces from it.
        message = Interested()
        logging.debug('Sending message: {type}'.format(type=message))
        self.writer.write(message.encode())
        await self.writer.drain()
class PeerStreamIterator:
    """
    The `PeerStreamIterator` is an async iterator that continuously reads from
    the given stream reader and tries to parse valid BitTorrent messages from
    off that stream of bytes.
    If the connection is dropped, something fails the iterator will abort by
    raising the `StopAsyncIteration` error ending the calling iteration.
    """
    CHUNK_SIZE = 10*1024
    def __init__(self, reader, initial: bytes = None):
        """
        :param reader: The asyncio StreamReader to consume bytes from
        :param initial: Left-over bytes already read (e.g. after the handshake)
        """
        self.reader = reader
        self.buffer = initial if initial else b''
    def __aiter__(self):
        # BUGFIX: `__aiter__` must synchronously return the async iterator.
        # Declaring it `async def` makes it return a coroutine, which raises
        # TypeError under `async for` on Python 3.8+ (returning an awaitable
        # from __aiter__ was deprecated in 3.5.2 and removed in 3.8).
        return self
    async def __anext__(self):
        # Read data from the socket. When we have enough data to parse, parse
        # it and return the message. Until then keep reading from stream
        while True:
            try:
                data = await self.reader.read(PeerStreamIterator.CHUNK_SIZE)
                if data:
                    self.buffer += data
                    message = self.parse()
                    if message:
                        return message
                else:
                    logging.debug('No data read from stream')
                    if self.buffer:
                        message = self.parse()
                        if message:
                            return message
                    raise StopAsyncIteration()
            except ConnectionResetError:
                logging.debug('Connection closed by peer')
                raise StopAsyncIteration()
            except CancelledError:
                raise StopAsyncIteration()
            except StopAsyncIteration as e:
                # Catch to stop logging
                raise e
            except Exception:
                logging.exception('Error when iterating over stream!')
                raise StopAsyncIteration()
    def parse(self):
        """
        Tries to parse protocol messages if there is enough bytes read in the
        buffer.
        :return The parsed message, or None if no message could be parsed
        """
        # Each message is structured as:
        #     <length prefix><message ID><payload>
        #
        # The `length prefix` is a four byte big-endian value
        # The `message ID` is a decimal byte
        # The `payload` is the value of `length prefix`
        #
        # The message length is not part of the actual length. So another
        # 4 bytes needs to be included when slicing the buffer.
        header_length = 4
        if len(self.buffer) > 4:  # 4 bytes is needed to identify the message
            message_length = struct.unpack('>I', self.buffer[0:4])[0]
            if message_length == 0:
                return KeepAlive()
            # BUGFIX: the buffer must hold the 4-byte length prefix *and* the
            # payload. The previous check compared against `message_length`
            # alone, which could hand a truncated slice to the decoders.
            if len(self.buffer) >= header_length + message_length:
                # BUGFIX: message IDs are compared with `==` below; the
                # previous `is` comparison on ints relied on CPython's
                # small-int caching, which is implementation-defined.
                message_id = struct.unpack('>b', self.buffer[4:5])[0]
                def _consume():
                    """Consume the current message from the read buffer"""
                    self.buffer = self.buffer[header_length + message_length:]
                def _data():
                    """Extract the current message from the read buffer"""
                    return self.buffer[:header_length + message_length]
                if message_id == PeerMessage.BitField:
                    data = _data()
                    _consume()
                    return BitField.decode(data)
                elif message_id == PeerMessage.Interested:
                    _consume()
                    return Interested()
                elif message_id == PeerMessage.NotInterested:
                    _consume()
                    return NotInterested()
                elif message_id == PeerMessage.Choke:
                    _consume()
                    return Choke()
                elif message_id == PeerMessage.Unchoke:
                    _consume()
                    return Unchoke()
                elif message_id == PeerMessage.Have:
                    data = _data()
                    _consume()
                    return Have.decode(data)
                elif message_id == PeerMessage.Piece:
                    data = _data()
                    _consume()
                    return Piece.decode(data)
                elif message_id == PeerMessage.Request:
                    data = _data()
                    _consume()
                    return Request.decode(data)
                elif message_id == PeerMessage.Cancel:
                    data = _data()
                    _consume()
                    return Cancel.decode(data)
                else:
                    logging.info('Unsupported message!')
            else:
                logging.debug('Not enough in buffer in order to parse')
        return None
class PeerMessage:
    """
    A message between two peers.
    All of the remaining messages in the protocol take the form of:
        <length prefix><message ID><payload>
    - The length prefix is a four byte big-endian value.
    - The message ID is a single decimal byte.
    - The payload is message dependent.
    NOTE: The Handshake message is different in layout compared to the other
    messages.
    Read more:
        https://wiki.theory.org/BitTorrentSpecification#Messages
    BitTorrent uses Big-Endian (Network Byte Order) for all messages, this is
    declared as the first character being '>' in all pack / unpack calls to the
    Python's `struct` module.
    """
    # Wire-protocol message IDs: the single decimal byte that follows the
    # four-byte length prefix of every regular peer message.
    Choke = 0
    Unchoke = 1
    Interested = 2
    NotInterested = 3
    Have = 4
    BitField = 5
    Request = 6
    Piece = 7
    Cancel = 8
    Port = 9
    Handshake = None  # Handshake is not really part of the messages
    KeepAlive = None  # Keep-alive has no ID according to spec
    def encode(self) -> bytes:
        """
        Encodes this object instance to the raw bytes representing the entire
        message (ready to be transmitted).
        """
        pass
    @classmethod
    def decode(cls, data: bytes):
        """
        Decodes the given BitTorrent message into an instance for the
        implementing type.
        """
        pass
class Handshake(PeerMessage):
    """The first message exchanged with a remote peer.
    Always 68 bytes long for BitTorrent protocol version 1.0:
        <pstrlen><pstr><reserved><info_hash><peer_id>
    with pstrlen = 19 and pstr = "BitTorrent protocol",
    giving 49 + len(pstr) = 68 bytes.
    """
    length = 49 + 19
    def __init__(self, info_hash: bytes, peer_id: bytes):
        """Create a handshake message.
        :param info_hash: The SHA1 hash for the info dict
        :param peer_id: The unique peer id
        Either argument may also be passed as ``str``; it is then stored
        UTF-8 encoded.
        """
        self.info_hash = (info_hash.encode('utf-8')
                          if isinstance(info_hash, str) else info_hash)
        self.peer_id = (peer_id.encode('utf-8')
                        if isinstance(peer_id, str) else peer_id)
    def encode(self) -> bytes:
        """Serialize this handshake to its 68-byte wire representation."""
        return struct.pack(
            '>B19s8x20s20s',
            19,                       # pstrlen, a single byte
            b'BitTorrent protocol',   # pstr, 19 bytes
                                      # 8 reserved pad bytes
            self.info_hash,           # 20 bytes
            self.peer_id)             # 20 bytes
    @classmethod
    def decode(cls, data: bytes):
        """Parse a handshake from raw bytes; return None if data is too short."""
        logging.debug('Decoding Handshake of length: {length}'.format(
            length=len(data)))
        if len(data) < (49 + 19):
            return None
        _pstrlen, _pstr, info_hash, peer_id = struct.unpack(
            '>B19s8x20s20s', data)
        return cls(info_hash=info_hash, peer_id=peer_id)
    def __str__(self):
        return 'Handshake'
class KeepAlive(PeerMessage):
    """Zero-payload keep-alive message.
    The wire format is just a zero length prefix:
        <len=0000>
    """
    def __str__(self):
        # The textual name equals the class name.
        return type(self).__name__
class BitField(PeerMessage):
    """
    The BitField is a message with variable length where the payload is a
    bit array representing all the bits a peer have (1) or does not have (0).
    Message format:
        <len=0001+X><id=5><bitfield>
    """
    def __init__(self, data):
        # Raw payload bytes interpreted as one bit per piece.
        self.bitfield = bitstring.BitArray(bytes=data)
    def encode(self) -> bytes:
        """
        Encodes this object instance to the raw bytes representing the entire
        message (ready to be transmitted).
        """
        # BUGFIX: the payload length must be counted in *bytes*.
        # `len(self.bitfield)` is the number of bits, which previously
        # produced an oversized zero-padded payload and a wrong length
        # prefix (1 + bit-count instead of 1 + byte-count).
        data = bytes(self.bitfield)
        return struct.pack('>Ib' + str(len(data)) + 's',
                           1 + len(data),
                           PeerMessage.BitField,
                           data)
    @classmethod
    def decode(cls, data: bytes):
        """Parse a BitField message from its raw wire bytes."""
        message_length = struct.unpack('>I', data[:4])[0]
        logging.debug('Decoding BitField of length: {length}'.format(
            length=message_length))
        parts = struct.unpack('>Ib' + str(message_length - 1) + 's', data)
        return cls(parts[2])
    def __str__(self):
        return 'BitField'
class Interested(PeerMessage):
    """Fixed-length, payload-free message declaring interest in
    downloading pieces from the remote peer.
    Message format:
        <len=0001><id=2>
    """
    def encode(self) -> bytes:
        """Serialize this message to its raw wire bytes."""
        length_prefix = 1  # only the message id follows the prefix
        return struct.pack('>Ib', length_prefix, PeerMessage.Interested)
    def __str__(self):
        # The textual name equals the class name.
        return type(self).__name__
class NotInterested(PeerMessage):
    """Fixed-length, payload-free message signalling there is no
    interest in downloading pieces from the remote peer.
    Message format:
        <len=0001><id=3>
    """
    def __str__(self):
        # The textual name equals the class name.
        return type(self).__name__
class Choke(PeerMessage):
    """Fixed-length message telling the other peer to stop sending
    request messages until it is unchoked again.
    Message format:
        <len=0001><id=0>
    """
    def __str__(self):
        # The textual name equals the class name.
        return type(self).__name__
class Unchoke(PeerMessage):
    """Fixed-length message: unchoking a peer allows it to start
    requesting pieces from the remote peer.
    Message format:
        <len=0001><id=1>
    """
    def __str__(self):
        # The textual name equals the class name.
        return type(self).__name__
class Have(PeerMessage):
    """Announces a piece the remote peer has successfully downloaded.
    The piece is a zero based index into the torrent's pieces.
    """
    def __init__(self, index: int):
        self.index = index
    def encode(self):
        """Serialize this message to its raw wire bytes."""
        length_prefix = 5  # id byte plus a four-byte piece index
        return struct.pack('>IbI', length_prefix, PeerMessage.Have, self.index)
    @classmethod
    def decode(cls, data: bytes):
        """Parse a Have message from its raw wire bytes."""
        logging.debug('Decoding Have of length: {length}'.format(
            length=len(data)))
        _length, _id, index = struct.unpack('>IbI', data)
        return cls(index)
    def __str__(self):
        # The textual name equals the class name.
        return type(self).__name__
class Request(PeerMessage):
    """Requests a block of a piece (i.e. a partial piece).
    Each block request is 2^14 bytes, except possibly the final block of a
    piece, which may be smaller (pieces are not always evenly divisible by
    the request size).
    Message format:
        <len=0013><id=6><index><begin><length>
    """
    def __init__(self, index: int, begin: int, length: int = REQUEST_SIZE):
        """Create a Request message.
        :param index: The zero based piece index
        :param begin: The zero based offset within a piece
        :param length: The requested length of data (default 2^14)
        """
        self.index = index
        self.begin = begin
        self.length = length
    def encode(self):
        """Serialize this message to its raw wire bytes."""
        return struct.pack(
            '>IbIII', 13, PeerMessage.Request,
            self.index, self.begin, self.length)
    @classmethod
    def decode(cls, data: bytes):
        """Parse a Request message from its raw wire bytes."""
        logging.debug('Decoding Request of length: {length}'.format(
            length=len(data)))
        _length, _id, index, begin, length = struct.unpack('>IbIII', data)
        return cls(index, begin, length)
    def __str__(self):
        # The textual name equals the class name.
        return type(self).__name__
class Piece(PeerMessage):
    """A single block of a piece.
    The official specification calls this message "piece" even though it
    carries a block (a sub-range of a piece); this class keeps the spec's
    name while really representing a block.
    Message format:
        <length prefix><message ID><index><begin><block>
    """
    # Length of the fixed part (id + index + begin), excluding block data.
    length = 9
    def __init__(self, index: int, begin: int, block: bytes):
        """Create a Piece message.
        :param index: The zero based piece index
        :param begin: The zero based offset within a piece
        :param block: The block data
        """
        self.index = index
        self.begin = begin
        self.block = block
    def encode(self):
        """Serialize this message to its raw wire bytes."""
        total = Piece.length + len(self.block)
        fmt = '>IbII' + str(len(self.block)) + 's'
        return struct.pack(fmt, total, PeerMessage.Piece,
                           self.index, self.begin, self.block)
    @classmethod
    def decode(cls, data: bytes):
        """Parse a Piece message from its raw wire bytes."""
        logging.debug('Decoding Piece of length: {length}'.format(
            length=len(data)))
        total = struct.unpack('>I', data[:4])[0]
        fmt = '>IbII' + str(total - Piece.length) + 's'
        _length, _id, index, begin, block = struct.unpack(
            fmt, data[:total + 4])
        return cls(index, begin, block)
    def __str__(self):
        # The textual name equals the class name.
        return type(self).__name__
class Cancel(PeerMessage):
    """Cancels a previously requested block.
    Apart from the message id, the layout is identical to Request.
    Message format:
        <len=0013><id=8><index><begin><length>
    """
    def __init__(self, index, begin, length: int = REQUEST_SIZE):
        self.index = index
        self.begin = begin
        self.length = length
    def encode(self):
        """Serialize this message to its raw wire bytes."""
        return struct.pack(
            '>IbIII', 13, PeerMessage.Cancel,
            self.index, self.begin, self.length)
    @classmethod
    def decode(cls, data: bytes):
        """Parse a Cancel message from its raw wire bytes."""
        logging.debug('Decoding Cancel of length: {length}'.format(
            length=len(data)))
        _length, _id, index, begin, length = struct.unpack('>IbIII', data)
        return cls(index, begin, length)
    def __str__(self):
        # The textual name equals the class name.
        return type(self).__name__
| 35.446335 | 116 | 0.570991 |
import asyncio
import logging
import struct
from asyncio import Queue
from concurrent.futures import CancelledError
import datetime
import bitstring
REQUEST_SIZE = 2**14
HANDSHAKE_TIMEOUT = 4
class ProtocolError(BaseException):
pass
class PeerConnection:
def __init__(self, queue: Queue, info_hash,
peer_id, piece_manager, on_block_cb=None):
self.my_state = []
self.peer_state = []
self.queue = queue
self.info_hash = info_hash
self.peer_id = peer_id
self.remote_id = None
self.writer = None
self.reader = None
self.piece_manager = piece_manager
self.on_block_cb = on_block_cb
self.future = asyncio.ensure_future(self._start())
async def _start(self):
while 'stopped' not in self.my_state:
ip, port = await self.queue.get()
logging.info('Got assigned peer with: {ip}'.format(ip=ip))
try:
self.reader, self.writer = await asyncio.open_connection(
ip, port)
logging.info('Connection open to peer: {ip}'.format(ip=ip))
buffer = await self._handshake()
# TODO Add support for sending data
# Sending BitField is optional and not needed when client does
# not have any pieces. Thus we do not send any bitfield message
# The default state for a connection is that peer is not
# interested and we are choked
self.my_state.append('choked')
# Let the peer know we're interested in downloading pieces
await self._send_interested()
self.my_state.append('interested')
async for message in PeerStreamIterator(self.reader, buffer):
if 'stopped' in self.my_state:
break
if type(message) is BitField:
self.piece_manager.add_peer(self.remote_id,
message.bitfield)
elif type(message) is Interested:
self.peer_state.append('interested')
elif type(message) is NotInterested:
if 'interested' in self.peer_state:
self.peer_state.remove('interested')
elif type(message) is Choke:
self.my_state.append('choked')
"""
written/modified by Stefan Brynielsson, May 2019
"""
if "pending_request" in self.my_state:
self.my_state.remove('pending_request')
elif type(message) is Unchoke:
if 'choked' in self.my_state:
self.my_state.remove('choked')
"""
written/modified by Stefan Brynielsson, May 2019
"""
if "pending_request" in self.my_state:
self.my_state.remove('pending_request')
elif type(message) is Have:
self.piece_manager.update_peer(self.remote_id,
message.index)
elif type(message) is KeepAlive:
"""
Written/modified by Stefan Brynielsson, May 2019
"""
break
elif type(message) is Piece:
self.my_state.remove('pending_request')
self.on_block_cb(
peer_id=self.remote_id,
piece_index=message.index,
block_offset=message.begin,
data=message.block)
elif type(message) is Request:
logging.info('Ignoring the received Request message.')
elif type(message) is Cancel:
logging.info('Ignoring the received Cancel message.')
if 'choked' not in self.my_state:
if 'interested' in self.my_state:
if 'pending_request' not in self.my_state:
self.my_state.append('pending_request')
await self._request_piece()
except ProtocolError as e:
logging.exception('Protocol error')
except (ConnectionRefusedError, TimeoutError):
logging.warning('Unable to connect to peer')
except (ConnectionResetError, CancelledError):
logging.warning('Connection closed')
except Exception as e:
logging.exception('An error occurred')
self.cancel()
raise e
self.cancel()
def cancel(self):
logging.info('Closing peer {id}'.format(id=self.remote_id))
if not self.future.done():
self.future.cancel()
if self.writer:
self.writer.close()
self.queue.task_done()
def stop(self):
# Set state to stopped and cancel our future to break out of the loop.
# The rest of the cleanup will eventually be managed by loop calling
# `cancel`.
self.my_state.append('stopped')
if not self.future.done():
self.future.cancel()
async def _request_piece(self):
block = self.piece_manager.next_request(self.remote_id)
if block:
message = Request(block.piece, block.offset, block.length).encode()
logging.debug('Requesting block {block} for piece {piece} '
'of {length} bytes from peer {peer}'.format(
piece=block.piece,
block=block.offset,
length=block.length,
peer=self.remote_id))
self.writer.write(message)
await self.writer.drain()
async def _handshake(self):
self.writer.write(Handshake(self.info_hash, self.peer_id).encode())
time = datetime.datetime.now()
await self.writer.drain()
buf = b''
while len(buf) < Handshake.length:
buf = await self.reader.read(PeerStreamIterator.CHUNK_SIZE)
# End the handshake if it takes longer than HANDSHAKE_TIMEOUT seconds
if (datetime.datetime.now() - time).total_seconds() > HANDSHAKE_TIMEOUT and len(buf) < Handshake.length:
raise TimeoutError('NO handshake response')
response = Handshake.decode(buf[:Handshake.length])
if not response:
raise ProtocolError('Unable receive and parse a handshake')
if not response.info_hash == self.info_hash:
raise ProtocolError('Handshake with invalid info_hash')
# TODO: According to spec we should validate that the peer_id received
# from the peer match the peer_id received from the tracker.
self.remote_id = response.peer_id
logging.info('Handshake with peer was successful')
# We need to return the remaining buffer data, since we might have
# read more bytes then the size of the handshake message and we need
# those bytes to parse the next message.
return buf[Handshake.length:]
async def _send_interested(self):
message = Interested()
logging.debug('Sending message: {type}'.format(type=message))
self.writer.write(message.encode())
await self.writer.drain()
class PeerStreamIterator:
CHUNK_SIZE = 10*1024
def __init__(self, reader, initial: bytes = None):
self.reader = reader
self.buffer = initial if initial else b''
async def __aiter__(self):
return self
async def __anext__(self):
# Read data from the socket. When we have enough data to parse, parse
# it and return the message. Until then keep reading from stream
while True:
try:
data = await self.reader.read(PeerStreamIterator.CHUNK_SIZE)
if data:
self.buffer += data
message = self.parse()
if message:
return message
else:
logging.debug('No data read from stream')
if self.buffer:
message = self.parse()
if message:
return message
raise StopAsyncIteration()
except ConnectionResetError:
logging.debug('Connection closed by peer')
raise StopAsyncIteration()
except CancelledError:
raise StopAsyncIteration()
except StopAsyncIteration as e:
# Cath to stop logging
raise e
except Exception:
logging.exception('Error when iterating over stream!')
raise StopAsyncIteration()
raise StopAsyncIteration()
def parse(self):
# Each message is structured as:
# <length prefix><message ID><payload>
#
# The `length prefix` is a four byte big-endian value
# The `message ID` is a decimal byte
# The `payload` is the value of `length prefix`
#
# The message length is not part of the actual length. So another
# 4 bytes needs to be included when slicing the buffer.
header_length = 4
if len(self.buffer) > 4: # 4 bytes is needed to identify the message
message_length = struct.unpack('>I', self.buffer[0:4])[0]
if message_length == 0:
return KeepAlive()
if len(self.buffer) >= message_length:
message_id = struct.unpack('>b', self.buffer[4:5])[0]
def _consume():
self.buffer = self.buffer[header_length + message_length:]
def _data():
return self.buffer[:header_length + message_length]
if message_id is PeerMessage.BitField:
data = _data()
_consume()
return BitField.decode(data)
elif message_id is PeerMessage.Interested:
_consume()
return Interested()
elif message_id is PeerMessage.NotInterested:
_consume()
return NotInterested()
elif message_id is PeerMessage.Choke:
_consume()
return Choke()
elif message_id is PeerMessage.Unchoke:
_consume()
return Unchoke()
elif message_id is PeerMessage.Have:
data = _data()
_consume()
return Have.decode(data)
elif message_id is PeerMessage.Piece:
data = _data()
_consume()
return Piece.decode(data)
elif message_id is PeerMessage.Request:
data = _data()
_consume()
return Request.decode(data)
elif message_id is PeerMessage.Cancel:
data = _data()
_consume()
return Cancel.decode(data)
else:
logging.info('Unsupported message!')
else:
logging.debug('Not enough in buffer in order to parse')
return None
class PeerMessage:
Choke = 0
Unchoke = 1
Interested = 2
NotInterested = 3
Have = 4
BitField = 5
Request = 6
Piece = 7
Cancel = 8
Port = 9
Handshake = None # Handshake is not really part of the messages
KeepAlive = None # Keep-alive has no ID according to spec
def encode(self) -> bytes:
pass
@classmethod
def decode(cls, data: bytes):
pass
class Handshake(PeerMessage):
length = 49 + 19
def __init__(self, info_hash: bytes, peer_id: bytes):
if isinstance(info_hash, str):
info_hash = info_hash.encode('utf-8')
if isinstance(peer_id, str):
peer_id = peer_id.encode('utf-8')
self.info_hash = info_hash
self.peer_id = peer_id
def encode(self) -> bytes:
return struct.pack(
'>B19s8x20s20s',
19, # Single byte (B)
b'BitTorrent protocol', # String 19s
# Reserved 8x (pad byte, no value)
self.info_hash, # String 20s
self.peer_id) # String 20s
@classmethod
def decode(cls, data: bytes):
logging.debug('Decoding Handshake of length: {length}'.format(
length=len(data)))
if len(data) < (49 + 19):
return None
parts = struct.unpack('>B19s8x20s20s', data)
return cls(info_hash=parts[2], peer_id=parts[3])
def __str__(self):
return 'Handshake'
class KeepAlive(PeerMessage):
def __str__(self):
return 'KeepAlive'
class BitField(PeerMessage):
def __init__(self, data):
self.bitfield = bitstring.BitArray(bytes=data)
def encode(self) -> bytes:
bits_length = len(self.bitfield)
return struct.pack('>Ib' + str(bits_length) + 's',
1 + bits_length,
PeerMessage.BitField,
bytes(self.bitfield))
@classmethod
def decode(cls, data: bytes):
message_length = struct.unpack('>I', data[:4])[0]
logging.debug('Decoding BitField of length: {length}'.format(
length=message_length))
parts = struct.unpack('>Ib' + str(message_length - 1) + 's', data)
return cls(parts[2])
def __str__(self):
return 'BitField'
class Interested(PeerMessage):
def encode(self) -> bytes:
return struct.pack('>Ib',
1, # Message length
PeerMessage.Interested)
def __str__(self):
return 'Interested'
class NotInterested(PeerMessage):
def __str__(self):
return 'NotInterested'
class Choke(PeerMessage):
def __str__(self):
return 'Choke'
class Unchoke(PeerMessage):
def __str__(self):
return 'Unchoke'
class Have(PeerMessage):
def __init__(self, index: int):
self.index = index
def encode(self):
return struct.pack('>IbI',
5, # Message length
PeerMessage.Have,
self.index)
@classmethod
def decode(cls, data: bytes):
logging.debug('Decoding Have of length: {length}'.format(
length=len(data)))
index = struct.unpack('>IbI', data)[2]
return cls(index)
def __str__(self):
return 'Have'
class Request(PeerMessage):
def __init__(self, index: int, begin: int, length: int = REQUEST_SIZE):
self.index = index
self.begin = begin
self.length = length
def encode(self):
return struct.pack('>IbIII',
13,
PeerMessage.Request,
self.index,
self.begin,
self.length)
@classmethod
def decode(cls, data: bytes):
logging.debug('Decoding Request of length: {length}'.format(
length=len(data)))
# Tuple with (message length, id, index, begin, length)
parts = struct.unpack('>IbIII', data)
return cls(parts[2], parts[3], parts[4])
def __str__(self):
return 'Request'
class Piece(PeerMessage):
# The Piece message length without the block data
length = 9
def __init__(self, index: int, begin: int, block: bytes):
self.index = index
self.begin = begin
self.block = block
def encode(self):
message_length = Piece.length + len(self.block)
return struct.pack('>IbII' + str(len(self.block)) + 's',
message_length,
PeerMessage.Piece,
self.index,
self.begin,
self.block)
@classmethod
def decode(cls, data: bytes):
logging.debug('Decoding Piece of length: {length}'.format(
length=len(data)))
length = struct.unpack('>I', data[:4])[0]
parts = struct.unpack('>IbII' + str(length - Piece.length) + 's',
data[:length+4])
return cls(parts[2], parts[3], parts[4])
def __str__(self):
return 'Piece'
class Cancel(PeerMessage):
def __init__(self, index, begin, length: int = REQUEST_SIZE):
self.index = index
self.begin = begin
self.length = length
def encode(self):
return struct.pack('>IbIII',
13,
PeerMessage.Cancel,
self.index,
self.begin,
self.length)
@classmethod
def decode(cls, data: bytes):
logging.debug('Decoding Cancel of length: {length}'.format(
length=len(data)))
# Tuple with (message length, id, index, begin, length)
parts = struct.unpack('>IbIII', data)
return cls(parts[2], parts[3], parts[4])
def __str__(self):
return 'Cancel'
| true | true |
f73d3204a303755adfb5db20409a06b93174a623 | 1,966 | py | Python | t/test_multiple_outputs.py | texttheater/produce | 202e3196daf7ac53c1998ac2ee9b0f8cbb1c6615 | [
"MIT"
] | 14 | 2015-03-04T12:40:25.000Z | 2021-07-07T04:06:09.000Z | t/test_multiple_outputs.py | texttheater/produce | 202e3196daf7ac53c1998ac2ee9b0f8cbb1c6615 | [
"MIT"
] | 43 | 2015-01-27T13:52:39.000Z | 2020-10-17T19:51:20.000Z | t/test_multiple_outputs.py | texttheater/produce | 202e3196daf7ac53c1998ac2ee9b0f8cbb1c6615 | [
"MIT"
] | 2 | 2015-11-23T08:59:07.000Z | 2019-04-17T13:04:49.000Z | import logging
import prodtest
class MultipleOutputsTest(prodtest.ProduceTestCase):
"""
Tests the handling of recipes with multiple outputs.
"""
def test_without(self):
"""
Without the outputs attribute, the recipe is run twice, once for each
target, thus two INFO messages are generated:
"""
self.assertDirectoryContents(['produce.ini', 'Makefile'])
with self.assertLogs(logger='produce', level='INFO') as l:
self.produce('a.txt', 'b.txt', **{'-j': '3'})
self.assertEqual(len(l.output), 4)
self.assertDirectoryContents(['produce.ini', 'Makefile', 'a.txt', 'b.txt'])
def test_with(self):
"""
With the outputs attribute, the recipe is run only once:
"""
self.assertDirectoryContents(['produce.ini', 'Makefile'])
with self.assertLogs(logger='produce', level='INFO') as l:
self.produce('c.txt', 'd.txt', **{'-j': '3'})
self.assertEqual(len(l.output), 2)
self.assertDirectoryContents(['produce.ini', 'Makefile', 'c.txt', 'd.txt'])
def test_with_2(self):
"""
Same, but using the out. prefix instead of the outputs attribute.
"""
self.assertDirectoryContents(['produce.ini', 'Makefile'])
with self.assertLogs(logger='produce', level='INFO') as l:
self.produce('e.txt', 'f.txt', **{'-j': '3'})
self.assertEqual(len(l.output), 2)
self.assertDirectoryContents(['produce.ini', 'Makefile', 'e.txt', 'f.txt'])
def test_with_3(self):
"""
Same, mixing out. and outputs.
"""
self.assertDirectoryContents(['produce.ini', 'Makefile'])
with self.assertLogs(logger='produce', level='INFO') as l:
self.produce('g.txt', 'h.txt', 'i.txt', **{'-j': '3'})
self.assertEqual(len(l.output), 2)
self.assertDirectoryContents(['produce.ini', 'Makefile', 'g.txt', 'h.txt', 'i.txt'])
| 39.32 | 92 | 0.591048 | import logging
import prodtest
class MultipleOutputsTest(prodtest.ProduceTestCase):
def test_without(self):
self.assertDirectoryContents(['produce.ini', 'Makefile'])
with self.assertLogs(logger='produce', level='INFO') as l:
self.produce('a.txt', 'b.txt', **{'-j': '3'})
self.assertEqual(len(l.output), 4)
self.assertDirectoryContents(['produce.ini', 'Makefile', 'a.txt', 'b.txt'])
def test_with(self):
self.assertDirectoryContents(['produce.ini', 'Makefile'])
with self.assertLogs(logger='produce', level='INFO') as l:
self.produce('c.txt', 'd.txt', **{'-j': '3'})
self.assertEqual(len(l.output), 2)
self.assertDirectoryContents(['produce.ini', 'Makefile', 'c.txt', 'd.txt'])
def test_with_2(self):
self.assertDirectoryContents(['produce.ini', 'Makefile'])
with self.assertLogs(logger='produce', level='INFO') as l:
self.produce('e.txt', 'f.txt', **{'-j': '3'})
self.assertEqual(len(l.output), 2)
self.assertDirectoryContents(['produce.ini', 'Makefile', 'e.txt', 'f.txt'])
def test_with_3(self):
self.assertDirectoryContents(['produce.ini', 'Makefile'])
with self.assertLogs(logger='produce', level='INFO') as l:
self.produce('g.txt', 'h.txt', 'i.txt', **{'-j': '3'})
self.assertEqual(len(l.output), 2)
self.assertDirectoryContents(['produce.ini', 'Makefile', 'g.txt', 'h.txt', 'i.txt'])
| true | true |
f73d3257f62bc0ae7b535041cfd95569f3b0616a | 4,265 | py | Python | routines/pf/models/_sym.py | cmulvihill/mechdriver | 296cc3b4c154889c11470c7b82315b0382334b98 | [
"Apache-2.0"
] | null | null | null | routines/pf/models/_sym.py | cmulvihill/mechdriver | 296cc3b4c154889c11470c7b82315b0382334b98 | [
"Apache-2.0"
] | null | null | null | routines/pf/models/_sym.py | cmulvihill/mechdriver | 296cc3b4c154889c11470c7b82315b0382334b98 | [
"Apache-2.0"
] | null | null | null | """ Handle symmetry factor stuff
"""
import automol
from autofile import fs
from lib import structure
def symmetry_factor(pf_filesystems, pf_models, spc_dct_i, rotors,
frm_bnd_keys=(), brk_bnd_keys=()):
""" Calculate the symmetry factor for a species
Note: ignoring for saddle pts the possibility that two configurations
differ only in their torsional values.
As a result, the symmetry factor is a lower bound of the true value
"""
if 'sym_factor' in spc_dct_i:
sym_factor = spc_dct_i['sym_factor']
print(' - Reading symmetry number input by user:', sym_factor)
else:
# if automol.geom.is_atom(geo):
sym_model = pf_models['sym']
# Obtain geometry, energy, and symmetry filesystem
[cnf_fs, cnf_path, min_cnf_locs, _, _] = pf_filesystems['sym']
geo = cnf_fs[-1].file.geometry.read(min_cnf_locs)
# Obtain the external symssetry number
ext_sym = automol.geom.external_symmetry_factor(geo)
# Obtain the internal symmetry number using some routine
if sym_model == 'sampling':
# Set up the symmetry filesystem
sym_fs = fs.symmetry(cnf_path)
sym_geos = [geo]
sym_geos += [sym_fs[-1].file.geometry.read(locs)
for locs in sym_fs[-1].existing()]
# Obtain the internal
if rotors:
print(' - Determining internal sym number ',
'using sampling routine.')
int_sym = int_sym_num_from_sampling(
sym_geos,
frm_bnd_keys=frm_bnd_keys,
brk_bnd_keys=brk_bnd_keys)
else:
print(' - No torsions, internal sym is 1.0')
int_sym = 1.0
else:
print('No symmetry model requested, ',
'setting internal sym factor to 1.0')
int_sym = 1.0
# Obtain overall number
sym_factor = ext_sym * int_sym
# Reduce sym factor using rotor symmetries
sym_factor = tors_reduced_sym_factor(sym_factor, rotors)
# print('sym_factor test:', sym_factor)
return sym_factor
def int_sym_num_from_sampling(sym_geos, frm_bnd_keys=(), brk_bnd_keys=()):
""" Determine the symmetry number for a given conformer geometry.
(1) Explore the saved conformers to find the list of similar conformers -
i.e. those with a coulomb matrix and energy that are equivalent
to those for the reference geometry.
(2) Expand each of those similar conformers by applying
rotational permutations to each of the terminal groups.
(3) Count how many distinct distance matrices there are in
the fully expanded conformer list.
"""
# Set saddle
saddle = bool(frm_bnd_keys or brk_bnd_keys)
int_sym_num = 0
# modify geometries to remove H's from rotatable XHn end group
# this will be accounted for separately as multiplicative factor
mod_sym_geos = []
for geo_sym_i in sym_geos:
mod_geo_sym_i, end_group_factor = automol.geom.end_group_sym_factor(
geo_sym_i, frm_bnd_keys, brk_bnd_keys)
# print('end_group_factor test:', end_group_factor)
new_geom = True
for mod_geo_sym_j in mod_sym_geos:
if automol.geom.almost_equal_dist_matrix(
mod_geo_sym_i, mod_geo_sym_j, thresh=3e-1):
if saddle:
new_geom = False
break
tors_same = structure.geom.are_torsions_same(
mod_geo_sym_i, mod_geo_sym_j, ts_bnds=())
if tors_same:
new_geom = False
break
if new_geom:
mod_sym_geos.append(mod_geo_sym_i)
int_sym_num += 1
int_sym_num *= end_group_factor
return int_sym_num
def tors_reduced_sym_factor(sym_factor, rotors):
""" Decrease the overall molecular symmetry factor by the
torsional mode symmetry numbers
"""
for rotor in rotors:
for tors_name, tors_dct in rotor.items():
if 'D' in tors_name:
sym_factor /= tors_dct['sym_num']
return sym_factor
| 34.674797 | 77 | 0.616882 |
import automol
from autofile import fs
from lib import structure
def symmetry_factor(pf_filesystems, pf_models, spc_dct_i, rotors,
frm_bnd_keys=(), brk_bnd_keys=()):
if 'sym_factor' in spc_dct_i:
sym_factor = spc_dct_i['sym_factor']
print(' - Reading symmetry number input by user:', sym_factor)
else:
sym_model = pf_models['sym']
[cnf_fs, cnf_path, min_cnf_locs, _, _] = pf_filesystems['sym']
geo = cnf_fs[-1].file.geometry.read(min_cnf_locs)
ext_sym = automol.geom.external_symmetry_factor(geo)
if sym_model == 'sampling':
sym_fs = fs.symmetry(cnf_path)
sym_geos = [geo]
sym_geos += [sym_fs[-1].file.geometry.read(locs)
for locs in sym_fs[-1].existing()]
if rotors:
print(' - Determining internal sym number ',
'using sampling routine.')
int_sym = int_sym_num_from_sampling(
sym_geos,
frm_bnd_keys=frm_bnd_keys,
brk_bnd_keys=brk_bnd_keys)
else:
print(' - No torsions, internal sym is 1.0')
int_sym = 1.0
else:
print('No symmetry model requested, ',
'setting internal sym factor to 1.0')
int_sym = 1.0
sym_factor = ext_sym * int_sym
sym_factor = tors_reduced_sym_factor(sym_factor, rotors)
return sym_factor
def int_sym_num_from_sampling(sym_geos, frm_bnd_keys=(), brk_bnd_keys=()):
saddle = bool(frm_bnd_keys or brk_bnd_keys)
int_sym_num = 0
# this will be accounted for separately as multiplicative factor
mod_sym_geos = []
for geo_sym_i in sym_geos:
mod_geo_sym_i, end_group_factor = automol.geom.end_group_sym_factor(
geo_sym_i, frm_bnd_keys, brk_bnd_keys)
# print('end_group_factor test:', end_group_factor)
new_geom = True
for mod_geo_sym_j in mod_sym_geos:
if automol.geom.almost_equal_dist_matrix(
mod_geo_sym_i, mod_geo_sym_j, thresh=3e-1):
if saddle:
new_geom = False
break
tors_same = structure.geom.are_torsions_same(
mod_geo_sym_i, mod_geo_sym_j, ts_bnds=())
if tors_same:
new_geom = False
break
if new_geom:
mod_sym_geos.append(mod_geo_sym_i)
int_sym_num += 1
int_sym_num *= end_group_factor
return int_sym_num
def tors_reduced_sym_factor(sym_factor, rotors):
for rotor in rotors:
for tors_name, tors_dct in rotor.items():
if 'D' in tors_name:
sym_factor /= tors_dct['sym_num']
return sym_factor
| true | true |
f73d327e2f83cfcb8714c4fa54d314b74ae3fd9f | 432 | py | Python | djangocms_versioning/test_utils/people/models.py | webbyfox/djangocms-versioning | a466ff0f8d109a22ec2f567cace6ef69d332180c | [
"BSD-3-Clause"
] | null | null | null | djangocms_versioning/test_utils/people/models.py | webbyfox/djangocms-versioning | a466ff0f8d109a22ec2f567cace6ef69d332180c | [
"BSD-3-Clause"
] | null | null | null | djangocms_versioning/test_utils/people/models.py | webbyfox/djangocms-versioning | a466ff0f8d109a22ec2f567cace6ef69d332180c | [
"BSD-3-Clause"
] | null | null | null | from django.db import models
class Person(models.Model):
name = models.TextField()
def __str__(self):
return "{} ({})".format(self.name, self.pk)
class PersonContent(models.Model):
person = models.ForeignKey(Person, on_delete=models.CASCADE)
language = models.TextField()
text = models.TextField()
def __str__(self):
return self.text
def get_absolute_url(self):
return '/'
| 20.571429 | 64 | 0.655093 | from django.db import models
class Person(models.Model):
name = models.TextField()
def __str__(self):
return "{} ({})".format(self.name, self.pk)
class PersonContent(models.Model):
person = models.ForeignKey(Person, on_delete=models.CASCADE)
language = models.TextField()
text = models.TextField()
def __str__(self):
return self.text
def get_absolute_url(self):
return '/'
| true | true |
f73d349e984d5454ce11e084b531095d6331c012 | 206 | py | Python | solution/app/admin.py | itsnikhil/atlan-challenge | 41fc403f2156a5d1f182f58ce52458d4f04ec0c4 | [
"MIT"
] | null | null | null | solution/app/admin.py | itsnikhil/atlan-challenge | 41fc403f2156a5d1f182f58ce52458d4f04ec0c4 | [
"MIT"
] | null | null | null | solution/app/admin.py | itsnikhil/atlan-challenge | 41fc403f2156a5d1f182f58ce52458d4f04ec0c4 | [
"MIT"
] | null | null | null | from django.contrib import admin
from app.models import DataStore, GameSale
# Show files uploaded in django admin
admin.site.register(DataStore)
# Show record in django admin
admin.site.register(GameSale) | 25.75 | 42 | 0.815534 | from django.contrib import admin
from app.models import DataStore, GameSale
admin.site.register(DataStore)
admin.site.register(GameSale) | true | true |
f73d351428b1cb2945097f900a83553d32216fd3 | 2,240 | py | Python | backend/app/schemas/cars_search_query.py | BartlomiejRasztabiga/Rentally | ba70199d329895a5295ceddd0ecc4c61928890dd | [
"MIT"
] | 2 | 2021-01-11T23:24:29.000Z | 2021-01-12T09:55:58.000Z | backend/app/schemas/cars_search_query.py | BartlomiejRasztabiga/Rentally | ba70199d329895a5295ceddd0ecc4c61928890dd | [
"MIT"
] | null | null | null | backend/app/schemas/cars_search_query.py | BartlomiejRasztabiga/Rentally | ba70199d329895a5295ceddd0ecc4c61928890dd | [
"MIT"
] | null | null | null | import abc
from datetime import datetime
from typing import List, Optional
from pydantic import BaseModel
from sqlalchemy.sql.elements import BinaryExpression
from app.models import Car
from app.models.car import AcType, CarType, DriveType, FuelType, GearboxType
class RangeCriterion(BaseModel):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def to_condition(self) -> BinaryExpression:
raise NotImplementedError()
class NumberOfPassengersRange(RangeCriterion):
start: int
end: int
def to_condition(self) -> BinaryExpression:
return Car.number_of_passengers.between(self.start, self.end)
class PricePerDayRange(RangeCriterion):
start: float
end: float
def to_condition(self) -> BinaryExpression:
return Car.price_per_day.between(self.start, self.end)
class AvailabilityDatesRange(BaseModel):
start: datetime
end: datetime
class CarsSearchQuery(BaseModel):
model_name: Optional[str] = None
type: Optional[CarType] = None
fuel_type: Optional[FuelType] = None
gearbox_type: Optional[GearboxType] = None
ac_type: Optional[AcType] = None
drive_type: Optional[DriveType] = None
number_of_passengers: Optional[NumberOfPassengersRange] = None
price_per_day: Optional[PricePerDayRange] = None
availability_dates: Optional[AvailabilityDatesRange] = None
def to_conditions(self) -> List[BinaryExpression]:
"""
Returns list of SQLAlchemy filter conditions based on query object values
"""
conditions = []
for field_name in CarsSearchQuery.__fields__.keys():
value = getattr(self, field_name)
if value is not None:
if isinstance(value, RangeCriterion):
conditions.append(value.to_condition())
elif isinstance(
value, str
): # use ilike on str fields instead of exact match
conditions.append(getattr(Car, field_name).ilike(f"%{value}%"))
elif isinstance(value, AvailabilityDatesRange): # skip
pass
else:
conditions.append(getattr(Car, field_name) == value)
return conditions
| 31.549296 | 83 | 0.674107 | import abc
from datetime import datetime
from typing import List, Optional
from pydantic import BaseModel
from sqlalchemy.sql.elements import BinaryExpression
from app.models import Car
from app.models.car import AcType, CarType, DriveType, FuelType, GearboxType
class RangeCriterion(BaseModel):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def to_condition(self) -> BinaryExpression:
raise NotImplementedError()
class NumberOfPassengersRange(RangeCriterion):
start: int
end: int
def to_condition(self) -> BinaryExpression:
return Car.number_of_passengers.between(self.start, self.end)
class PricePerDayRange(RangeCriterion):
start: float
end: float
def to_condition(self) -> BinaryExpression:
return Car.price_per_day.between(self.start, self.end)
class AvailabilityDatesRange(BaseModel):
start: datetime
end: datetime
class CarsSearchQuery(BaseModel):
model_name: Optional[str] = None
type: Optional[CarType] = None
fuel_type: Optional[FuelType] = None
gearbox_type: Optional[GearboxType] = None
ac_type: Optional[AcType] = None
drive_type: Optional[DriveType] = None
number_of_passengers: Optional[NumberOfPassengersRange] = None
price_per_day: Optional[PricePerDayRange] = None
availability_dates: Optional[AvailabilityDatesRange] = None
def to_conditions(self) -> List[BinaryExpression]:
conditions = []
for field_name in CarsSearchQuery.__fields__.keys():
value = getattr(self, field_name)
if value is not None:
if isinstance(value, RangeCriterion):
conditions.append(value.to_condition())
elif isinstance(
value, str
):
conditions.append(getattr(Car, field_name).ilike(f"%{value}%"))
elif isinstance(value, AvailabilityDatesRange):
pass
else:
conditions.append(getattr(Car, field_name) == value)
return conditions
| true | true |
f73d36f0dfeeaff0a96d34b47f6c41f1dd0588cf | 143,997 | py | Python | src/genie/libs/parser/nxos/show_interface.py | psolarcz/genieparser | 811c197a1dab6a635e6dec145b99194648bf4ff4 | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/nxos/show_interface.py | psolarcz/genieparser | 811c197a1dab6a635e6dec145b99194648bf4ff4 | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/nxos/show_interface.py | psolarcz/genieparser | 811c197a1dab6a635e6dec145b99194648bf4ff4 | [
"Apache-2.0"
] | null | null | null | '''show_interface.py
NXOS parsers for the following show commands:
    * show interface
    * show interface {interface}
* show vrf all interface
* show ip interface vrf all
* show ipv6 interface detail vrf all
* show interface switchport
* show ip interface brief
* show ip interface brief | vlan
* show interface brief
* show interface {interface} brief
* show running-config interface {interface}
'''
# python
import re
# metaparser
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, Any, Optional
# import parser utils
from genie.libs.parser.utils.common import Common
# ===========================
# Schema for 'show interface'
# ===========================
class ShowInterfaceSchema(MetaParser):
    """Schema for show interface

    The top-level key is the interface name (e.g. 'Ethernet2/1',
    'Vlan23', a sub-interface such as 'Ethernet2/1.10', or a
    port-channel).  Only 'oper_status' and 'enabled' are mandatory;
    every other key is filled in only when the corresponding line
    appears in the device output.
    """

    schema = {
        Any():  # interface name
            {
            # --- identity / link state ---
            Optional('description'): str,
            Optional('types'): str,                # hardware type, e.g. 'Ethernet'
            Optional('parent_interface'): str,     # parent of a sub-interface
            'oper_status': str,
            Optional('admin_state'): str,
            Optional('dedicated_intface'): bool,   # 'Dedicated Interface' flag
            Optional('line_protocol'): str,
            Optional('autostate'): bool,           # SVI autostate (Vlan interfaces)
            Optional('link_state'): str,           # reason text, e.g. 'Administratively down'
            Optional('phys_address'): str,         # burned-in address ('bia ...')
            Optional('port_speed'): str,
            Optional('mtu'): int,
            'enabled': bool,
            Optional('mac_address'): str,
            # --- layer-1/2 properties ---
            Optional('auto_negotiate'): bool,
            Optional('duplex_mode'): str,
            Optional('port_mode'): str,            # e.g. 'routed'
            Optional('auto_mdix'): str,
            Optional('switchport_monitor'): str,
            Optional('efficient_ethernet'): str,   # EEE state, may be 'n/a'
            Optional('last_link_flapped'): str,
            Optional('interface_reset'): int,
            Optional('ethertype'): str,            # e.g. '0x8100'
            Optional('beacon'): str,               # beacon LED on/off
            Optional('medium'): str,               # e.g. 'broadcast', 'p2p'
            Optional('reliability'): str,          # 'x/255' as reported
            Optional('txload'): str,               # 'x/255'
            Optional('rxload'): str,               # 'x/255'
            Optional('delay'): int,                # DLY in usec
            Optional('media_type'): str,
            Optional('flow_control'):
                {Optional('receive'): bool,
                Optional('send'): bool,
                },
            Optional('port_channel'):
                {Optional('port_channel_member'): bool,
                Optional('port_channel_int'): str,            # bundle this port belongs to
                Optional('port_channel_member_intfs'): list   # members of this bundle
                },
            Optional('bandwidth'): int,            # BW in Kbit
            # --- traffic statistics ---
            Optional('counters'):
                {Optional('rate'):
                    {Optional('load_interval'): int,   # interval from the rate line
                    Optional('in_rate'): int,          # bits/sec
                    Optional('in_rate_pkts'): int,     # packets/sec
                    Optional('out_rate'): int,         # bits/sec
                    Optional('out_rate_pkts'): int,    # packets/sec
                    Optional('in_rate_bps'): int,
                    Optional('in_rate_pps'): int,
                    Optional('out_rate_bps'): int,
                    Optional('out_rate_pps'): int,
                    },
                Optional('in_unicast_pkts'): int,
                Optional('in_multicast_pkts'): int,
                Optional('in_broadcast_pkts'): int,
                Optional('in_discards'): int,
                Optional('in_crc_errors'): int,
                Optional('in_oversize_frames'): int,
                Optional('in_pkts'): int,
                Optional('in_mac_pause_frames'): int,
                Optional('in_jumbo_packets'): int,
                Optional('in_storm_suppression_packets'): int,
                Optional('in_runts'): int,
                Optional('in_oversize_frame'): int,
                Optional('in_overrun'): int,
                Optional('in_underrun'): int,
                Optional('in_ignored'): int,
                Optional('in_watchdog'): int,
                Optional('in_bad_etype_drop'): int,
                Optional('in_unknown_protos'): int,
                Optional('in_if_down_drop'): int,
                Optional('in_with_dribble'): int,
                Optional('in_discard'): int,
                Optional('in_octets'): int,
                Optional('in_errors'): int,
                Optional('in_short_frame'): int,
                Optional('in_no_buffer'): int,
                Optional('out_pkts'): int,
                Optional('out_unicast_pkts'): int,
                Optional('out_multicast_pkts'): int,
                Optional('out_broadcast_pkts'): int,
                Optional('out_discard'): int,
                Optional('out_octets'): int,
                Optional('out_jumbo_packets'): int,
                Optional('out_errors'): int,
                Optional('out_collision'): int,
                Optional('out_deferred'): int,
                Optional('out_late_collision'): int,
                Optional('out_lost_carrier'): int,
                Optional('out_no_carrier'): int,
                Optional('out_babble'): int,
                Optional('last_clear'): str,       # time since counters cleared, or 'never'
                Optional('tx'): bool,
                Optional('rx'): bool,
                Optional('out_mac_pause_frames'): int,
                },
            # --- encapsulation / addressing ---
            Optional('encapsulations'):
                {Optional('encapsulation'): str,
                Optional('first_dot1q'): str,      # Vlan ID of the 802.1Q encapsulation
                Optional('native_vlan'): int,
                },
            Optional('ipv4'):
                {Any():    # 'ip/prefix_length' key
                    {Optional('ip'): str,
                    Optional('prefix_length'): str,
                    Optional('secondary'): bool,
                    Optional('route_tag'): str
                    },
                },
            },
        }
# ===========================
# Parser for 'show interface'
# ===========================
class ShowInterface(ShowInterfaceSchema):
    """Parser for show interface, show interface <interface>"""

    # Commands this parser can issue; index 1 is used by cli() when an
    # interface name is supplied, index 0 otherwise.
    cli_command = ['show interface', 'show interface {interface}']

    # Keys whose values are volatile between runs (packet/byte counters,
    # rates, flap timers, addresses).
    # NOTE(review): presumably consumed by Genie tooling to ignore these
    # keys when diffing two parsed snapshots — confirm against genie docs.
    exclude = [
        'in_unicast_pkts',
        'out_unicast_pkts',
        'in_octets',
        'out_octets',
        'in_pkts',
        'out_pkts',
        'in_multicast_pkts',
        'out_multicast_pkts',
        'in_rate',
        'out_rate',
        'in_broadcast_pkts',
        'out_broadcast_pkts',
        'last_link_flapped',
        'in_rate_pkts',
        'out_rate_pkts',
        'out_rate_bps',
        'in_rate_bps',
        'interface_reset',
        'in_rate_pps',
        'out_rate_pps',
        'last_clear',
        'out_jumbo_packets',
        'in_jumbo_packets',
        'rxload',
        'txload',
        'in_errors',
        'mac_address',
        'phys_address',
        'in_crc_errors',
        'reliability']
def cli(self, interface="", output=None):
if output is None:
if interface:
cmd = self.cli_command[1].format(interface=interface)
else:
cmd = self.cli_command[0]
out = self.device.execute(cmd)
else:
out = output
# Ethernet2/1.10 is down (Administratively down)
p1 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/\.\-]+) *is'
' *(?P<enabled>(down))'
'( *\((?P<link_state>[a-zA-Z0-9\-\s]+)\))?$')
# Vlan1 is down (Administratively down), line protocol is down, autostate enabled
# Vlan23 is administratively down (Administratively down), line protocol is down, autostate enabled
p1_1 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/\.\-]+) *is'
' *(?P<enabled>[\w\s]+)'
'( *\((?P<link_state>[\w\-\/\s]+)\))?, +'
'line +protocol +is +(?P<line_protocol>\w+),? *'
'(autostate +(?P<autostate>\w+))?$')
# Ethernet2/2 is up
p1_2 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/\.\-]+) *is'
' *(?P<enabled>(up))'
'( *\((?P<link_state>[a-zA-Z\s]+)\))?$')
# admin state is up
# admin state is up,
# admin state is up, Dedicated Interface
# admin state is up, Dedicated Interface, [parent interface is Ethernet2/1]
p2 = re.compile(r'^admin +state +is'
' +(?P<admin_state>([a-zA-Z0-9\/\.]+))(?:,)?'
'(?: +(?P<dedicated_intf>(Dedicated Interface)))?'
'(?:, +\[parent +interface +is'
' +(?P<parent_intf>(\S+))\])?$')
# Dedicated Interface
p2_1 = re.compile(r'^Dedicated Interface$')
# Belongs to Po1
p2_2 = re.compile(r'^Belongs *to *(?P<port_channel_int>[a-zA-Z0-9]+)$')
# Hardware: Ethernet, address: 5254.00c9.d26e (bia 5254.00c9.d26e)
p3 = re.compile(r'^Hardware: *(?P<types>[a-zA-Z0-9\/\s]+),'
' *address: *(?P<mac_address>[a-z0-9\.]+)'
' *\(bia *(?P<phys_address>[a-z0-9\.]+)\)$')
#Description: desc
p4 = re.compile(r'^Description: *(?P<description>.*)$')
#Internet Address is 10.4.4.4/24 secondary tag 10
p5 = re.compile(r'^Internet *Address *is *(?P<ip>[0-9\.]+)'
'\/(?P<prefix_length>[0-9]+)'
'(?: *(?P<secondary>(secondary)))?(?: *tag'
' *(?P<route_tag>[0-9]+))?$')
# MTU 1600 bytes, BW 768 Kbit, DLY 3330 usec
# MTU 1500 bytes, BW 1000000 Kbit, DLY 10 usec,
# MTU 1500 bytes, BW 1000000 Kbit
p6 = re.compile(r'^MTU *(?P<mtu>[0-9]+) *bytes, *BW'
' *(?P<bandwidth>[0-9]+) *Kbit(, *DLY'
' *(?P<delay>[0-9]+) *usec)?,?$')
# MTU 1500 bytes, BW 40000000 Kbit,, BW 40000000 Kbit, DLY 10 usec
p6_1 = re.compile(r'^MTU *(?P<mtu>[0-9]+) *bytes, *BW'
' *(?P<bandwidth>[0-9]+) *Kbit, *,? *BW'
' *([0-9]+) *Kbit, *DLY'
' *(?P<delay>[0-9]+) *usec$')
# reliability 255/255, txload 1/255, rxload 1/255
p7 = re.compile(r'^reliability *(?P<reliability>[0-9\/]+),'
' *txload *(?P<txload>[0-9\/]+),'
' *rxload *(?P<rxload>[0-9\/]+)$')
#Encapsulation 802.1Q Virtual LAN, Vlan ID 10, medium is broadcast
#Encapsulation 802.1Q Virtual LAN, Vlan ID 20, medium is p2p
#Encapsulation ARPA, medium is broadcast
p8 = re.compile(r'^Encapsulation *(?P<encapsulation>[a-zA-Z0-9\.\s]+),'
' *medium *is *(?P<medium>[a-zA-Z]+)$')
p8_1 = re.compile(r'^Encapsulation *(?P<encapsulation>[a-zA-Z0-9\.\s]+),'
' *Vlan *ID *(?P<first_dot1q>[0-9]+),'
' *medium *is *(?P<medium>[a-z0-9]+)$')
# Encapsulation ARPA, loopback not set
p8_2 = re.compile(r'^Encapsulation *(?P<encapsulation>[a-zA-Z0-9\.\s]+),'
' *([\w\s]+)$')
#Port mode is routed
p9 = re.compile(r'^Port *mode *is *(?P<port_mode>[a-z]+)$')
# auto-duplex, auto-speed
p10_1 = re.compile(r'^auto-duplex, +auto-speed$')
#full-duplex, 1000 Mb/s
# auto-duplex, auto-speed
# full-duplex, 1000 Mb/s, media type is 1G
# auto-duplex, auto-speed, media type is 10G
p10 = re.compile(r'^(?P<duplex_mode>[a-z]+)-duplex, *(?P<port_speed>[a-z0-9\-]+)(?: '
'*[G|M]b/s)?(?:, +media +type +is (?P<media_type>\w+))?$')
#Beacon is turned off
p11 = re.compile(r'^Beacon *is *turned *(?P<beacon>[a-z]+)$')
#Auto-Negotiation is turned off
p12 = re.compile(r'^Auto-Negotiation *is *turned'
' *(?P<auto_negotiate>(off))$')
#Auto-Negotiation is turned on
p12_1 = re.compile(r'^Auto-Negotiation *is *turned'
' *(?P<auto_negotiate>(on))$')
#Input flow-control is off, output flow-control is off
p13 = re.compile(r'^Input *flow-control *is *(?P<receive>(off)+),'
' *output *flow-control *is *(?P<send>(off)+)$')
#Input flow-control is off, output flow-control is on
p13_1 = re.compile(r'^Input *flow-control *is *(?P<receive>(on)+),'
' *output *flow-control *is *(?P<send>(on)+)$')
#Auto-mdix is turned off
p14 = re.compile(r'^Auto-mdix *is *turned *(?P<auto_mdix>[a-z]+)$')
#Switchport monitor is off
p15 = re.compile(r'^Switchport *monitor *is *(?P<switchport_monitor>[a-z]+)$')
#EtherType is 0x8100
p16 = re.compile(r'^EtherType *is *(?P<ethertype>[a-z0-9]+)$')
# Members in this channel: Eth1/15, Eth1/16
# Members in this channel: Eth1/28
p38 = re.compile(r'^Members +in +this +channel *: *'
'(?P<port_channel_member_intfs>[\w\/\.\-\,\s]+)$')
#EEE (efficient-ethernet) : n/a
p17 = re.compile(r'^EEE *\(efficient-ethernet\) *:'
' *(?P<efficient_ethernet>[A-Za-z\/]+)$')
#Last link flapped 00:07:28
p18 = re.compile(r'^Last *link *flapped'
' *(?P<last_link_flapped>[a-z0-9\:]+)$')
# Last clearing of "show interface" counters never
p19 = re.compile(r'^Last *clearing *of *\"show *interface\"'
' *counters *(?P<last_clear>[a-z0-9\:]+)$')
# Last clearing of "" counters 00:15:42
p19_1 = re.compile(r'^Last *clearing *of *\" *\"'
' *counters *(?P<last_clear>[a-z0-9\:]+)$')
#1 interface resets
p20 = re.compile(r'^(?P<interface_reset>[0-9]+) *interface'
' *resets$')
# 1 minute input rate 0 bits/sec, 0 packets/sec
p21 = re.compile(r'^(?P<load_interval>[0-9\#]+)'
' *(minute|second|minutes|seconds) *input *rate'
' *(?P<in_rate>[0-9]+) *bits/sec,'
' *(?P<in_rate_pkts>[0-9]+) *packets/sec$')
#1 minute output rate 24 bits/sec, 0 packets/sec
p22 = re.compile(r'^(?P<load_interval>[0-9\#]+)'
' *(minute|second|minutes|seconds) *output'
' *rate *(?P<out_rate>[0-9]+)'
' *bits/sec, *(?P<out_rate_pkts>[0-9]+)'
' *packets/sec$')
#input rate 0 bps, 0 pps; output rate 0 bps, 0 pps
p23 = re.compile(r'^input *rate *(?P<in_rate_bps>[0-9]+) *bps,'
' *(?P<in_rate_pps>[0-9]+) *pps; *output *rate'
' *(?P<out_rate_bps>[0-9]+) *bps,'
' *(?P<out_rate_pps>[0-9]+) *pps$')
# RX
p23_1 = re.compile(r'^(?P<rx>(RX))$')
#0 unicast packets 0 multicast packets 0 broadcast packets
p24 = re.compile(r'^(?P<in_unicast_pkts>[0-9]+) +unicast +packets'
' +(?P<in_multicast_pkts>[0-9]+) +multicast +packets'
' +(?P<in_broadcast_pkts>[0-9]+) +broadcast +packets$')
# 0 input packets 0 bytes
# 607382344 input packets 445986207 unicast packets 132485585 multicast packets
p25 = re.compile(r'^(?P<in_pkts>[0-9]+) +input +packets(?: '
'+(?P<in_octets>[0-9]+) +bytes)?(?: +(?P<in_unicast_pkts>[0-9]+) '
'+unicast +packets +(?P<in_multicast_pkts>[0-9]+) +multicast +packets)?$')
#0 jumbo packets 0 storm suppression packets
p26 = re.compile(r'^(?P<in_jumbo_packets>[0-9]+) +jumbo +packets'
' *(?P<in_storm_suppression_packets>[0-9]+)'
' *storm *suppression *packets$')
#0 runts 0 giants 0 CRC/FCS 0 no buffer
#0 runts 0 giants 0 CRC 0 no buffer
p27 = re.compile(r'^(?P<in_runts>[0-9]+) *runts'
' *(?P<in_oversize_frame>[0-9]+) *giants'
' *(?P<in_crc_errors>[0-9]+) *CRC(/FCS)?'
' *(?P<in_no_buffer>[0-9]+) *no *buffer$')
#0 input error 0 short frame 0 overrun 0 underrun 0 ignored
p28 = re.compile(r'^(?P<in_errors>[0-9]+) *input *error'
' *(?P<in_short_frame>[0-9]+) *short *frame'
' *(?P<in_overrun>[0-9]+) *overrun *(?P<in_underrun>[0-9]+)'
' *underrun *(?P<in_ignored>[0-9]+) *ignored$')
#0 watchdog 0 bad etype drop 0 bad proto drop 0 if down drop
p29 = re.compile(r'^(?P<in_watchdog>[0-9]+) *watchdog'
' *(?P<in_bad_etype_drop>[0-9]+)'
' *bad *etype *drop *(?P<in_unknown_protos>[0-9]+)'
' *bad *proto'
' *drop *(?P<in_if_down_drop>[0-9]+) *if *down *drop$')
# 0 input with dribble 0 input discard
p30 = re.compile(r'^(?P<in_with_dribble>[0-9]+) *input *with'
' *dribble *(?P<in_discard>[0-9]+) *input *discard$')
# 0 Rx pause
p31 = re.compile(r'^(?P<in_mac_pause_frames>[0-9]+) *Rx *pause$')
# TX
p31_1 = re.compile(r'^(?P<tx>(TX))$')
#0 unicast packets 0 multicast packets 0 broadcast packets
p32 = re.compile(r'^(?P<out_unicast_pkts>[0-9]+) *unicast *packets'
' *(?P<out_multicast_pkts>[0-9]+) *multicast *packets'
' *(?P<out_broadcast_pkts>[0-9]+) *broadcast *packets$')
#0 output packets 0 bytes
p33 = re.compile(r'^(?P<out_pkts>[0-9]+) *output *packets'
' *(?P<out_octets>[0-9]+) *bytes$')
#0 jumbo packets
p34 = re.compile(r'^(?P<out_jumbo_packets>[0-9]+) *jumbo *packets$')
#0 output error 0 collision 0 deferred 0 late collision
p35 = re.compile(r'^(?P<out_errors>[0-9]+) *output *error'
' *(?P<out_collision>[0-9]+) *collision'
' *(?P<out_deferred>[0-9]+) *deferred'
' *(?P<out_late_collision>[0-9]+)'
' *late *collision$')
#0 lost carrier 0 no carrier 0 babble 0 output discard
p36 = re.compile(r'^(?P<out_lost_carrier>[0-9]+) *lost *carrier'
' *(?P<out_no_carrier>[0-9]+) *no *carrier'
' *(?P<out_babble>[0-9]+) *babble'
' *(?P<out_discard>[0-9]+) *output *discard$')
#0 Tx pause
p37 = re.compile(r'^(?P<out_mac_pause_frames>[0-9]+) *Tx *pause$')
# Members in this channel: Eth1/15, Eth1/16
# Members in this channel: Eth1/28
p38 = re.compile(r'^Members +in +this +channel *: *'
'(?P<port_channel_member_intfs>[\w\/\.\-\,\s]+)$')
# 28910552 broadcast packets 63295517997 bytes
p39 = re.compile(r'^(?P<in_broadcast_pkts>[0-9]+) +broadcast +packets +(?P<in_octets>[0-9]+) +bytes$')
interface_dict = {}
rx = False
tx = False
for line in out.splitlines():
line = line.replace('\t', ' ')
line = line.strip()
# Ethernet2/1.10 is down (Administratively down)
m = p1.match(line)
if m:
interface = m.groupdict()['interface']
enabled = m.groupdict()['enabled']
link_state = m.groupdict()['link_state']
if interface not in interface_dict:
interface_dict[interface] = {}
interface_dict[interface]['port_channel'] = {}
interface_dict[interface]['port_channel']\
['port_channel_member'] = False
if link_state:
interface_dict[interface]\
['link_state'] = link_state
interface_dict[interface]['enabled'] = False
interface_dict[interface]['oper_status'] = 'down'
continue
# Vlan1 is down (Administratively down), line protocol is down, autostate enabled
# Vlan23 is administratively down (Administratively down), line protocol is down, autostate enabled
m = p1_1.match(line)
if m:
interface = m.groupdict()['interface']
enabled = m.groupdict()['enabled']
link_state = m.groupdict()['link_state']
line_protocol = m.groupdict()['line_protocol']
autostate = m.groupdict()['autostate']
if interface not in interface_dict:
interface_dict[interface] = {}
interface_dict[interface]['port_channel'] = {}
interface_dict[interface]['port_channel']\
['port_channel_member'] = False
if link_state:
interface_dict[interface]\
['link_state'] = link_state
if enabled:
enabled = enabled.lower()
interface_dict[interface]['enabled'] = False if 'down' in enabled else True
interface_dict[interface]['oper_status'] = enabled.strip()
if line_protocol:
interface_dict[interface]['line_protocol'] = line_protocol.lower()
if autostate:
interface_dict[interface]['autostate'] = True if \
autostate.lower() == 'enabled' else False
continue
# Ethernet2/2 is up
m = p1_2.match(line)
if m:
interface = m.groupdict()['interface']
enabled = m.groupdict()['enabled']
link_state = m.groupdict()['link_state']
if interface not in interface_dict:
interface_dict[interface] = {}
interface_dict[interface]['port_channel'] = {}
interface_dict[interface]['port_channel']\
['port_channel_member'] = False
if link_state:
interface_dict[interface]\
['link_state'] = link_state
interface_dict[interface]['enabled'] = True
interface_dict[interface]['oper_status'] = 'up'
continue
# admin state is up
# admin state is up,
# admin state is up, Dedicated Interface
# admin state is up, Dedicated Interface, [parent interface is Ethernet2/1]
m = p2.match(line)
if m:
# admin_state
interface_dict[interface]['admin_state'] = \
m.groupdict()['admin_state']
# dedicated_interface
if m.groupdict()['dedicated_intf']:
interface_dict[interface]['dedicated_intface'] = True
# parent_interface
if m.groupdict()['parent_intf']:
interface_dict[interface]['parent_interface'] = \
m.groupdict()['parent_intf']
continue
# Dedicated Interface
m = p2_1.match(line)
if m:
interface_dict[interface]['dedicated_intface'] = True
continue
# Belongs to Po1
m = p2_2.match(line)
if m:
port_channel_int = str(m.groupdict()['port_channel_int'])
if 'port_channel' not in interface_dict[interface]:
interface_dict[interface]['port_channel'] = {}
interface_dict[interface]['port_channel']\
['port_channel_member'] = True
interface_dict[interface]['port_channel']\
['port_channel_int'] = Common.convert_intf_name(port_channel_int)
continue
# Hardware: Ethernet, address: 5254.00c9.d26e (bia 5254.00c9.d26e)
m = p3.match(line)
if m:
types = m.groupdict()['types']
mac_address = m.groupdict()['mac_address']
phys_address = m.groupdict()['phys_address']
interface_dict[interface]['types'] = types
interface_dict[interface]\
['mac_address'] = mac_address
interface_dict[interface]\
['phys_address'] = phys_address
continue
#Description: desc
m = p4.match(line)
if m:
description = m.groupdict()['description']
interface_dict[interface]['description'] = description
continue
#Internet Address is 10.4.4.4/24 secondary tag 10
m = p5.match(line)
if m:
ip = m.groupdict()['ip']
prefix_length = str(m.groupdict()['prefix_length'])
secondary = m.groupdict()['secondary']
route_tag = m.groupdict()['route_tag']
#address = ipv4+prefix_length
address = ip + '/' + prefix_length
if 'ipv4' not in interface_dict[interface]:
interface_dict[interface]['ipv4'] = {}
if address not in interface_dict[interface]['ipv4']:
interface_dict[interface]['ipv4'][address] = {}
interface_dict[interface]['ipv4'][address]\
['ip'] = ip
interface_dict[interface]['ipv4'][address]\
['prefix_length'] = prefix_length
if secondary:
interface_dict[interface]['ipv4'][address]\
['secondary'] = True
if route_tag:
interface_dict[interface]['ipv4'][address]\
['route_tag'] = route_tag
continue
# MTU 1600 bytes, BW 768 Kbit, DLY 3330 usec
# MTU 1500 bytes, BW 1000000 Kbit, DLY 10 usec,
# MTU 1500 bytes, BW 1000000 Kbit
m = p6.match(line)
if m:
mtu = int(m.groupdict()['mtu'])
bandwidth = int(m.groupdict()['bandwidth'])
if m.groupdict()['delay']:
interface_dict[interface]['delay'] = int(m.groupdict()['delay'])
interface_dict[interface]['mtu'] = mtu
interface_dict[interface]['bandwidth'] = bandwidth
continue
# MTU 1500 bytes, BW 40000000 Kbit,, BW 40000000 Kbit, DLY 10 usec
m = p6_1.match(line)
if m:
mtu = int(m.groupdict()['mtu'])
bandwidth = int(m.groupdict()['bandwidth'])
interface_dict[interface]['mtu'] = mtu
interface_dict[interface]['bandwidth'] = bandwidth
interface_dict[interface]['delay'] = int(m.groupdict()['delay'])
continue
# reliability 255/255, txload 1/255, rxload 1/255
m = p7.match(line)
if m:
reliability = m.groupdict()['reliability']
txload = m.groupdict()['txload']
rxload = m.groupdict()['rxload']
interface_dict[interface]['reliability'] = reliability
interface_dict[interface]['txload'] = txload
interface_dict[interface]['rxload'] = rxload
continue
#Encapsulation 802.1Q Virtual LAN, Vlan ID 10, medium is broadcast
#Encapsulation 802.1Q Virtual LAN, Vlan ID 20, medium is p2p
#Encapsulation ARPA, medium is broadcast
m = p8.match(line)
if m:
encapsulation = m.groupdict()['encapsulation'].lower()
encapsulation = encapsulation.replace("802.1q virtual lan","dot1q")
medium = m.groupdict()['medium']
if 'encapsulations' not in interface_dict[interface]:
interface_dict[interface]['encapsulations'] = {}
interface_dict[interface]['encapsulations']\
['encapsulation'] = encapsulation
interface_dict[interface]['medium'] = medium
continue
m = p8_1.match(line)
if m:
encapsulation = m.groupdict()['encapsulation'].lower()
encapsulation = encapsulation.replace("802.1q virtual lan","dot1q")
first_dot1q = str(m.groupdict()['first_dot1q'])
medium = m.groupdict()['medium']
if 'encapsulations' not in interface_dict[interface]:
interface_dict[interface]['encapsulations'] = {}
interface_dict[interface]['encapsulations']\
['encapsulation'] = encapsulation
interface_dict[interface]['encapsulations']\
['first_dot1q'] = first_dot1q
interface_dict[interface]['medium'] = medium
continue
# Encapsulation ARPA, loopback not set
m = p8_2.match(line)
if m:
encapsulation = m.groupdict()['encapsulation'].lower()
if 'encapsulations' not in interface_dict[interface]:
interface_dict[interface]['encapsulations'] = {}
interface_dict[interface]['encapsulations']\
['encapsulation'] = encapsulation
continue
#Port mode is routed
m = p9.match(line)
if m:
port_mode = m.groupdict()['port_mode']
interface_dict[interface]['port_mode'] = port_mode
continue
# auto-duplex, auto-speed
m = p10_1.match(line)
if m:
# not caring for this line
continue
#full-duplex, 1000 Mb/s
# auto-duplex, auto-speed
# full-duplex, 1000 Mb/s, media type is 1G
# auto-duplex, auto-speed, media type is 10G
m = p10.match(line)
if m:
duplex_mode = m.groupdict()['duplex_mode'].lower()
port_speed = m.groupdict()['port_speed']
if m.groupdict()['media_type']:
interface_dict[interface]['media_type'] = m.groupdict()['media_type']
else:
media_type = None
interface_dict[interface]['duplex_mode'] = duplex_mode
interface_dict[interface]['port_speed'] = port_speed
continue
#Beacon is turned off
m = p11.match(line)
if m:
beacon = m.groupdict()['beacon']
interface_dict[interface]['beacon'] = beacon
continue
#Auto-Negotiation is turned off
m = p12.match(line)
if m:
auto_negotiation = m.groupdict()['auto_negotiate']
interface_dict[interface]['auto_negotiate'] = False
continue
#Auto-Negotiation is turned on
m = p12_1.match(line)
if m:
auto_negotiation = m.groupdict()['auto_negotiate']
interface_dict[interface]['auto_negotiate'] = True
continue
#Input flow-control is off, output flow-control is off
m = p13.match(line)
if m:
receive = m.groupdict()['receive']
send = m.groupdict()['send']
if 'flow_control' not in interface_dict[interface]:
interface_dict[interface]['flow_control'] = {}
interface_dict[interface]['flow_control']['receive'] = False
interface_dict[interface]['flow_control']['send'] = False
continue
#Input flow-control is off, output flow-control is on
m = p13_1.match(line)
if m:
receive = m.groupdict()['receive']
send = m.groupdict()['send']
if 'flow_control' not in interface_dict[interface]:
interface_dict[interface]['flow_control'] = {}
interface_dict[interface]['flow_control']['receive'] = True
interface_dict[interface]['flow_control']['send'] = True
continue
#Auto-mdix is turned off
m = p14.match(line)
if m:
auto_mdix = m.groupdict()['auto_mdix']
interface_dict[interface]['auto_mdix'] = auto_mdix
continue
#Switchport monitor is off
m = p15.match(line)
if m:
switchport_monitor = m.groupdict()['switchport_monitor']
interface_dict[interface]['switchport_monitor'] = switchport_monitor
continue
#EtherType is 0x8100
m = p16.match(line)
if m:
ethertype = m.groupdict()['ethertype']
interface_dict[interface]['ethertype'] = ethertype
continue
# Members in this channel: Eth1/15, Eth1/16
# Members in this channel: Eth1/28
m = p38.match(line)
if m:
port_channel_member_intfs = m.groupdict()['port_channel_member_intfs']
if port_channel_member_intfs:
if 'port_channel' not in interface_dict[interface]:
interface_dict[interface]['port_channel'] = {}
interface_dict[interface]['port_channel']\
['port_channel_member'] = True
interface_dict[interface]['port_channel']\
['port_channel_member_intfs'] = [Common.convert_intf_name(item) \
for item in port_channel_member_intfs.split(',')]
continue
#EEE (efficient-ethernet) : n/a
m = p17.match(line)
if m:
efficient_ethernet = m.groupdict()['efficient_ethernet']
interface_dict[interface]['efficient_ethernet'] = efficient_ethernet
continue
#Last link flapped 00:07:28
m = p18.match(line)
if m:
last_link_flapped = m.groupdict()['last_link_flapped']
interface_dict[interface]['last_link_flapped']\
= last_link_flapped
continue
# Last clearing of "show interface" counters never
m = p19.match(line)
if m:
last_clear = m.groupdict()['last_clear']
continue
# Last clearing of "" counters 00:15:42
m = p19_1.match(line)
if m:
last_clear = m.groupdict()['last_clear']
continue
#1 interface resets
m = p20.match(line)
if m:
interface_reset = int(m.groupdict()['interface_reset'])
interface_dict[interface]['interface_reset'] = interface_reset
continue
# 1 minute input rate 0 bits/sec, 0 packets/sec
m = p21.match(line)
if m:
load_interval = int(m.groupdict()['load_interval'])
in_rate = int(m.groupdict()['in_rate'])
in_rate_pkts = int(m.groupdict()['in_rate_pkts'])
if 'counters' not in interface_dict[interface]:
interface_dict[interface]['counters'] = {}
if 'rate' not in interface_dict[interface]['counters']:
interface_dict[interface]['counters']['rate'] = {}
interface_dict[interface]['counters']['rate']\
['load_interval'] = load_interval
interface_dict[interface]['counters']['rate']\
['in_rate'] = in_rate
interface_dict[interface]['counters']['rate']\
['in_rate_pkts'] = in_rate_pkts
continue
#1 minute output rate 24 bits/sec, 0 packets/sec
m = p22.match(line)
if m:
load_interval = int(m.groupdict()['load_interval'])
out_rate = int(m.groupdict()['out_rate'])
out_rate_pkts = int(m.groupdict()['out_rate_pkts'])
interface_dict[interface]['counters']['rate']\
['load_interval'] = load_interval
interface_dict[interface]['counters']['rate']\
['out_rate'] = out_rate
interface_dict[interface]['counters']['rate']\
['out_rate_pkts'] = out_rate_pkts
continue
#input rate 0 bps, 0 pps; output rate 0 bps, 0 pps
m = p23.match(line)
if m:
in_rate_bps = int(m.groupdict()['in_rate_bps'])
in_rate_pps = int(m.groupdict()['in_rate_pps'])
out_rate_bps = int(m.groupdict()['out_rate_bps'])
out_rate_pps = int(m.groupdict()['out_rate_pps'])
if 'counters' not in interface_dict[interface]:
interface_dict[interface]['counters'] = {}
if 'rate' not in interface_dict[interface]['counters']:
interface_dict[interface]['counters']['rate'] = {}
interface_dict[interface]['counters']['rate']\
['in_rate_bps'] = in_rate_bps
interface_dict[interface]['counters']['rate']\
['in_rate_pps'] = in_rate_pps
interface_dict[interface]['counters']['rate']\
['out_rate_bps'] = out_rate_bps
interface_dict[interface]['counters']['rate']\
['out_rate_pps'] = out_rate_pps
continue
# RX
m = p23_1.match(line)
if m:
rx = m.groupdict()['rx']
if 'counters' not in interface_dict[interface]:
interface_dict[interface]['counters'] = {}
interface_dict[interface]['counters']['rx'] = True
continue
if rx:
#0 unicast packets 0 multicast packets 0 broadcast packets
m = p24.match(line)
if m:
in_unicast_pkts = int(m.groupdict()['in_unicast_pkts'])
in_multicast_pkts = int(m.groupdict()['in_multicast_pkts'])
in_broadcast_pkts = int(m.groupdict()['in_broadcast_pkts'])
interface_dict[interface]['counters']['in_unicast_pkts'] = in_unicast_pkts
interface_dict[interface]['counters']['in_multicast_pkts'] = in_multicast_pkts
interface_dict[interface]['counters']['in_broadcast_pkts'] = in_broadcast_pkts
try:
interface_dict[interface]['counters']['last_clear'] = last_clear
except Exception:
pass
continue
# 0 input packets 0 bytes
# 607382344 input packets 445986207 unicast packets 132485585 multicast packets
m = p25.match(line)
if m:
group = m.groupdict()
if 'counters' not in interface_dict[interface]:
interface_dict[interface]['counters'] = {}
interface_dict[interface]['counters']['in_pkts'] = int(group['in_pkts'])
if group['in_octets']:
interface_dict[interface]['counters']['in_octets'] = int(group['in_octets'])
if group['in_unicast_pkts']:
interface_dict[interface]['counters']['in_unicast_pkts'] = int(group['in_unicast_pkts'])
if group['in_multicast_pkts']:
interface_dict[interface]['counters']['in_multicast_pkts'] = int(group['in_multicast_pkts'])
continue
# 28910552 broadcast packets 63295517997 bytes
m = p39.match(line)
if m:
in_octets = int(m.groupdict()['in_octets'])
interface_dict[interface]['counters']['in_octets'] = in_octets
in_broadcast_pkts = int(m.groupdict()['in_broadcast_pkts'])
interface_dict[interface]['counters']['in_broadcast_pkts'] = in_broadcast_pkts
#0 jumbo packets 0 storm suppression packets
m = p26.match(line)
if m:
in_jumbo_packets = int(m.groupdict()['in_jumbo_packets'])
in_storm_suppression_packets = int(m.groupdict()['in_storm_suppression_packets'])
interface_dict[interface]['counters']['in_jumbo_packets']= in_jumbo_packets
interface_dict[interface]['counters']\
['in_storm_suppression_packets'] = in_storm_suppression_packets
continue
#0 runts 0 giants 0 CRC/FCS 0 no buffer
#0 runts 0 giants 0 CRC 0 no buffer
m = p27.match(line)
if m:
interface_dict[interface]['counters']['in_runts'] = int(m.groupdict()['in_runts'])
interface_dict[interface]['counters']['in_oversize_frame'] = int(m.groupdict()['in_oversize_frame'])
interface_dict[interface]['counters']['in_crc_errors'] = int(m.groupdict()['in_crc_errors'])
interface_dict[interface]['counters']['in_no_buffer'] = int(m.groupdict()['in_no_buffer'])
continue
#0 input error 0 short frame 0 overrun 0 underrun 0 ignored
m = p28.match(line)
if m:
interface_dict[interface]['counters']['in_errors'] = int(m.groupdict()['in_errors'])
interface_dict[interface]['counters']['in_short_frame'] = int(m.groupdict()['in_short_frame'])
interface_dict[interface]['counters']['in_overrun'] = int(m.groupdict()['in_overrun'])
interface_dict[interface]['counters']['in_underrun'] = int(m.groupdict()['in_underrun'])
interface_dict[interface]['counters']['in_ignored'] = int(m.groupdict()['in_ignored'])
continue
#0 watchdog 0 bad etype drop 0 bad proto drop 0 if down drop
m = p29.match(line)
if m:
interface_dict[interface]['counters']['in_watchdog'] = int(m.groupdict()['in_watchdog'])
interface_dict[interface]['counters']['in_bad_etype_drop'] = int(m.groupdict()['in_bad_etype_drop'])
interface_dict[interface]['counters']['in_unknown_protos'] = int(m.groupdict()['in_unknown_protos'])
interface_dict[interface]['counters']['in_if_down_drop'] = int(m.groupdict()['in_if_down_drop'])
continue
# 0 input with dribble 0 input discard
m = p30.match(line)
if m:
in_with_dribble = int(m.groupdict()['in_with_dribble'])
in_discard = int(m.groupdict()['in_discard'])
interface_dict[interface]['counters']['in_with_dribble'] = in_with_dribble
interface_dict[interface]['counters']['in_discard'] = in_discard
continue
# 0 Rx pause
m = p31.match(line)
if m:
in_mac_pause_frames = int(m.groupdict()['in_mac_pause_frames'])
interface_dict[interface]['counters']['in_mac_pause_frames'] = in_mac_pause_frames
continue
# TX
m = p31_1.match(line)
if m:
rx = False
tx = m.groupdict()['tx']
if 'counters' not in interface_dict[interface]:
interface_dict[interface]['counters'] = {}
interface_dict[interface]['counters']['tx'] = True
continue
if tx:
#0 unicast packets 0 multicast packets 0 broadcast packets
m = p32.match(line)
if m:
interface_dict[interface]['counters']['out_unicast_pkts'] = int(m.groupdict()['out_unicast_pkts'])
interface_dict[interface]['counters']['out_multicast_pkts'] = int(m.groupdict()['out_multicast_pkts'])
interface_dict[interface]['counters']['out_broadcast_pkts'] = int(m.groupdict()['out_broadcast_pkts'])
continue
#0 output packets 0 bytes
m = p33.match(line)
if m:
out_pkts = int(m.groupdict()['out_pkts'])
out_octets = int(m.groupdict()['out_octets'])
interface_dict[interface]['counters']['out_pkts'] = out_pkts
interface_dict[interface]['counters']['out_octets'] = out_octets
continue
#0 jumbo packets
m = p34.match(line)
if m:
out_jumbo_packets = int(m.groupdict()['out_jumbo_packets'])
interface_dict[interface]['counters']['out_jumbo_packets'] = out_jumbo_packets
continue
#0 output error 0 collision 0 deferred 0 late collision
m = p35.match(line)
if m:
interface_dict[interface]['counters']['out_errors'] = int(m.groupdict()['out_errors'])
interface_dict[interface]['counters']['out_collision'] = int(m.groupdict()['out_collision'])
interface_dict[interface]['counters']['out_deferred'] = int(m.groupdict()['out_deferred'])
interface_dict[interface]['counters']['out_late_collision'] = int(m.groupdict()['out_late_collision'])
continue
#0 lost carrier 0 no carrier 0 babble 0 output discard
m = p36.match(line)
if m:
interface_dict[interface]['counters']['out_lost_carrier'] = int(m.groupdict()['out_lost_carrier'])
interface_dict[interface]['counters']['out_no_carrier'] = int(m.groupdict()['out_no_carrier'])
interface_dict[interface]['counters']['out_babble'] = int(m.groupdict()['out_babble'])
interface_dict[interface]['counters']['out_discard'] = int(m.groupdict()['out_discard'])
continue
#0 Tx pause
m = p37.match(line)
if m:
out_mac_pause_frames = int(m.groupdict()['out_mac_pause_frames'])
interface_dict[interface]['counters']['out_mac_pause_frames'] = out_mac_pause_frames
continue
return interface_dict
# ===================================
# Schema for 'show interface vrf all'
# ===================================
class ShowIpInterfaceVrfAllSchema(MetaParser):
    """Schema for show ip interface vrf all"""

    # Top-level key is the interface name.  Each interface reports its VRF
    # membership, the combined protocol/link/admin status string, and the
    # 'iod' integer as printed by the CLI.  'ipv4' is keyed by the
    # "ip/prefix_length" string and, alongside the per-address leaves,
    # carries the interface's packet/byte counters.  The remaining string
    # leaves mirror the feature-state lines (proxy-arp, ICMP handling,
    # WCCP redirect, etc.) at the end of each interface stanza.
    schema = {
        Any():
            {'vrf': str,
             'interface_status': str,
             'iod': int,
             Optional('ipv4'):
                {Any():
                    {Optional('ip'): str,
                     Optional('prefix_length'): str,
                     Optional('secondary'): bool,
                     Optional('route_tag'): str,
                     Optional('ip_subnet'): str,
                     Optional('broadcast_address'): str,
                     Optional('route_preference'): str,
                    },
                 # Present when the interface borrows its address
                 # (ip unnumbered) from another interface.
                 Optional('unnumbered'):
                    {'interface_ref': str,
                    },
                 # Packet/byte counters, split by traffic class
                 # (unicast / multicast / broadcast / labeled) and by
                 # direction/disposition (sent / received / forwarded /
                 # originated / consumed).
                 'counters':
                    {'unicast_packets_sent': int,
                     'unicast_packets_received': int,
                     'unicast_packets_forwarded': int,
                     'unicast_packets_originated': int,
                     'unicast_packets_consumed': int,
                     'unicast_bytes_sent': int,
                     'unicast_bytes_received': int,
                     'unicast_bytes_forwarded': int,
                     'unicast_bytes_originated': int,
                     'unicast_bytes_consumed': int,
                     'multicast_packets_sent': int,
                     'multicast_packets_received': int,
                     'multicast_packets_forwarded': int,
                     'multicast_packets_originated': int,
                     'multicast_packets_consumed': int,
                     'multicast_bytes_sent': int,
                     'multicast_bytes_received': int,
                     'multicast_bytes_forwarded': int,
                     'multicast_bytes_originated': int,
                     'multicast_bytes_consumed': int,
                     'broadcast_packets_sent': int,
                     'broadcast_packets_received': int,
                     'broadcast_packets_forwarded': int,
                     'broadcast_packets_originated': int,
                     'broadcast_packets_consumed': int,
                     'broadcast_bytes_sent': int,
                     'broadcast_bytes_received': int,
                     'broadcast_bytes_forwarded': int,
                     'broadcast_bytes_originated': int,
                     'broadcast_bytes_consumed': int,
                     'labeled_packets_sent': int,
                     'labeled_packets_received': int,
                     'labeled_packets_forwarded': int,
                     'labeled_packets_originated': int,
                     'labeled_packets_consumed': int,
                     'labeled_bytes_sent': int,
                     'labeled_bytes_received': int,
                     'labeled_bytes_forwarded': int,
                     'labeled_bytes_originated': int,
                     'labeled_bytes_consumed': int,
                    },
                },
             Optional('multicast_groups'): list,
             Optional('multicast_groups_address'): str,
             'ip_mtu': int,
             'proxy_arp': str,
             'local_proxy_arp': str,
             'multicast_routing': str,
             'icmp_redirects': str,
             'directed_broadcast': str,
             Optional('ip_forwarding'): str,
             'icmp_unreachable': str,
             'icmp_port_unreachable': str,
             'unicast_reverse_path': str,
             'load_sharing': str,
             'int_stat_last_reset': str,
             'wccp_redirect_outbound': str,
             'wccp_redirect_inbound': str,
             'wccp_redirect_exclude': str
            },
        }
# ===================================
# Parser for 'show interface vrf all'
# ===================================
class ShowIpInterfaceVrfAll(ShowIpInterfaceVrfAllSchema):
    """Parser for show ip interface vrf all
        show ip interface vrf <vrf>
        show ip interface <interface> vrf all
        show ip interface <interface> vrf <vrf>"""

    # Command variants, ordered most-specific first; cli() selects one
    # based on which of the interface/vrf arguments were supplied.
    cli_command = ['show ip interface {interface} vrf {vrf}', 'show ip interface {interface} vrf all',
                   'show ip interface vrf {vrf}', 'show ip interface vrf all']

    # Keys excluded when comparing parsed output across executions —
    # these are volatile (traffic counters, uptimes, iod numbering) and
    # are expected to differ between runs.
    exclude = [
        'multicast_bytes_consumed',
        'multicast_bytes_received',
        'unicast_bytes_consumed',
        'unicast_packets_consumed',
        'unicast_bytes_originated',
        'unicast_packets_originated',
        'unicast_bytes_received',
        'unicast_bytes_sent',
        'unicast_packets_received',
        'unicast_packets_sent',
        'multicast_packets_consumed',
        'multicast_packets_received',
        'multicast_bytes_originated',
        'multicast_bytes_sent',
        'multicast_packets_originated',
        'multicast_packets_sent',
        'broadcast_bytes_consumed',
        'broadcast_bytes_received',
        'broadcast_packets_consumed',
        'broadcast_packets_received',
        'multicast_groups',
        'int_stat_last_reset',
        'unicast_bytes_forwarded',
        'unicast_packets_forwarded',
        'oil_uptime',
        'iod',
        # regex: matches any tunnel interface key
        '(tunnel.*)',
        'multicast_groups_address']
def cli(self, interface='', vrf='', output=None):
if interface and vrf:
cmd = self.cli_command[0].format(interface=interface, vrf=vrf)
elif interface:
cmd = self.cli_command[1].format(interface=interface)
elif vrf:
cmd = self.cli_command[2].format(vrf=vrf)
else:
cmd = self.cli_command[3]
if output is None:
out = self.device.execute(cmd)
else:
out = output
del interface # delete this to prevent use from below due to scope
ip_interface_vrf_all_dict = {}
temp_intf = []
for line in out.splitlines():
line = line.rstrip()
# IP Interface Status for VRF "VRF1"
p1 = re.compile(r'^\s*IP *Interface *Status *for *VRF'
' *(?P<vrf>\S+)$')
m = p1.match(line)
if m:
vrf = m.groupdict()['vrf']
vrf = vrf.replace('"',"")
continue
#Ethernet2/1, Interface status: protocol-up/link-up/admin-up, iod: 36,
p2 = re.compile(r'^\s*(?P<interface>[a-zA-Z0-9\/\-\.]+), *Interface'
' *status: *(?P<interface_status>[a-z\-\/\s]+),'
' *iod: *(?P<iod>[0-9]+),$')
m = p2.match(line)
if m:
interface = m.groupdict()['interface']
interface_status = m.groupdict()['interface_status']
iod = int(m.groupdict()['iod'])
if interface not in ip_interface_vrf_all_dict:
ip_interface_vrf_all_dict[interface] = {}
ip_interface_vrf_all_dict[interface]['interface_status']\
= interface_status
ip_interface_vrf_all_dict[interface]['iod'] = iod
ip_interface_vrf_all_dict[interface]['vrf'] = vrf
#init multicast groups list to empty for this interface
multicast_groups = []
unnumbered_intf = None
# unnumbered interface didn't share the same information
temp_intf = None
# check if the ipv4 and address already assgined during the unnumbered block
if 'ipv4' in ip_interface_vrf_all_dict[interface]:
for key in ip_interface_vrf_all_dict[interface]['ipv4'].keys():
if re.match('^\d+.\d+.\d+.\d+\/\d+', key):
address = key
continue
# Unnumbered interfaces of loopback0: first iod 46
p2_1 = re.compile(r'^\s*Unnumbered +interfaces +of +(?P<unnumbered_intf>[\w\.\/]+): *'
'first +iod +(?P<first_iod>\d+)$')
m = p2_1.match(line)
if m:
unnumbered_intf = m.groupdict()['unnumbered_intf']
continue
# Ethernet2/11:
# mti18: tunnel-te11: tunnel-te12:
p2_2 = re.compile(r'(([E|e]thernet|[L|l]oopback|[T|t]unnel|[V|v]lan|mti|[t|T]unnel-te|[p|P]ort-channel)[\d\/\.]+):')
m = p2_2.findall(line)
if m and unnumbered_intf:
temp_intf = []
temp_intf = [i[0] for i in m]
for intf in temp_intf:
if intf not in ip_interface_vrf_all_dict:
ip_interface_vrf_all_dict[intf] = {}
continue
# IP address: 10.4.4.4, IP subnet: 10.4.4.0/24 secondary
# IP address: 10.64.4.4, IP subnet: 10.64.4.0/24
p3 = re.compile(r'^\s*IP *address: *(?P<ip>[0-9\.]+), *IP'
' *subnet: *(?P<ip_subnet>[a-z0-9\.]+)\/'
'(?P<prefix_length>[0-9]+)'
' *(?P<secondary>(secondary))?$')
m = p3.match(line)
if m:
ip = m.groupdict()['ip']
ip_subnet = m.groupdict()['ip_subnet']
prefix_length = m.groupdict()['prefix_length']
secondary = m.groupdict()['secondary']
address = ip + '/' + prefix_length
if temp_intf:
temp_intf.append(interface)
intf_lst = temp_intf
else:
intf_lst = [interface]
for intf in intf_lst:
if 'ipv4' not in ip_interface_vrf_all_dict[intf]:
ip_interface_vrf_all_dict[intf]['ipv4'] = {}
if address not in ip_interface_vrf_all_dict[intf]['ipv4']:
ip_interface_vrf_all_dict[intf]['ipv4'][address] = {}
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['ip'] = ip
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['ip_subnet'] = ip_subnet
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['prefix_length'] = prefix_length
if secondary:
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['secondary'] = True
else:
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['secondary'] = False
continue
# IP address: 192.168.106.1, IP subnet: 192.168.106.0/24 route-preference: 0, tag: 0
p3_1 = re.compile(r'^\s*IP *address: *(?P<ip>[0-9\.]+), *IP'
' *subnet: *(?P<ip_subnet>[a-z0-9\.]+)\/'
'(?P<prefix_length>[0-9\,]+)(?: *route-preference:'
' *(?P<route_preference>[0-9]+),)?(?: *tag:'
' *(?P<route_tag>[0-9]+))?$')
m = p3_1.match(line)
if m:
ip = m.groupdict()['ip']
ip_subnet = m.groupdict()['ip_subnet']
prefix_length = m.groupdict()['prefix_length']
route_tag = m.groupdict()['route_tag']
route_preference = m.groupdict()['route_preference']
address = ip + '/' + prefix_length
if temp_intf:
temp_intf.append(interface)
intf_lst = temp_intf
# unnumbered interface didn't share the same information
temp_intf = None
else:
intf_lst = [interface]
for intf in intf_lst:
if 'ipv4' not in ip_interface_vrf_all_dict[intf]:
ip_interface_vrf_all_dict[intf]['ipv4'] = {}
if address not in ip_interface_vrf_all_dict[intf]['ipv4']:
ip_interface_vrf_all_dict[intf]['ipv4'][address] = {}
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['ip'] = ip
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['ip_subnet'] = ip_subnet
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['prefix_length'] = prefix_length
if route_tag:
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['route_tag'] = route_tag
if route_preference:
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['route_preference'] = route_preference
continue
#IP broadcast address: 255.255.255.255
p4 = re.compile(r'^\s*IP *broadcast *address:'
' *(?P<broadcast_address>[0-9\.]+)$')
m = p4.match(line)
if m:
broadcast_address = str(m.groupdict()['broadcast_address'])
if 'ipv4' in ip_interface_vrf_all_dict[interface]:
ip_interface_vrf_all_dict[interface]['ipv4'][address]['broadcast_address'] = broadcast_address
continue
#IP multicast groups locally joined: none
#224.0.0.6 224.0.0.5 224.0.0.2
p5 = re.compile(r'^\s*IP *multicast *groups *locally *joined:'
' *(?P<multicast_groups_address>[a-z]+)$')
m = p5.match(line)
if m:
multicast_groups_address = m.groupdict()['multicast_groups_address']
ip_interface_vrf_all_dict[interface]['multicast_groups_address']\
= multicast_groups_address
continue
#224.0.0.6 224.0.0.5 224.0.0.2
p5_1 = re.compile(r'^\s*(?P<multicast_groups_address>[a-z0-9\.\s]+)$')
m = p5_1.match(line)
if m:
multicast_groups_address = str(m.groupdict()['multicast_groups_address'])
#Split string of addressed into a list
multicast_groups_address = [str(i) for i in multicast_groups_address.split()]
#Add to previous created list
for mgroup in multicast_groups_address:
multicast_groups.append(mgroup)
ip_interface_vrf_all_dict[interface]['multicast_groups']\
= sorted(multicast_groups)
continue
#IP MTU: 1600 bytes (using link MTU)
p6 = re.compile(r'^\s*IP *MTU: *(?P<ip_mtu>[0-9]+)'
' *bytes *\(using *link *MTU\)$')
m = p6.match(line)
if m:
ip_mtu = int(m.groupdict()['ip_mtu'])
ip_interface_vrf_all_dict[interface]['ip_mtu'] = ip_mtu
continue
#IP primary address route-preference: 0, tag: 0
p7 = re.compile(r'^\s*IP *primary *address *route-preference:'
' *(?P<route_preference>[0-9]+), *tag:'
' *(?P<route_tag>[0-9]+)$')
m = p7.match(line)
if m:
route_preference = m.groupdict()['route_preference']
route_tag = m.groupdict()['route_tag']
if route_preference:
ip_interface_vrf_all_dict[interface]['ipv4'][address]['route_preference']\
= route_preference
if route_tag:
ip_interface_vrf_all_dict[interface]['ipv4'][address]\
['route_tag'] = route_tag
continue
#IP proxy ARP : disabled
p8 = re.compile(r'^\s*IP *proxy *ARP *: *(?P<proxy_arp>[a-z]+)$')
m = p8.match(line)
if m:
proxy_arp = m.groupdict()['proxy_arp']
ip_interface_vrf_all_dict[interface]['proxy_arp'] = proxy_arp
continue
#IP Local Proxy ARP : disabled
p9 = re.compile(r'^\s*IP *Local *Proxy *ARP *:'
' *(?P<local_proxy_arp>[a-z]+)$')
m = p9.match(line)
if m:
local_proxy_arp = m.groupdict()['local_proxy_arp']
ip_interface_vrf_all_dict[interface]['local_proxy_arp']\
= local_proxy_arp
continue
#IP multicast routing: disabled
p10 = re.compile(r'^\s*IP *multicast *routing:'
' *(?P<multicast_routing>[a-z]+)$')
m = p10.match(line)
if m:
multicast_routing = m.groupdict()['multicast_routing']
ip_interface_vrf_all_dict[interface]['multicast_routing']\
= multicast_routing
continue
#IP icmp redirects: disabled
p11 = re.compile(r'^\s*IP *icmp *redirects:'
' *(?P<icmp_redirects>[a-z]+)$')
m = p11.match(line)
if m:
icmp_redirects = m.groupdict()['icmp_redirects']
ip_interface_vrf_all_dict[interface]['icmp_redirects']\
= icmp_redirects
continue
#IP directed-broadcast: disabled
p12 = re.compile(r'^\s*IP directed-broadcast:'
' *(?P<directed_broadcast>[a-z]+)$')
m = p12.match(line)
if m:
directed_broadcast = m.groupdict()['directed_broadcast']
ip_interface_vrf_all_dict[interface]['directed_broadcast']\
= directed_broadcast
continue
#IP Forwarding: disabled
p13 = re.compile(r'^\s*IP *Forwarding: *(?P<ip_forwarding>[a-z]+)$')
m = p13.match(line)
if m:
ip_forwarding = m.groupdict()['ip_forwarding']
ip_interface_vrf_all_dict[interface]['ip_forwarding']\
= ip_forwarding
continue
#IP icmp unreachables (except port): disabled
p14 = re.compile(r'^\s*IP *icmp *unreachables *\(except *port\):'
' *(?P<icmp_unreachable>[a-z]+)$')
m = p14.match(line)
if m:
icmp_unreachable = m.groupdict()['icmp_unreachable']
ip_interface_vrf_all_dict[interface]['icmp_unreachable']\
= icmp_unreachable
continue
#IP icmp port-unreachable: enabled
p15 = re.compile(r'^\s*IP *icmp *port-unreachable:'
' *(?P<icmp_port_unreachable>[a-z]+)$')
m = p15.match(line)
if m:
icmp_port_unreachable = m.groupdict()['icmp_port_unreachable']
ip_interface_vrf_all_dict[interface]['icmp_port_unreachable']\
= icmp_port_unreachable
continue
#IP unicast reverse path forwarding: none
p16 = re.compile(r'^\s*IP *unicast *reverse *path *forwarding:'
' *(?P<unicast_reverse_path>\w+)$')
m = p16.match(line)
if m:
unicast_reverse_path = m.groupdict()['unicast_reverse_path']
ip_interface_vrf_all_dict[interface]['unicast_reverse_path']\
= unicast_reverse_path
continue
#IP load sharing: none
p17 = re.compile(r'^\s*IP *load *sharing: *(?P<load_sharing>\w+)$')
m = p17.match(line)
if m:
load_sharing = m.groupdict()['load_sharing']
ip_interface_vrf_all_dict[interface]['load_sharing']\
= load_sharing
continue
#IP interface statistics last reset: never
p18 = re.compile(r'^\s*IP *interface *statistics *last *reset:'
' *(?P<int_stat_last_reset>[a-zA-Z0-9\:]+)')
m = p18.match(line)
if m:
int_stat_last_reset = m.groupdict()['int_stat_last_reset']
ip_interface_vrf_all_dict[interface]['int_stat_last_reset']\
= int_stat_last_reset
continue
# IP interface software stats: (sent/received/forwarded/originated/consumed)
# Unicast packets : 0/0/0/0/0
# Unicast bytes : 0/0/0/0/0
# Multicast packets : 0/0/0/0/0
# Multicast bytes : 0/0/0/0/0
# Broadcast packets : 0/0/0/0/0
# Broadcast bytes : 0/0/0/0/0
# Labeled packets : 0/0/0/0/0
# Labeled bytes : 0/0/0/0/0
try:
interface
except Exception:
continue
if 'ipv4' in ip_interface_vrf_all_dict[interface]:
#Unicast packets : 0/0/0/0/0
p20 = re.compile(r'^\s*Unicast *packets *:'
' *(?P<unicast_packets_sent>[0-9]+)\/'
'(?P<unicast_packets_received>[0-9]+)\/'
'(?P<unicast_packets_forwarded>[0-9]+)\/'
'(?P<unicast_packets_originated>[0-9]+)\/'
'(?P<unicast_packets_consumed>[0-9]+)$')
m = p20.match(line)
if m:
if 'counters' not in ip_interface_vrf_all_dict[interface]['ipv4'][address]:
ip_interface_vrf_all_dict[interface]['ipv4']['counters'] = {}
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_sent']= int(m.groupdict()['unicast_packets_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_received']= int(m.groupdict()['unicast_packets_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_forwarded']= int(m.groupdict()['unicast_packets_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_originated']= int(m.groupdict()['unicast_packets_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_consumed']= int(m.groupdict()['unicast_packets_consumed'])
continue
#Unicast bytes : 0/0/0/0/0
p21 = re.compile(r'^\s*Unicast *bytes *:'
' *(?P<unicast_bytes_sent>[0-9]+)\/'
'(?P<unicast_bytes_received>[0-9]+)\/'
'(?P<unicast_bytes_forwarded>[0-9]+)\/'
'(?P<unicast_bytes_originated>[0-9]+)\/'
'(?P<unicast_bytes_consumed>[0-9]+)$')
m = p21.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_sent']= int(m.groupdict()['unicast_bytes_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_received']= int(m.groupdict()['unicast_bytes_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_forwarded']= int(m.groupdict()['unicast_bytes_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_originated']= int(m.groupdict()['unicast_bytes_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_consumed']= int(m.groupdict()['unicast_bytes_consumed'])
continue
#Multicast packets : 0/0/0/0/0
p22 = re.compile(r'^\s*Multicast *packets *:'
' *(?P<multicast_packets_sent>[0-9]+)\/'
'(?P<multicast_packets_received>[0-9]+)\/'
'(?P<multicast_packets_forwarded>[0-9]+)\/'
'(?P<multicast_packets_originated>[0-9]+)\/'
'(?P<multicast_packets_consumed>[0-9]+)$')
m = p22.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_sent']= int(m.groupdict()['multicast_packets_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_received']= int(m.groupdict()['multicast_packets_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_forwarded']= int(m.groupdict()['multicast_packets_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_originated']= int(m.groupdict()['multicast_packets_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_consumed']= int(m.groupdict()['multicast_packets_consumed'])
continue
#Multicast bytes : 0/0/0/0/0
p23 = re.compile(r'^\s*Multicast *bytes *:'
' *(?P<multicast_bytes_sent>[0-9]+)\/'
'(?P<multicast_bytes_received>[0-9]+)\/'
'(?P<multicast_bytes_forwarded>[0-9]+)\/'
'(?P<multicast_bytes_originated>[0-9]+)\/'
'(?P<multicast_bytes_consumed>[0-9]+)$')
m = p23.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_sent']= int(m.groupdict()['multicast_bytes_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_received']= int(m.groupdict()['multicast_bytes_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_forwarded']= int(m.groupdict()['multicast_bytes_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_originated']= int(m.groupdict()['multicast_bytes_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_consumed']= int(m.groupdict()['multicast_bytes_consumed'])
continue
#Broadcast packets : 0/0/0/0/0
p24 = re.compile(r'^\s*Broadcast *packets *:'
' *(?P<broadcast_packets_sent>[0-9]+)\/'
'(?P<broadcast_packets_received>[0-9]+)\/'
'(?P<broadcast_packets_forwarded>[0-9]+)\/'
'(?P<broadcast_packets_originated>[0-9]+)\/'
'(?P<broadcast_packets_consumed>[0-9]+)$')
m = p24.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_sent']= int(m.groupdict()['broadcast_packets_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_received']= int(m.groupdict()['broadcast_packets_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_forwarded']= int(m.groupdict()['broadcast_packets_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_originated']= int(m.groupdict()['broadcast_packets_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_consumed']= int(m.groupdict()['broadcast_packets_consumed'])
continue
#Broadcast bytes : 0/0/0/0/0
p25 = re.compile(r'^\s*Broadcast *bytes *:'
' *(?P<broadcast_bytes_sent>[0-9]+)\/'
'(?P<broadcast_bytes_received>[0-9]+)\/'
'(?P<broadcast_bytes_forwarded>[0-9]+)\/'
'(?P<broadcast_bytes_originated>[0-9]+)\/'
'(?P<broadcast_bytes_consumed>[0-9]+)$')
m = p25.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_sent']= int(m.groupdict()['broadcast_bytes_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_received']= int(m.groupdict()['broadcast_bytes_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_forwarded']= int(m.groupdict()['broadcast_bytes_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_originated']= int(m.groupdict()['broadcast_bytes_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_consumed']= int(m.groupdict()['broadcast_bytes_consumed'])
continue
#Labeled packets : 0/0/0/0/0
p26 = re.compile(r'^\s*Labeled *packets *:'
' *(?P<labeled_packets_sent>[0-9]+)\/'
'(?P<labeled_packets_received>[0-9]+)\/'
'(?P<labeled_packets_forwarded>[0-9]+)\/'
'(?P<labeled_packets_originated>[0-9]+)\/'
'(?P<labeled_packets_consumed>[0-9]+)$')
m = p26.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_sent']= int(m.groupdict()['labeled_packets_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_received']= int(m.groupdict()['labeled_packets_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_forwarded']= int(m.groupdict()['labeled_packets_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_originated']= int(m.groupdict()['labeled_packets_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_consumed']= int(m.groupdict()['labeled_packets_consumed'])
continue
#Labeled bytes : 0/0/0/0/0
p27 = re.compile(r'^\s*Labeled *bytes *:'
' *(?P<labeled_bytes_sent>[0-9]+)\/'
'(?P<labeled_bytes_received>[0-9]+)\/'
'(?P<labeled_bytes_forwarded>[0-9]+)\/'
'(?P<labeled_bytes_originated>[0-9]+)\/'
'(?P<labeled_bytes_consumed>[0-9]+)$')
m = p27.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_sent']= int(m.groupdict()['labeled_bytes_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_received']= int(m.groupdict()['labeled_bytes_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_forwarded']= int(m.groupdict()['labeled_bytes_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_originated']= int(m.groupdict()['labeled_bytes_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_consumed']= int(m.groupdict()['labeled_bytes_consumed'])
continue
#WCCP Redirect outbound: disabled
p28 = re.compile(r'^\s*WCCP *Redirect *outbound:'
' *(?P<wccp_redirect_outbound>[a-z]+)$')
m = p28.match(line)
if m:
wccp_redirect_outbound = m.groupdict()['wccp_redirect_outbound']
ip_interface_vrf_all_dict[interface]['wccp_redirect_outbound']\
= wccp_redirect_outbound
continue
#WCCP Redirect inbound: disabled
p29 = re.compile(r'^\s*WCCP *Redirect *inbound:'
' *(?P<wccp_redirect_inbound>[a-z]+)$')
m = p29.match(line)
if m:
wccp_redirect_inbound = m.groupdict()['wccp_redirect_inbound']
ip_interface_vrf_all_dict[interface]['wccp_redirect_inbound']\
= wccp_redirect_inbound
continue
#WCCP Redirect exclude: disabled
p30 = re.compile(r'^\s*WCCP *Redirect *exclude:'
' *(?P<wccp_redirect_exclude>[a-z]+)$')
m = p30.match(line)
if m:
wccp_redirect_exclude = m.groupdict()['wccp_redirect_exclude']
ip_interface_vrf_all_dict[interface]['wccp_redirect_exclude']\
= wccp_redirect_exclude
continue
# IP unnumbered interface (loopback0)
p31 = re.compile(r'^\s*IP +unnumbered +interface +\((?P<unnum_intf>[\w\/\.]+)\)$')
m = p31.match(line)
if m:
unnum_intf = m.groupdict()['unnum_intf']
if 'ipv4' in ip_interface_vrf_all_dict[interface]:
ip_interface_vrf_all_dict[interface]['ipv4']['unnumbered'] = {}
ip_interface_vrf_all_dict[interface]['ipv4']['unnumbered']['interface_ref']\
= unnum_intf
continue
return ip_interface_vrf_all_dict
# ===================================
# Schema for 'show vrf all interface'
# ===================================
class ShowVrfAllInterfaceSchema(MetaParser):
    """Schema for 'show vrf all interface'.

    Top-level keys are interface names; each maps to the VRF membership
    information reported for that interface.
    """

    schema = {
        Any():  # interface name, e.g. 'Ethernet2/1', 'Null0'
            {'vrf': str,             # name of the VRF the interface belongs to
             'vrf_id': int,          # numeric VRF identifier
             'site_of_origin': str   # site-of-origin value ('--' when unset)
            },
        }
# ===================================
# Parser for 'show vrf all interface'
# ===================================
class ShowVrfAllInterface(ShowVrfAllInterfaceSchema):
    """Parser for:
           show vrf all interface
           show vrf <vrf> interface <interface>
           show vrf <vrf> interface
           show vrf all interface <interface>
    """

    cli_command = ['show vrf {vrf} interface {interface}',
                   'show vrf all interface {interface}',
                   'show vrf {vrf} interface', 'show vrf all interface']
    exclude = [
        '(Null.*)']

    def cli(self, interface='', vrf='', output=None):
        """Parse VRF membership per interface.

        Args:
            interface: optional interface name to restrict the command to.
            vrf: optional VRF name to restrict the command to.
            output: pre-collected device output; when None the selected
                command is executed on ``self.device``.

        Returns:
            dict: ``{interface: {'vrf': str, 'vrf_id': int,
                                 'site_of_origin': str}}``
        """
        # Pick the most specific command variant for the arguments given.
        if interface and vrf:
            cmd = self.cli_command[0].format(interface=interface, vrf=vrf)
        elif interface:
            cmd = self.cli_command[1].format(interface=interface)
        elif vrf:
            cmd = self.cli_command[2].format(vrf=vrf)
        else:
            cmd = self.cli_command[3]

        if output is None:
            out = self.device.execute(cmd)
        else:
            out = output

        vrf_all_interface_dict = {}

        # Interface VRF-Name VRF-ID Site-of-Origin
        # Ethernet2/1 VRF1 3 --
        # Null0 default 1 --
        # Ethernet2/1.10 default 1 --
        # Ethernet2/1.20 default 1 --
        # Ethernet2/4 default 1 --
        # Ethernet2/5 default 1 --
        # Ethernet2/6 default 1 --
        #
        # Compiled once here instead of on every line of output.
        # NOTE(review): the interface character class omits '-', so names
        # such as 'port-channel2' would not match -- confirm intended.
        p1 = re.compile(r'^\s*(?P<interface>[a-zA-Z0-9\.\/]+)'
                         ' *(?P<vrf>[a-zA-Z0-9]+)'
                         ' *(?P<vrf_id>[0-9]+)'
                         ' *(?P<site_of_origin>[a-zA-Z\-]+)$')

        for line in out.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                group = m.groupdict()
                intf_dict = vrf_all_interface_dict.setdefault(
                    group['interface'], {})
                intf_dict['vrf'] = group['vrf']
                intf_dict['vrf_id'] = int(group['vrf_id'])
                intf_dict['site_of_origin'] = group['site_of_origin']
                continue

        return vrf_all_interface_dict
# ======================================
# Schema for 'show interface switchport'
# ======================================
class ShowInterfaceSwitchportSchema(MetaParser):
    """Schema for 'show interface switchport'.

    Top-level keys are interface names; each maps to the layer-2
    switchport attributes reported for that interface.
    """

    schema = {
        Any():  # interface name, e.g. 'Ethernet2/2'
            {'switchport_status': str,                            # e.g. 'enabled' (lower-cased by the parser)
             Optional('switchport_monitor'): str,                 # e.g. 'Not enabled'
             Optional('switchport_mode'): str,                    # operational mode, e.g. 'Private-vlan host'
             Optional('access_vlan'): int,                        # access-mode VLAN id
             'switchport_enable': bool,                           # True when status contains 'enable'
             Optional('access_vlan_mode'): str,                   # VLAN name in parentheses, e.g. 'default'
             Optional('native_vlan'): int,                        # trunking native VLAN id
             Optional('native_vlan_mode'): str,                   # native VLAN name, e.g. 'default'
             Optional('trunk_vlans'): str,                        # allowed VLAN list/ranges, e.g. '100,300'
             Optional('admin_priv_vlan_primary_host_assoc'): str,
             Optional('admin_priv_vlan_secondary_host_assoc'): str,
             Optional('admin_priv_vlan_primary_mapping'): str,
             Optional('admin_priv_vlan_secondary_mapping'): str,
             Optional('admin_priv_vlan_trunk_native_vlan'): str,
             Optional('admin_priv_vlan_trunk_encapsulation'): str,  # e.g. 'dot1q'
             Optional('admin_priv_vlan_trunk_normal_vlans'): str,
             Optional('admin_priv_vlan_trunk_private_vlans'): str,
             Optional('operational_private_vlan'): str              # e.g. '(2500,101)'
            },
        }
# ======================================
# Parser for 'show interface switchport'
# ======================================
class ShowInterfaceSwitchport(ShowInterfaceSwitchportSchema):
    """Parser for:
           show interface switchport
           show interface <interface> switchport
    """

    cli_command = ['show interface switchport', 'show interface {interface} switchport']

    def cli(self, interface="", output=None):
        """Parse layer-2 switchport attributes per interface.

        Args:
            interface: optional interface to restrict the command to.
            output: pre-collected device output; when None the command
                is executed on ``self.device``.

        Returns:
            dict: ``{interface: {attribute: value, ...}}`` per the schema.
        """
        if output is None:
            if interface:
                cmd = self.cli_command[1].format(interface=interface)
            else:
                cmd = self.cli_command[0]
            out = self.device.execute(cmd)
        else:
            out = output

        interface_switchport_dict = {}

        # All patterns are compiled once here, instead of being rebuilt
        # for every line of device output as before.

        # Name: Ethernet2/2
        p1 = re.compile(r'^\s*Name: *(?P<interface>[a-zA-Z0-9\/\-\.]+)$')

        # Switchport: Enabled
        p2 = re.compile(r'^\s*Switchport: *(?P<switchport_status>[a-zA-Z\s]+)$')

        # Switchport Monitor: Not enabled
        p3 = re.compile(r'^\s*Switchport *Monitor: *(?P<switchport_monitor>[a-zA-Z\s]+)$')

        # Operational Mode: Private-vlan host
        p4 = re.compile(r'^\s*Operational *Mode: *(?P<switchport_mode>[\w\s-]+)$')

        # Access Mode VLAN: 1 (default)
        # Access Mode VLAN: 7 (server-vlan7)
        # Access Mode VLAN: 551 (Test_VM_192.168.1.0/24)
        p5 = re.compile(r'^\s*Access *Mode *VLAN: *(?P<access_vlan>[0-9]+)'
                         '(?: *\((?P<access_vlan_mode>[\S\s]+)\))?$')

        # Trunking Native Mode VLAN: 1 (default)
        # Trunking Native Mode VLAN: 200 (VLAN0200)
        # Trunking Native Mode VLAN: 3967 (Vlan not created)
        # Trunking Native Mode VLAN: 451 (VM_Machines_192.168.1.0/24)
        p6 = re.compile(r'^\s*Trunking *Native *Mode *VLAN:'
                         ' *(?P<native_vlan>[0-9]+)'
                         ' *\((?P<native_vlan_mode>[\S\s]+)\)$')

        # Trunking VLANs Allowed: 100,300
        p7 = re.compile(r'^\s*Trunking *VLANs *Allowed: *(?P<trunk_vlans>[0-9\,\-]+)$')

        # Administrative private-vlan primary host-association: 2000
        p8 = re.compile(r'^\s*Administrative *private-vlan *primary'
                         ' *host-association:'
                         ' *(?P<admin_priv_vlan_primary_host_assoc>\w+)$')

        # Administrative private-vlan secondary host-association: 110
        p9 = re.compile(r'^\s*Administrative *private-vlan *secondary'
                         ' *host-association:'
                         ' *(?P<admin_priv_vlan_secondary_host_assoc>\w+)$')

        # Administrative private-vlan primary mapping: none
        p10 = re.compile(r'^\s*Administrative *private-vlan *primary'
                          ' *mapping:'
                          ' *(?P<admin_priv_vlan_primary_mapping>\w+)$')

        # Administrative private-vlan secondary mapping: none
        p11 = re.compile(r'^\s*Administrative *private-vlan *secondary'
                          ' *mapping:'
                          ' *(?P<admin_priv_vlan_secondary_mapping>\w+)$')

        # Administrative private-vlan trunk native VLAN: 1
        p12 = re.compile(r'^\s*Administrative *private-vlan *trunk *native'
                          ' *VLAN:'
                          ' *(?P<admin_priv_vlan_trunk_native_vlan>\w+)$')

        # Administrative private-vlan trunk encapsulation: dot1q
        p13 = re.compile(r'^\s*Administrative *private-vlan *trunk'
                          ' *encapsulation:'
                          ' *(?P<admin_priv_vlan_trunk_encapsulation>[a-z0-9]+)$')

        # Administrative private-vlan trunk normal VLANs: none
        p14 = re.compile(r'^\s*Administrative *private-vlan *trunk'
                          ' *normal VLANs:'
                          ' *(?P<admin_priv_vlan_trunk_normal_vlans>\w+)$')

        # Administrative private-vlan trunk private VLANs: none
        # Administrative private-vlan trunk private VLANs: none(0 none)
        p15 = re.compile(r'^\s*Administrative *private-vlan *trunk'
                          ' *private VLANs:'
                          ' *(?P<admin_priv_vlan_trunk_private_vlans>\w+)(?P<dummy>.*)?$')

        # Operational private-vlan: (2500,101)
        p16 = re.compile(r'^\s*Operational *private-vlan:'
                          ' *(?P<operational_private_vlan>\S+)$')

        for line in out.splitlines():
            line = line.rstrip()

            # Name: Ethernet2/2 -- starts a new per-interface sub-dict;
            # subsequent matches attach to the most recent interface.
            m = p1.match(line)
            if m:
                interface = m.groupdict()['interface']
                interface_switchport_dict.setdefault(interface, {})
                continue

            # Switchport: Enabled
            m = p2.match(line)
            if m:
                switchport_status = m.groupdict()['switchport_status'].lower()
                intf_dict = interface_switchport_dict[interface]
                intf_dict['switchport_status'] = switchport_status
                # 'enabled' -> True; 'disabled' (no 'enable' substring) -> False
                intf_dict['switchport_enable'] = 'enable' in switchport_status
                continue

            # Switchport Monitor: Not enabled
            m = p3.match(line)
            if m:
                interface_switchport_dict[interface]['switchport_monitor'] = \
                    m.groupdict()['switchport_monitor']
                continue

            # Operational Mode: Private-vlan host
            m = p4.match(line)
            if m:
                interface_switchport_dict[interface]['switchport_mode'] = \
                    m.groupdict()['switchport_mode']
                continue

            # Access Mode VLAN: 1 (default)
            m = p5.match(line)
            if m:
                intf_dict = interface_switchport_dict[interface]
                intf_dict['access_vlan'] = int(m.groupdict()['access_vlan'])
                # May be None when no parenthesized name is present.
                intf_dict['access_vlan_mode'] = m.groupdict()['access_vlan_mode']
                continue

            # Trunking Native Mode VLAN: 1 (default)
            m = p6.match(line)
            if m:
                intf_dict = interface_switchport_dict[interface]
                intf_dict['native_vlan'] = int(m.groupdict()['native_vlan'])
                intf_dict['native_vlan_mode'] = m.groupdict()['native_vlan_mode']
                continue

            # Trunking VLANs Allowed: 100,300
            m = p7.match(line)
            if m:
                interface_switchport_dict[interface]['trunk_vlans'] = \
                    m.groupdict()['trunk_vlans']
                continue

            # Administrative private-vlan primary host-association: 2000
            m = p8.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_primary_host_assoc'] = \
                    m.groupdict()['admin_priv_vlan_primary_host_assoc']
                continue

            # Administrative private-vlan secondary host-association: 110
            m = p9.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_secondary_host_assoc'] = \
                    m.groupdict()['admin_priv_vlan_secondary_host_assoc']
                continue

            # Administrative private-vlan primary mapping: none
            m = p10.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_primary_mapping'] = \
                    m.groupdict()['admin_priv_vlan_primary_mapping']
                continue

            # Administrative private-vlan secondary mapping: none
            m = p11.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_secondary_mapping'] = \
                    m.groupdict()['admin_priv_vlan_secondary_mapping']
                continue

            # Administrative private-vlan trunk native VLAN: 1
            m = p12.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_trunk_native_vlan'] = \
                    m.groupdict()['admin_priv_vlan_trunk_native_vlan']
                continue

            # Administrative private-vlan trunk encapsulation: dot1q
            m = p13.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_trunk_encapsulation'] = \
                    m.groupdict()['admin_priv_vlan_trunk_encapsulation']
                continue

            # Administrative private-vlan trunk normal VLANs: none
            m = p14.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_trunk_normal_vlans'] = \
                    m.groupdict()['admin_priv_vlan_trunk_normal_vlans']
                continue

            # Administrative private-vlan trunk private VLANs: none(0 none)
            m = p15.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_trunk_private_vlans'] = \
                    m.groupdict()['admin_priv_vlan_trunk_private_vlans']
                continue

            # Operational private-vlan: (2500,101)
            m = p16.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['operational_private_vlan'] = \
                    m.groupdict()['operational_private_vlan']
                continue

        return interface_switchport_dict
# ========================================
# Schema for 'show ipv6 interface vrf all'
# ========================================
class ShowIpv6InterfaceVrfAllSchema(MetaParser):
    """Schema for 'show ipv6 interface vrf all'.

    Top-level keys are interface names; each maps to the interface's
    IPv6 state.  Inside 'ipv6', address keys ('ip/prefix') coexist with
    flat attribute keys (link-local info, counters, feature states).
    """

    schema = {
        Any():  # interface name, e.g. 'Ethernet2/1'
            {'vrf': str,               # VRF the interface belongs to
             'interface_status': str,  # e.g. 'protocol-up/link-up/admin-up'
             'iod': int,               # internal interface object descriptor
             'enabled': bool,          # parser always records True for listed interfaces
             Optional('ipv6'):
                {Any():  # address key in 'ip/prefix_length' form
                    {Optional('ip'): str,
                     Optional('prefix_length'): str,
                     Optional('anycast'): bool,   # True for anycast-configured addresses
                     Optional('status'): str,     # e.g. 'valid'
                     },
                'counters':
                    # Software-path packet/byte counters per direction
                    {'unicast_packets_forwarded': int,
                     'unicast_packets_originated': int,
                     'unicast_packets_consumed': int,
                     'unicast_bytes_forwarded': int,
                     'unicast_bytes_originated': int,
                     'unicast_bytes_consumed': int,
                     'multicast_packets_forwarded': int,
                     'multicast_packets_originated': int,
                     'multicast_packets_consumed': int,
                     'multicast_bytes_forwarded': int,
                     'multicast_bytes_originated': int,
                     'multicast_bytes_consumed': int,
                     },
                 Optional('ipv6_subnet'): str,        # e.g. '2001:db8:1:1::/64'
                 'ipv6_link_local': str,              # link-local address
                 'ipv6_link_local_state': str,        # e.g. 'default'
                 'ipv6_ll_state': str,                # e.g. 'valid' (lower-cased)
                 Optional('ipv6_virtual_add'): str,   # 'none' when no virtual addresses
                 Optional('ipv6_virtual_groups'): list,
                 Optional('virtual_add'): bool,       # True when a virtual-address list follows
                 Optional('multicast_groups'): bool,  # True when a multicast-group list follows
                 'ipv6_multicast_routing': str,       # 'enabled'/'disabled'
                 'ipv6_report_link_local': str,
                 'ipv6_forwarding_feature': str,
                 Optional('ipv6_multicast_groups'): list,
                 Optional('ipv6_multicast_entries'): str,
                 'ipv6_mtu': int,
                 'ipv6_unicast_rev_path_forwarding': str,
                 'ipv6_load_sharing': str,
                 'ipv6_last_reset': str
                },
            },
        }
# ========================================
# Parser for 'show ipv6 interface vrf all'
# ========================================
class ShowIpv6InterfaceVrfAll(ShowIpv6InterfaceVrfAllSchema):
"""Parser for show ipv6 interface vrf all
show ipv6 interface vrf <vrf>
show ipv6 interface <interface> vrf all
show ipv6 interface <interface> vrf <vrf>"""
cli_command = ['show ipv6 interface {interface} vrf {vrf}', 'show ipv6 interface {interface} vrf all',
'show ipv6 interface vrf {vrf}', 'show ipv6 interface vrf all']
exclude = [
'multicast_bytes_consumed',
'multicast_packets_consumed',
'multicast_bytes_originated',
'multicast_packets_originated',
'unicast_bytes_consumed',
'unicast_packets_consumed',
'unicast_bytes_originated',
'unicast_packets_originated',
'ipv6_multicast_groups',
'iod',
'multicast_groups',
'unicast_bytes_forwarded',
'unicast_packets_forwarded',
'ipv6_link_local']
def cli(self, interface='', vrf='', output=None):
if interface and vrf:
cmd = self.cli_command[0].format(interface=interface, vrf=vrf)
elif interface:
cmd = self.cli_command[1].format(interface=interface)
elif vrf:
cmd = self.cli_command[2].format(vrf=vrf)
else:
cmd = self.cli_command[3]
if output is None:
out = self.device.execute(cmd)
else:
out = output
del interface
# Init variables
ipv6_interface_dict = {}
ipv6_addresses = None
anycast_addresses = None
virtual_add = False
multicast_groups = False
for line in out.splitlines():
line = line.rstrip()
#IPv6 Interface Status for VRF "VRF1"
p1 = re.compile(r'^\s*IPv6 *Interface *Status *for *VRF'
' *(?P<vrf>\S+)$')
m = p1.match(line)
if m:
vrf = m.groupdict()['vrf']
vrf = vrf.replace('"',"")
continue
#Ethernet2/1, Interface status: protocol-up/link-up/admin-up, iod: 36
#port-channel2.101, Interface status: protocol-down/link-down/admin-up, iod: 71
p2 = re.compile(r'^\s*(?:(?P<interface>[a-zA-Z0-9\/\-\.]+)), Interface'
' *status: *(?P<interface_status>[a-z\-\/]+),'
' *iod: *(?P<iod>[0-9]+)$')
m = p2.match(line)
if m:
interface = str(m.groupdict()['interface'])
interface_status = m.groupdict()['interface_status']
iod = int(m.groupdict()['iod'])
if interface not in ipv6_interface_dict:
ipv6_interface_dict[interface] = {}
ipv6_interface_dict[interface]['iod'] = iod
ipv6_interface_dict[interface]['interface_status'] = interface_status
ipv6_interface_dict[interface]['vrf'] = vrf
ipv6_interface_dict[interface]['enabled'] = True
# init multicast groups list to empty for this interface
ipv6_multicast_groups = []
ipv6_virtual_groups = []
ipv6_multicast_entries = multicast_groups = False
continue
# IPv6 address:
p3_1 = re.compile(r'^\s*IPv6 address:$')
m = p3_1.match(line)
if m:
ipv6_addresses = True
anycast_addresses = False
continue
# Anycast configured addresses:
p3_2 = re.compile(r'^\s*Anycast configured addresses:$')
m = p3_2.match(line)
if m:
anycast_addresses = True
ipv6_addresses = False
continue
# 2001:db8:1:1::1/64 [VALID]
p3_3 = re.compile(r'^\s*(?P<ip>[a-z0-9\:]+)'
'\/(?P<prefix_length>[0-9]+)'
' *\[(?P<status>[a-zA-Z]+)\]$')
m = p3_3.match(line)
if m:
ip = m.groupdict()['ip']
prefix_length = m.groupdict()['prefix_length']
status = m.groupdict()['status'].lower()
address = ip + '/' + prefix_length
if 'ipv6' not in ipv6_interface_dict[interface]:
ipv6_interface_dict[interface]['ipv6'] = {}
if address not in ipv6_interface_dict[interface]['ipv6']:
ipv6_interface_dict[interface]['ipv6'][address] = {}
ipv6_interface_dict[interface]['ipv6'][address]\
['ip'] = ip
ipv6_interface_dict[interface]['ipv6'][address]\
['prefix_length'] = prefix_length
if ipv6_addresses:
ipv6_interface_dict[interface]['ipv6'][address]\
['status'] = status
elif anycast_addresses:
ipv6_interface_dict[interface]['ipv6'][address]\
['anycast'] = True
continue
#IPv6 subnet: 2001:db8:1:1::/64
p4 = re.compile(r'^\s*IPv6 *subnet:'
' *(?P<ipv6_subnet>[a-z0-9\:\/]+)$')
m = p4.match(line)
if m:
ipv6_subnet = m.groupdict()['ipv6_subnet']
ipv6_interface_dict[interface]['ipv6']['ipv6_subnet'] = ipv6_subnet
continue
#IPv6 link-local address: fe80::a8aa:bbff:febb:cccc (default) [VALID]
p5 = re.compile(r'^\s*IPv6 *link-local *address:'
' *(?P<ipv6_link_local>[a-z0-9\:\s]+)'
' *\((?P<ipv6_link_local_state>[a-z]+)\)'
' *\[(?P<ipv6_ll_state>[A-Z]+)\]$')
m = p5.match(line)
if m:
ipv6_link_local = m.groupdict()['ipv6_link_local']
ipv6_link_local_state = m.groupdict()['ipv6_link_local_state']
ipv6_ll_state = m.groupdict()['ipv6_ll_state'].lower()
if 'ipv6' not in ipv6_interface_dict[interface]:
ipv6_interface_dict[interface]['ipv6'] = {}
ipv6_interface_dict[interface]['ipv6']['ipv6_link_local'] = ipv6_link_local
ipv6_interface_dict[interface]['ipv6']['ipv6_link_local_state'] = ipv6_link_local_state
ipv6_interface_dict[interface]['ipv6']['ipv6_ll_state'] = ipv6_ll_state
continue
#IPv6 virtual addresses configured: none
p6 = re.compile(r'^\s*IPv6 *virtual *addresses *configured:'
' *(?P<ipv6_virtual_add>\w+)$')
m = p6.match(line)
if m:
ipv6_virtual_add = m.groupdict()['ipv6_virtual_add']
ipv6_interface_dict[interface]['ipv6']['ipv6_virtual_add'] = ipv6_virtual_add
continue
#IPv6 virtual addresses configured:
# fe80::5:73ff:fea0:2 2001:db8:7746:fa41::1
p6_1 = re.compile(r'^\s*(IPv6 virtual *(?P<virtual_add>(addresses|address) configured:))$')
m = p6_1.match(line)
if m:
virtual_add = m.groupdict()['virtual_add']
ipv6_interface_dict[interface]['ipv6']['virtual_add'] = True
continue
if virtual_add:
p6_2 = re.compile(r'^\s*(?P<ipv6_virtual_addresses>[a-z0-9\:\s]+)$')
m = p6_2.match(line)
if m:
ipv6_virtual_addresses = str(m.groupdict()['ipv6_virtual_addresses'])
#split string of addresses to list
ipv6_virtual_addresses = [str(j) for j in ipv6_virtual_addresses.split()]
#Add to previous created list
for add in ipv6_virtual_addresses:
ipv6_virtual_groups.append(add)
ipv6_interface_dict[interface]['ipv6']['ipv6_virtual_groups']\
= sorted(ipv6_virtual_groups)
continue
#IPv6 multicast routing: disabled
p7 = re.compile(r'^\s*IPv6 *multicast *routing:'
' *(?P<ipv6_multicast_routing>[a-z]+)$')
m = p7.match(line)
if m:
ipv6_multicast_routing = m.groupdict()['ipv6_multicast_routing']
ipv6_interface_dict[interface]['ipv6']['ipv6_multicast_routing'] = ipv6_multicast_routing
continue
#IPv6 report link local: disabled
p8 = re.compile(r'^\s*IPv6 *report *link *local:'
' *(?P<ipv6_report_link_local>[a-z]+)$')
m = p8.match(line)
if m:
ipv6_report_link_local = m.groupdict()['ipv6_report_link_local']
ipv6_interface_dict[interface]['ipv6']['ipv6_report_link_local']\
= ipv6_report_link_local
continue
#IPv6 Forwarding feature: disabled
p9 = re.compile(r'^\s*IPv6 *Forwarding *feature:'
' *(?P<ipv6_forwarding_feature>[a-z]+)$')
m = p9.match(line)
if m:
ipv6_forwarding_feature = m.groupdict()['ipv6_forwarding_feature']
ipv6_interface_dict[interface]['ipv6']['ipv6_forwarding_feature']\
= ipv6_forwarding_feature
continue
#IPv6 multicast groups locally joined:
p10 = re.compile(r'^\s*(?P<multicast_groups>(IPv6 *multicast *(groups|group) *locally *joined:))$')
m = p10.match(line)
if m:
virtual_add = False
multicast_groups = m.groupdict()['multicast_groups']
ipv6_interface_dict[interface]['ipv6']['multicast_groups'] = True
continue
if multicast_groups:
# ff02::1:ffbb:cccc ff02::1:ff00:3 ff02::1:ff00:2 ff02::2
# ff02::1 ff02::1:ff00:1 ff02::1:ffbb:cccc ff02::1:ff00:0
# ff02::1:ffad:beef ff02::1:ff00:1(2) ff02::2(2) ff02::1(2)
p11 = re.compile(r'^\s*(?P<ipv6_multicast_group_addresses>[a-z0-9\(\)\:\s]+)$')
m = p11.match(line)
if m:
ipv6_multicast_group_addresses = str(m.groupdict()['ipv6_multicast_group_addresses'])
# Split string of addressed into a list
ipv6_multicast_group_addresses = [str(i) for i in ipv6_multicast_group_addresses.split()]
# Add to previous created list
for address in ipv6_multicast_group_addresses:
ipv6_multicast_groups.append(address)
ipv6_interface_dict[interface]['ipv6']['ipv6_multicast_groups']\
= sorted(ipv6_multicast_groups)
continue
# IPv6 multicast (S,G) entries joined: none
# IPv6 multicast (S,G) entries joined:
# (2001:20:1:1::254, ff38::1)
p12 = re.compile(r'^\s*IPv6 *multicast *\(S\,G\) *entries *joined:$')
m = p12.match(line)
if m:
ipv6_multicast_entries = True
continue
# (2001:20:1:1::254, ff38::1)
p12_1 = re.compile(r'^\s*\((?P<ip_list>.*)\)')
m = p12_1.match(line)
if m and ipv6_multicast_entries:
ipv6_multicast_entries = m.groupdict()['ip_list']
ipv6_interface_dict[interface]['ipv6']['ipv6_multicast_entries']\
= ipv6_multicast_entries
continue
#IPv6 MTU: 1600 (using link MTU)
p13 = re.compile(r'^\s*IPv6 *MTU: *(?P<ipv6_mtu>[0-9]+)'
' *\(using *link *MTU\)$')
m = p13.match(line)
if m:
ipv6_mtu = int(m.groupdict()['ipv6_mtu'])
ipv6_interface_dict[interface]['ipv6']['ipv6_mtu'] = ipv6_mtu
continue
#IPv6 unicast reverse path forwarding: none
p14 = re.compile(r'^\s*IPv6 *unicast *reverse *path *forwarding:'
' *(?P<ipv6_unicast_rev_path_forwarding>\w+)$')
m = p14.match(line)
if m:
ipv6_unicast_rev_path_forwarding = m.groupdict()\
['ipv6_unicast_rev_path_forwarding']
ipv6_interface_dict[interface]['ipv6']\
['ipv6_unicast_rev_path_forwarding']\
= ipv6_unicast_rev_path_forwarding
continue
#IPv6 load sharing: none
p15 = re.compile(r'^\s*IPv6 *load *sharing:'
' *(?P<ipv6_load_sharing>\w+)$')
m = p15.match(line)
if m:
ipv6_load_sharing = m.groupdict()['ipv6_load_sharing']
ipv6_interface_dict[interface]['ipv6']['ipv6_load_sharing']\
= ipv6_load_sharing
continue
#IPv6 interface statistics last reset: never
p16 = re.compile(r'^\s*IPv6 *interface *statistics *last *reset:'
' *(?P<ipv6_last_reset>[a-z]+)$')
m = p16.match(line)
if m:
ipv6_last_reset = m.groupdict()['ipv6_last_reset']
ipv6_interface_dict[interface]['ipv6']['ipv6_last_reset']\
= ipv6_last_reset
continue
#Unicast packets: 0/0/0
p18 = re.compile(r'^\s*Unicast *packets:'
' *(?P<unicast_packets_forwarded>[0-9]+)\/'
'(?P<unicast_packets_originated>[0-9]+)\/'
'(?P<unicast_packets_consumed>[0-9]+)$')
m = p18.match(line)
if m:
if 'counters' not in ipv6_interface_dict[interface]['ipv6']:
ipv6_interface_dict[interface]['ipv6']['counters'] = {}
ipv6_interface_dict[interface]['ipv6']['counters']\
['unicast_packets_forwarded'] = int(m.groupdict()['unicast_packets_forwarded'])
ipv6_interface_dict[interface]['ipv6']['counters']\
['unicast_packets_originated'] = int(m.groupdict()['unicast_packets_originated'])
ipv6_interface_dict[interface]['ipv6']['counters']\
['unicast_packets_consumed'] = int(m.groupdict()['unicast_packets_consumed'])
continue
#Unicast bytes: 0/0/0
p19 = re.compile(r'^\s*Unicast *bytes: *(?P<unicast_bytes_forwarded>[0-9]+)'
'\/(?P<unicast_bytes_originated>[0-9]+)\/'
'(?P<unicast_bytes_consumed>[0-9]+)$')
m = p19.match(line)
if m:
ipv6_interface_dict[interface]['ipv6']['counters']\
['unicast_bytes_forwarded'] = int(m.groupdict()['unicast_bytes_forwarded'])
ipv6_interface_dict[interface]['ipv6']['counters']\
['unicast_bytes_originated'] = int(m.groupdict()['unicast_bytes_originated'])
ipv6_interface_dict[interface]['ipv6']['counters']\
['unicast_bytes_consumed'] = int(m.groupdict()['unicast_bytes_consumed'])
continue
#Multicast packets: 0/12/9
p20 = re.compile(r'^\s*Multicast *packets: *(?P<multicast_packets_forwarded>[0-9]+)'
'\/(?P<multicast_packets_originated>[0-9]+)\/'
'(?P<multicast_packets_consumed>[0-9]+)$')
m = p20.match(line)
if m:
ipv6_interface_dict[interface]['ipv6']['counters']\
['multicast_packets_forwarded'] = int(m.groupdict()['multicast_packets_forwarded'])
ipv6_interface_dict[interface]['ipv6']['counters']\
['multicast_packets_originated'] = int(m.groupdict()['multicast_packets_originated'])
ipv6_interface_dict[interface]['ipv6']['counters']\
['multicast_packets_consumed'] = int(m.groupdict()['multicast_packets_consumed'])
continue
#Multicast bytes: 0/1144/640
p21 = re.compile(r'^\s*Multicast *bytes: *(?P<multicast_bytes_forwarded>[0-9]+)\/'
'(?P<multicast_bytes_originated>[0-9]+)\/'
'(?P<multicast_bytes_consumed>[0-9]+)$')
m = p21.match(line)
if m:
ipv6_interface_dict[interface]['ipv6']['counters']\
['multicast_bytes_forwarded'] = int(m.groupdict()['multicast_bytes_forwarded'])
ipv6_interface_dict[interface]['ipv6']['counters']\
['multicast_bytes_originated'] = int(m.groupdict()['multicast_bytes_originated'])
ipv6_interface_dict[interface]['ipv6']['counters']\
['multicast_bytes_consumed'] = int(m.groupdict()['multicast_bytes_consumed'])
continue
return ipv6_interface_dict
# ====================================
# Schema for 'show ip interface brief'
# ====================================
class ShowIpInterfaceBriefSchema(MetaParser):
    """Schema for 'show ip interface brief'

    'interface' maps each interface name to its parsed attributes.
    SVI interfaces (VlanN) nest their attributes one level deeper under
    'vlan_id', keyed by the VLAN number taken from the interface name.
    """
    schema = {'interface':
                {Any():
                    # Vlan interfaces: attributes nested per VLAN id
                    {Optional('vlan_id'):
                        {Optional(Any()):
                            {'ip_address': str,
                             'interface_status': str,
                             # qualifier appended from a continuation line,
                             # e.g. '(secondary)'
                             Optional('ipaddress_extension'): str}
                        },
                    # non-Vlan interfaces: flat attributes
                    Optional('ip_address'): str,
                    Optional('interface_status'): str,
                    Optional('ipaddress_extension'): str}
                },
            }
# ====================================
# Parser for 'show ip interface brief'
# ====================================
class ShowIpInterfaceBrief(ShowIpInterfaceBriefSchema):
    """Parser for 'show ip interface brief'

    The schema guarantees the parser always returns the same nested-dict
    structure across all supported parsing mechanisms (cli(), yang(), xml()).
    """
    cli_command = 'show ip interface brief'
    exclude = [
        '(tunnel.*)']

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # self.cmd lets subclasses (e.g. the '| include Vlan' variant)
        # substitute their own cli_command without overriding cli().
        self.cmd = self.cli_command

    def cli(self, output=None):
        """Parse 'show ip interface brief' output.

        Args:
            output: pre-collected device output; when None, self.cmd is
                executed on the device.

        Returns:
            dict conforming to ShowIpInterfaceBriefSchema.
        """
        if output is None:
            out = self.device.execute(self.cmd)
        else:
            out = output

        # Compile the patterns once up front; the original version
        # re-compiled all three on every line of output.
        # Interface            IP Address      Interface Status
        p1 = re.compile(r'^\s*Interface +IP Address +Interface Status$')
        # Vlan177              192.168.0.1     protocol-up/link-up/admin-up
        p2 = re.compile(r'^\s*(?P<interface>[a-zA-Z0-9\/\.\-]+) +(?P<ip_address>[a-z0-9\.]+) +(?P<interface_status>[a-z\-\/]+)$')
        # Continuation line carrying an address qualifier, e.g. '(secondary)'
        p3 = re.compile(r'^\s*(?P<ipaddress_extension>\([a-z0-9]+\))$')

        interface_dict = {}
        for line in out.splitlines():
            line = line.rstrip()

            # Column-header line carries no data
            if p1.match(line):
                continue

            m = p2.match(line)
            if m:
                interface = m.groupdict()['interface']
                if 'interface' not in interface_dict:
                    interface_dict['interface'] = {}
                if interface not in interface_dict['interface']:
                    interface_dict['interface'][interface] = {}
                if 'Vlan' in interface:
                    # SVI: key attributes by the numeric VLAN id embedded
                    # in the interface name
                    vlan_id = str(int(re.search(r'\d+', interface).group()))
                    if 'vlan_id' not in interface_dict['interface'][interface]:
                        interface_dict['interface'][interface]['vlan_id'] = {}
                    if vlan_id not in interface_dict['interface'][interface]['vlan_id']:
                        interface_dict['interface'][interface]['vlan_id'][vlan_id] = {}
                    interface_dict['interface'][interface]['vlan_id'][vlan_id]['ip_address'] = \
                        m.groupdict()['ip_address']
                    interface_dict['interface'][interface]['vlan_id'][vlan_id]['interface_status'] = \
                        m.groupdict()['interface_status']
                else:
                    interface_dict['interface'][interface]['ip_address'] = \
                        m.groupdict()['ip_address']
                    interface_dict['interface'][interface]['interface_status'] = \
                        m.groupdict()['interface_status']
                continue

            m = p3.match(line)
            if m:
                # Qualifier applies to the most recently parsed interface;
                # assumes a p2 data row has already been seen, as in real
                # device output.
                ipaddress_extension = m.groupdict()['ipaddress_extension']
                if 'Vlan' in interface:
                    new_ip_address = interface_dict['interface']\
                        [interface]['vlan_id'][vlan_id]['ip_address'] + ipaddress_extension
                    interface_dict['interface'][interface]['vlan_id'][vlan_id]['ip_address'] = \
                        new_ip_address
                else:
                    new_ip_address = interface_dict['interface']\
                        [interface]['ip_address'] + ipaddress_extension
                    interface_dict['interface'][interface]['ip_address'] = new_ip_address
                continue

        return interface_dict
# ===========================================
# Parser for 'show ip interface brief | vlan'
# ===========================================
class ShowIpInterfaceBriefPipeVlan(ShowIpInterfaceBrief):
    """Parser for 'show ip interface brief | include Vlan'

    Identical parsing logic to ShowIpInterfaceBrief; only the executed
    CLI command differs (output pre-filtered to Vlan interfaces on the
    device side).
    """
    cli_command = 'show ip interface brief | include Vlan'

    def __init__(self, *args, **kwargs):
        # Let the parent wire everything up, then point self.cmd at the
        # filtered command defined on this class.
        super().__init__(*args, **kwargs)
        self.cmd = type(self).cli_command
# =================================
# Schema for 'show interface brief'
# =================================
class ShowInterfaceBriefSchema(MetaParser):
    """Schema for show interface brief

    Output is grouped by interface kind: 'ethernet' rows are mandatory
    when present; 'port' (mgmt), 'port_channel' and 'loopback' sections
    appear only when the corresponding table exists in the output.
    """
    schema = {'interface':
                {'ethernet':
                    {Any():
                        {'vlan': str,
                         'type': str,
                         'mode': str,
                         'status': str,
                         'speed': str,
                         'reason': str,
                         'port_ch': str}
                    },
                # mgmt/front-panel ports with an IP address
                Optional('port'):
                    {Any():
                        {Optional('vrf'): str,
                         Optional('status'): str,
                         Optional('ip_address'): str,
                         Optional('speed'): str,
                         Optional('mtu'): int}
                    },
                Optional('port_channel'):
                    {Any():
                        {Optional('vlan'): str,
                         Optional('type'): str,
                         Optional('mode'): str,
                         Optional('status'): str,
                         Optional('speed'): str,
                         Optional('reason'): str,
                         Optional('protocol'): str}
                    },
                Optional('loopback'):
                    {Any():
                        {Optional('status'): str,
                         Optional('description'): str}
                    },
                }
            }
# =================================
# Parser for 'show interface brief'
# =================================
class ShowInterfaceBrief(ShowInterfaceBriefSchema):
    '''Parser for:
        * show interface brief
        * show interface {interface} brief
    '''

    cli_command = ['show interface brief',
                   'show interface {interface} brief']
    exclude = ['reason']

    def cli(self, interface=None, output=None):
        """Parse 'show interface brief' (optionally for one interface).

        Args:
            interface: optional interface name inserted into the command.
            output: pre-collected output; when None the command is executed.

        Returns:
            dict conforming to ShowInterfaceBriefSchema.

        NOTE(review): each data pattern (p2/p4/p6/p8) stores into a section
        dict bound by the preceding header pattern (p1/p3/p5/p7); a data
        row appearing before its header would raise NameError — assumed
        impossible in real device output.
        """
        if output is None:
            # Determine command
            if interface:
                cmd = self.cli_command[1].format(interface=interface)
            else:
                cmd = self.cli_command[0]
            # Execute command
            output = self.device.execute(cmd)

        # Init
        parsed_dict = {}

        # Port   VRF          Status   IP Address                              Speed    MTU
        p1 = re.compile(r'^Port +VRF +Status +IP Address +Speed +MTU$')

        # mgmt0  --           up       172.25.143.76                           1000     1500
        p2 = re.compile(r'^(?P<port>[a-zA-Z0-9]+) +(?P<vrf>[a-zA-Z0-9\-]+)'
                        ' +(?P<status>[a-zA-Z]+) +(?P<ip_address>(\S+))'
                        ' +(?P<speed>[0-9]+) +(?P<mtu>[0-9]+)$')

        # Ethernet      VLAN    Type Mode   Status  Reason                   Speed     Port
        p3 = re.compile(r'^Ethernet +VLAN +Type +Mode +Status +Reason +Speed'
                        ' +Port$')

        # Eth1/6        1       eth  access down    Link not connected         auto(D) --
        p4 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/]+) +(?P<vlan>[a-zA-Z0-9\-]+)'
                        ' +(?P<type>[a-zA-Z]+) +(?P<mode>[a-z]+)'
                        ' +(?P<status>[a-z]+) +(?P<reason>[a-zA-Z\s]+)'
                        ' +(?P<speed>[0-9a-zA-Z\(\)\s]+)'
                        ' +(?P<port>[0-9\-]+)$')

        # Port-channel VLAN    Type Mode   Status  Reason                    Speed   Protocol
        p5 = re.compile(r'^Port-channel +VLAN +Type +Mode +Status +Reason'
                        ' +Speed +Protocol$')

        # Po8          1       eth  access down    No operational members      auto(I)  none
        p6 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/]+) +(?P<vlan>[a-zA-Z0-9\-]+)'
                        ' +(?P<type>[a-zA-Z]+) +(?P<mode>[a-z]+)'
                        ' +(?P<status>[a-z]+) +(?P<reason>[a-zA-Z\s]+)'
                        ' +(?P<speed>[0-9a-zA-Z\(\)\s]+)'
                        ' +(?P<protocol>[a-zA-Z0-9\-]+)$')

        # Interface     Status     Description
        p7 = re.compile(r'^Interface +Status +Description$')

        # Lo0           up         --
        p8 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/]+) +(?P<status>[a-z]+)'
                        ' +(?P<description>[a-zA-Z\s\-]+)$')

        for line in output.splitlines():
            line = line.strip()

            # Port   VRF          Status   IP Address                              Speed    MTU
            m = p1.match(line)
            if m:
                # Header opens the 'port' section for subsequent p2 rows
                port_dict = parsed_dict.setdefault('interface', {}).\
                    setdefault('port', {})
                continue

            # mgmt0  --           up       172.25.143.76                           1000     1500
            m = p2.match(line)
            if m:
                group = m.groupdict()
                intf_dict = port_dict.\
                    setdefault(Common.convert_intf_name(group['port']), {})
                intf_dict['vrf'] = group['vrf']
                intf_dict['status'] = group['status']
                intf_dict['ip_address'] = group['ip_address']
                intf_dict['speed'] = group['speed']
                intf_dict['mtu'] = int(group['mtu'])
                continue

            # Ethernet      VLAN    Type Mode   Status  Reason                   Speed     Port
            m = p3.match(line)
            if m:
                # Header opens the 'ethernet' section for subsequent p4 rows
                eth_dict = parsed_dict.setdefault('interface', {}).\
                    setdefault('ethernet', {})
                continue

            # Eth1/6        1       eth  access down    Link not connected         auto(D) --
            m = p4.match(line)
            if m:
                group = m.groupdict()
                intf_dict = eth_dict.\
                    setdefault(Common.convert_intf_name(group['interface']), {})
                intf_dict['vlan'] = group['vlan']
                intf_dict['type'] = group['type']
                intf_dict['mode'] = group['mode']
                intf_dict['status'] = group['status']
                # reason column is free text padded with spaces
                intf_dict['reason'] = group['reason'].strip()
                intf_dict['speed'] = group['speed']
                intf_dict['port_ch'] = group['port']
                continue

            # Port-channel VLAN    Type Mode   Status  Reason                    Speed   Protocol
            m = p5.match(line)
            if m:
                # Header opens the 'port_channel' section for subsequent p6 rows
                pch_dict = parsed_dict.setdefault('interface', {}).\
                    setdefault('port_channel', {})
                continue

            # Po8          1       eth  access down    No operational members      auto(I)  none
            m = p6.match(line)
            if m:
                group = m.groupdict()
                intf_dict = pch_dict.\
                    setdefault(Common.convert_intf_name(group['interface']), {})
                intf_dict['vlan'] = group['vlan']
                intf_dict['type'] = group['type']
                intf_dict['mode'] = group['mode']
                intf_dict['status'] = group['status']
                intf_dict['reason'] = group['reason'].strip()
                intf_dict['speed'] = group['speed']
                intf_dict['protocol'] = group['protocol']
                continue

            # Interface     Status     Description
            m = p7.match(line)
            if m:
                # Header opens the 'loopback' section for subsequent p8 rows
                loopback_dict = parsed_dict.setdefault('interface', {}).\
                    setdefault('loopback', {})
                continue

            # Lo0           up         --
            m = p8.match(line)
            if m:
                group = m.groupdict()
                intf_dict = loopback_dict.\
                    setdefault(Common.convert_intf_name(group['interface']), {})
                intf_dict['status'] = group['status']
                intf_dict['description'] = group['description']
                continue

        return parsed_dict
# =================================================
# Schema for 'show running-config interface <WORD>'
# =================================================
class ShowRunningConfigInterfaceSchema(MetaParser):
    """Schema for show running-config interface <WORD>

    Each interface name maps to the configuration statements found under
    its 'interface' stanza; NVE-specific keys ('member_vni',
    'source_interface', ...) appear only for nve interfaces.
    """
    schema = {'interface':
                {Any():
                    {Optional('shutdown'): bool,
                     Optional('switchport'): bool,
                     Optional('switchport_mode'): str,
                     # raw allowed-VLAN list string, e.g. '1-99,101-199'
                     Optional('trunk_vlans'): str,
                     Optional('port_channel'):{
                         Optional('port_channel_mode'): str,
                         Optional('port_channel_int'): str,
                     },
                     Optional('host_reachability_protocol'): str,
                     Optional('source_interface'): str,
                     # keyed by VNI number (ranges are expanded per member)
                     Optional('member_vni'):
                        {Any():
                            {Optional('associate_vrf'): bool,
                             Optional('mcast_group'): str,
                             Optional('suppress_arp'): bool,
                            }
                        },
                    }
                },
            }
# =================================================
# Parser for 'show running-config interface <WORD>'
# =================================================
class ShowRunningConfigInterface(ShowRunningConfigInterfaceSchema):
    """Parser for show running-config interface <WORD>"""

    cli_command = 'show running-config interface {interface}'

    def cli(self, interface, output=None):
        """Parse the running configuration of a single interface.

        Args:
            interface: interface name substituted into the CLI command.
            output: pre-collected output; when None the command is executed.

        Returns:
            dict conforming to ShowRunningConfigInterfaceSchema.
        """
        if output is None:
            out = self.device.execute(self.cli_command.format(interface=interface))
        else:
            out = output

        # Compile all patterns once up front; the original re-compiled
        # every pattern on every line of output.

        # interface nve1
        # interface Ethernet1/1
        p1 = re.compile(r'^interface +(?P<intf_name>\S+)$')
        # no shutdown
        p2 = re.compile(r'^\s*no shutdown$')
        # host-reachability protocol bgp
        p3 = re.compile(r'^\s*host-reachability protocol +(?P<protocol>[a-zA-Z]+)$')
        # source-interface loopback1
        p4 = re.compile(r'^\s*source-interface +(?P<src_intf>[a-zA-Z0-9\-]+)$')
        # member vni 8100
        # member vni 9100 associate-vrf
        # member vni 2001201-2001300
        p5 = re.compile(r'^\s*member vni +(?P<vni>[0-9\-]+)( +(?P<associate_vrf>[a-zA-Z\-]+))?$')
        # mcast-group 225.0.1.25
        p6 = re.compile(r'^\s*mcast-group +(?P<ip>[0-9\.]+)$')
        # suppress-arp
        p7 = re.compile(r'^\s*suppress-arp$')
        # switchport
        p8 = re.compile(r'^switchport$')
        # switchport mode trunk
        p9 = re.compile(r'^switchport +mode +(?P<mode>\S+)$')
        # switchport trunk allowed vlan 1-99,101-199,201-1399,1401-4094
        p10 = re.compile(r'^switchport +trunk +allowed +vlan +(?P<trunk_vlans>\S+)$')
        # channel-group 1 mode active
        p11 = re.compile(r'^channel-group +(?P<port_channel_int>\d+) +mode +(?P<mode>\S+)$')

        # Init vars
        ret_dict = {}
        # VNIs of the most recent 'member vni' line; following mcast-group /
        # suppress-arp lines attach to these. Initialized empty so a stray
        # mcast-group/suppress-arp before any 'member vni' is ignored
        # instead of raising NameError.
        members = []

        for line in out.splitlines():
            line = line.strip()

            m = p1.match(line)
            if m:
                interface = str(m.groupdict()['intf_name'])
                interface_dict = ret_dict.setdefault('interface', {}). \
                    setdefault(interface, {})
                continue

            m = p2.match(line)
            if m:
                interface_dict['shutdown'] = False
                continue

            m = p3.match(line)
            if m:
                interface_dict['host_reachability_protocol'] = \
                    str(m.groupdict()['protocol'])
                continue

            m = p4.match(line)
            if m:
                interface_dict['source_interface'] = \
                    str(m.groupdict()['src_intf'])
                continue

            m = p5.match(line)
            if m:
                if 'member_vni' not in interface_dict:
                    interface_dict['member_vni'] = {}
                vni = str(m.groupdict()['vni'])
                if '-' in vni:
                    # VNI range, e.g. 2001201-2001300: expand to every member
                    vni_range = re.findall(r'(?P<first_vni>[0-9]+)\-(?P<last_vni>[0-9]+)?$', vni)
                    members = range(int(vni_range[0][0]), int(vni_range[0][1])+1)
                else:
                    members = [vni]
                for memb in members:
                    interface_dict['member_vni'][str(memb)] = {}
                    if m.groupdict()['associate_vrf']:
                        interface_dict['member_vni'][str(memb)]['associate_vrf'] = \
                            True
                continue

            m = p6.match(line)
            if m:
                # Applies to every VNI of the preceding 'member vni' line
                for memb in members:
                    interface_dict['member_vni'][str(memb)]['mcast_group'] = \
                        str(m.groupdict()['ip'])
                continue

            m = p7.match(line)
            if m:
                for memb in members:
                    interface_dict['member_vni'][str(memb)]['suppress_arp'] = \
                        True
                continue

            m = p8.match(line)
            if m:
                interface_dict.update({'switchport': True})
                continue

            m = p9.match(line)
            if m:
                group = m.groupdict()
                interface_dict.update({'switchport_mode': group['mode']})
                continue

            m = p10.match(line)
            if m:
                group = m.groupdict()
                interface_dict.update({'trunk_vlans': group['trunk_vlans']})
                continue

            m = p11.match(line)
            if m:
                group = m.groupdict()
                port_channel_dict = interface_dict.setdefault('port_channel', {})
                port_channel_dict.update({'port_channel_int': group['port_channel_int']})
                port_channel_dict.update({'port_channel_mode': group['mode']})
                continue

        return ret_dict
# ===============================
# Schema for 'show nve interface'
# ===============================
class ShowNveInterfaceSchema(MetaParser):
    """Schema for show nve interface

    Each nve interface maps to its state plus optional encapsulation,
    source-interface addresses and VPC capability flags.
    """
    schema = {'interface':
                {Any():
                    {'state': str,
                     Optional('encapsulation'): str,
                     # keyed by source interface name (e.g. 'loopback0')
                     Optional('source_interface'):
                        {Any():
                            {Optional('primary'): str,
                             Optional('secondary'): str,
                            }
                        },
                     # keyed by capability name (e.g. 'VPC-VIP-Only')
                     Optional('vpc_capability'):
                        {Any():
                            {Optional('notified'): bool,
                            }
                        },
                    }
                },
            }
# ===============================
# Parser for 'show nve interface'
# ===============================
class ShowNveInterface(ShowNveInterfaceSchema):
    """Parser for show nve interface"""

    cli_command = 'show nve interface {interface} detail'

    def cli(self, interface, output=None):
        """Parse 'show nve interface <intf> detail' into a nested dict."""
        if output is not None:
            out = output
        else:
            cmd = self.cli_command.format(interface=interface) if interface else ""
            out = self.device.execute(cmd)

        # Interface: nve1, State: Up, encapsulation: VXLAN
        p1 = re.compile(r'^\s*Interface: +(?P<intf>[\w]+)\,'
                        ' +State: +(?P<state>[\w]+)\, +encapsulation:'
                        ' +(?P<encapsulation>[\w]+)$')
        # Source-Interface: loopback0 (primary: 10.4.0.1, secondary: 0.0.0.0)
        p2 = re.compile(r'^\s*Source-Interface: +(?P<src_intf>[a-zA-Z0-9\-]+)'
                        ' +\(primary: +(?P<primary>[a-zA-Z0-9\.]+)\, +secondary:'
                        ' +(?P<secondary>[a-zA-Z0-9\.]+)\)$')
        # VPC Capability: VPC-VIP-Only [not-notified]
        p3 = re.compile(r'^\s*VPC Capability: +(?P<vpc>[a-zA-Z0-9\-]+)'
                        ' +\[(?P<notified>[a-zA-Z\-]+)\]$')

        parsed = {}
        for line in out.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                group = m.groupdict()
                intf = str(group['intf'])
                # All subsequent detail lines attach to this interface
                intf_dict = parsed.setdefault('interface', {}).setdefault(intf, {})
                intf_dict['state'] = str(group['state'])
                intf_dict['encapsulation'] = str(group['encapsulation'])
                continue

            m = p2.match(line)
            if m:
                group = m.groupdict()
                src_dict = intf_dict.setdefault('source_interface', {}).\
                    setdefault(str(group['src_intf']), {})
                src_dict['primary'] = str(group['primary'])
                src_dict['secondary'] = str(group['secondary'])
                continue

            m = p3.match(line)
            if m:
                group = m.groupdict()
                vpc_dict = intf_dict.setdefault('vpc_capability', {}).\
                    setdefault(str(group['vpc']), {})
                # '[notified]' -> True, anything else (e.g. 'not-notified') -> False
                vpc_dict['notified'] = str(group['notified']) == 'notified'
                continue

        return parsed
# ============================================
# Schema for 'show ip interface brief vrf all'
# ============================================
class ShowIpInterfaceBriefVrfAllSchema(MetaParser):
    """Schema for show ip interface brief vrf all

    Flat mapping: interface name -> ip_address / interface_status.
    """
    schema = {'interface':
                {Any():
                    {Optional('ip_address'): str,
                     # e.g. 'protocol-up/link-up/admin-up'
                     Optional('interface_status'): str}
                },
            }
# ============================================
# Schema for 'show ip interface brief vrf all'
# ============================================
class ShowIpInterfaceBriefVrfAll(ShowIpInterfaceBriefVrfAllSchema):
    """Parser for show ip interface brief vrf all

    The schema guarantees the same nested-dict structure across all
    supported parsing mechanisms (cli(), yang(), xml()).
    """
    cli_command = ['show ip interface brief vrf all | include {ip}', 'show ip interface brief vrf all']

    def cli(self, ip='', output=None):
        """Parse the output; optionally filtered on the device by `ip`."""
        if output is not None:
            out = output
        elif ip:
            out = self.device.execute(self.cli_command[0].format(ip=ip))
        else:
            out = self.device.execute(self.cli_command[1])

        # mgmt0                10.255.5.169    protocol-up/link-up/admin-up
        p = re.compile(r'^\s*(?P<interface>[a-zA-Z0-9\/\.\-]+) '
                       '+(?P<ip_address>[a-z0-9\.]+) +(?P<interface_status>[a-z\-\/]+)$')

        parsed = {}
        for line in out.splitlines():
            m = p.match(line.rstrip())
            if not m:
                continue
            group = m.groupdict()
            intf_dict = parsed.setdefault('interface', {}).\
                setdefault(group['interface'], {})
            intf_dict['ip_address'] = str(group['ip_address'])
            intf_dict['interface_status'] = str(group['interface_status'])

        return parsed
#############################################################################
# Parser For show interface Description
#############################################################################
class ShowInterfaceDescriptionSchema(MetaParser):
    """schema for show interface description

    'type' and 'speed' are optional because some rows (e.g. management
    interfaces) omit those columns.
    """
    schema = {
        'interfaces': {
            Any(): {
                Optional('type'): str,
                Optional('speed'): str,
                'description': str
            }
        }
    }
class ShowInterfaceDescription(ShowInterfaceDescriptionSchema):
    """Parser for:
        * show interface description
        * show interface {interface} description
    """
    cli_command = ['show interface description', 'show interface {interface} description']

    def cli(self, interface="", output=None):
        """Parse 'show interface description' output.

        Args:
            interface: optional interface name inserted into the command.
            output: pre-collected output; when None the command is executed.

        Returns:
            dict conforming to ShowInterfaceDescriptionSchema.
        """
        if output is None:
            if interface:
                cmd = self.cli_command[1].format(interface=interface)
            else:
                cmd = self.cli_command[0]
            out = self.device.execute(cmd)
        else:
            out = output

        result_dict = {}

        # -------------------------------------------------------------------------------
        # Port          Type   Speed   Description
        # -------------------------------------------------------------------------------
        # Eth1/1        eth    10G     --
        # type/speed groups are optional: some rows omit those columns.
        p1 = re.compile(r'(?P<interface>(\S+)) +(?P<type>(\S+))? +(?P<speed>(\S+))? +(?P<description>(.*))$')

        for line in out.splitlines():
            line = line.strip()
            line = line.replace('\t', '')

            m = p1.match(line)
            # The column-header row also matches p1; skip it by its
            # 'Description' cell. (The dashed separator rows contain no
            # spaces and never match.)
            if m and m.groupdict()['description'] != 'Description':
                group = m.groupdict()
                intf = Common.convert_intf_name(group['interface'])
                intf_dict = result_dict.setdefault('interfaces', {}).setdefault(intf, {})
                if group['type'] is not None:
                    intf_dict['type'] = str(group['type'])
                if group['speed'] is not None:
                    intf_dict['speed'] = str(group['speed'])
                intf_dict['description'] = str(group['description'])
                continue

        return result_dict
| 43.635455 | 133 | 0.496503 |
import re
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, Any, Optional
from genie.libs.parser.utils.common import Common
class ShowInterfaceSchema(MetaParser):
    """Schema for show interface

    Top level is keyed directly by interface name; 'oper_status' and
    'enabled' are the only mandatory keys, everything else depends on
    the interface type and device output.
    """
    schema = {
        Any():
            {Optional('description'): str,
             Optional('types'): str,
             Optional('parent_interface'): str,
             'oper_status': str,
             Optional('admin_state'): str,
             Optional('dedicated_intface'): bool,
             Optional('line_protocol'): str,
             Optional('autostate'): bool,
             Optional('link_state'): str,
             Optional('phys_address'): str,
             Optional('port_speed'): str,
             Optional('mtu'): int,
             'enabled': bool,
             Optional('mac_address'): str,
             Optional('auto_negotiate'): bool,
             Optional('duplex_mode'): str,
             Optional('port_mode'): str,
             Optional('auto_mdix'): str,
             Optional('switchport_monitor'): str,
             Optional('efficient_ethernet'): str,
             Optional('last_link_flapped'): str,
             Optional('interface_reset'): int,
             Optional('ethertype'): str,
             Optional('beacon'): str,
             Optional('medium'): str,
             # reliability/txload/rxload kept as raw 'n/255' strings
             Optional('reliability'): str,
             Optional('txload'): str,
             Optional('rxload'): str,
             Optional('delay'): int,
             Optional('media_type'): str,
             Optional('flow_control'):
                 {Optional('receive'): bool,
                  Optional('send'): bool,
                  },
             Optional('port_channel'):
                 {Optional('port_channel_member'): bool,
                  Optional('port_channel_int'): str,
                  Optional('port_channel_member_intfs'): list
                  },
             Optional('bandwidth'): int,
             # traffic statistics from the counters section of the output
             Optional('counters'):
                 {Optional('rate'):
                      {Optional('load_interval'): int,
                       Optional('in_rate'): int,
                       Optional('in_rate_pkts'): int,
                       Optional('out_rate'): int,
                       Optional('out_rate_pkts'): int,
                       Optional('in_rate_bps'): int,
                       Optional('in_rate_pps'): int,
                       Optional('out_rate_bps'): int,
                       Optional('out_rate_pps'): int,
                       },
                  Optional('in_unicast_pkts'): int,
                  Optional('in_multicast_pkts'): int,
                  Optional('in_broadcast_pkts'): int,
                  Optional('in_discards'): int,
                  Optional('in_crc_errors'): int,
                  Optional('in_oversize_frames'): int,
                  Optional('in_pkts'): int,
                  Optional('in_mac_pause_frames'): int,
                  Optional('in_jumbo_packets'): int,
                  Optional('in_storm_suppression_packets'): int,
                  Optional('in_runts'): int,
                  Optional('in_oversize_frame'): int,
                  Optional('in_overrun'): int,
                  Optional('in_underrun'): int,
                  Optional('in_ignored'): int,
                  Optional('in_watchdog'): int,
                  Optional('in_bad_etype_drop'): int,
                  Optional('in_unknown_protos'): int,
                  Optional('in_if_down_drop'): int,
                  Optional('in_with_dribble'): int,
                  Optional('in_discard'): int,
                  Optional('in_octets'): int,
                  Optional('in_errors'): int,
                  Optional('in_short_frame'): int,
                  Optional('in_no_buffer'): int,
                  Optional('out_pkts'): int,
                  Optional('out_unicast_pkts'): int,
                  Optional('out_multicast_pkts'): int,
                  Optional('out_broadcast_pkts'): int,
                  Optional('out_discard'): int,
                  Optional('out_octets'): int,
                  Optional('out_jumbo_packets'): int,
                  Optional('out_errors'): int,
                  Optional('out_collision'): int,
                  Optional('out_deferred'): int,
                  Optional('out_late_collision'): int,
                  Optional('out_lost_carrier'): int,
                  Optional('out_no_carrier'): int,
                  Optional('out_babble'): int,
                  Optional('last_clear'): str,
                  Optional('tx'): bool,
                  Optional('rx'): bool,
                  Optional('out_mac_pause_frames'): int,
                  },
             Optional('encapsulations'):
                 {Optional('encapsulation'): str,
                  Optional('first_dot1q'): str,
                  Optional('native_vlan'): int,
                  },
             # IPv4 addresses keyed by 'ip/prefix_length'
             Optional('ipv4'):
                 {Any():
                      {Optional('ip'): str,
                       Optional('prefix_length'): str,
                       Optional('secondary'): bool,
                       Optional('route_tag'): str
                       },
                  },
             },
        }
class ShowInterface(ShowInterfaceSchema):
    """Parser for:
        * 'show interface'
        * 'show interface {interface}'
    """

    cli_command = ['show interface', 'show interface {interface}']

    # Counter/rate keys excluded when comparing parsed outputs (they change
    # between polls without indicating a functional difference).
    exclude = [
        'in_unicast_pkts',
        'out_unicast_pkts',
        'in_octets',
        'out_octets',
        'in_pkts',
        'out_pkts',
        'in_multicast_pkts',
        'out_multicast_pkts',
        'in_rate',
        'out_rate',
        'in_broadcast_pkts',
        'out_broadcast_pkts',
        'last_link_flapped',
        'in_rate_pkts',
        'out_rate_pkts',
        'out_rate_bps',
        'in_rate_bps',
        'interface_reset',
        'in_rate_pps',
        'out_rate_pps',
        'last_clear',
        'out_jumbo_packets',
        'in_jumbo_packets',
        'rxload',
        'txload',
        'in_errors',
        'mac_address',
        'phys_address',
        'in_crc_errors',
        'reliability']

    def cli(self, interface="", output=None):
        """Parse 'show interface' output into a per-interface dict.

        Args:
            interface (str): optional interface name used to scope the
                executed command.
            output (str): pre-collected device output; when None the
                command is executed on the device.

        Returns:
            dict: parsed data keyed by interface name, matching
                ShowInterfaceSchema.
        """
        if output is None:
            if interface:
                cmd = self.cli_command[1].format(interface=interface)
            else:
                cmd = self.cli_command[0]
            out = self.device.execute(cmd)
        else:
            out = output

        # <intf> is down (<link state>)
        p1 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/\.\-]+) *is'
                        ' *(?P<enabled>(down))'
                        '( *\((?P<link_state>[a-zA-Z0-9\-\s]+)\))?$')

        # <intf> is <state> (<link state>), line protocol is <proto>[, autostate <x>]
        p1_1 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/\.\-]+) *is'
                          ' *(?P<enabled>[\w\s]+)'
                          '( *\((?P<link_state>[\w\-\/\s]+)\))?, +'
                          'line +protocol +is +(?P<line_protocol>\w+),? *'
                          '(autostate +(?P<autostate>\w+))?$')

        # <intf> is up (<link state>)
        p1_2 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/\.\-]+) *is'
                          ' *(?P<enabled>(up))'
                          '( *\((?P<link_state>[a-zA-Z\s]+)\))?$')

        # admin state is <state>[, Dedicated Interface][, [parent interface is <intf>]]
        p2 = re.compile(r'^admin +state +is'
                        ' +(?P<admin_state>([a-zA-Z0-9\/\.]+))(?:,)?'
                        '(?: +(?P<dedicated_intf>(Dedicated Interface)))?'
                        '(?:, +\[parent +interface +is'
                        ' +(?P<parent_intf>(\S+))\])?$')

        # Dedicated Interface
        p2_1 = re.compile(r'^Dedicated Interface$')

        # Belongs to <port-channel>
        p2_2 = re.compile(r'^Belongs *to *(?P<port_channel_int>[a-zA-Z0-9]+)$')

        # Hardware: <types>, address: <mac> (bia <mac>)
        p3 = re.compile(r'^Hardware: *(?P<types>[a-zA-Z0-9\/\s]+),'
                        ' *address: *(?P<mac_address>[a-z0-9\.]+)'
                        ' *\(bia *(?P<phys_address>[a-z0-9\.]+)\)$')

        # Description: <text>
        p4 = re.compile(r'^Description: *(?P<description>.*)$')

        # Internet Address is <ip>/<len> [secondary] [tag <n>]
        p5 = re.compile(r'^Internet *Address *is *(?P<ip>[0-9\.]+)'
                        '\/(?P<prefix_length>[0-9]+)'
                        '(?: *(?P<secondary>(secondary)))?(?: *tag'
                        ' *(?P<route_tag>[0-9]+))?$')

        # MTU <n> bytes, BW <n> Kbit[, DLY <n> usec]
        p6 = re.compile(r'^MTU *(?P<mtu>[0-9]+) *bytes, *BW'
                        ' *(?P<bandwidth>[0-9]+) *Kbit(, *DLY'
                        ' *(?P<delay>[0-9]+) *usec)?,?$')

        # MTU <n> bytes, BW <n> Kbit, , BW <n> Kbit, DLY <n> usec
        p6_1 = re.compile(r'^MTU *(?P<mtu>[0-9]+) *bytes, *BW'
                          ' *(?P<bandwidth>[0-9]+) *Kbit, *,? *BW'
                          ' *([0-9]+) *Kbit, *DLY'
                          ' *(?P<delay>[0-9]+) *usec$')

        # reliability <x>, txload <x>, rxload <x>
        p7 = re.compile(r'^reliability *(?P<reliability>[0-9\/]+),'
                        ' *txload *(?P<txload>[0-9\/]+),'
                        ' *rxload *(?P<rxload>[0-9\/]+)$')

        # Encapsulation <encap>, medium is <medium>
        p8 = re.compile(r'^Encapsulation *(?P<encapsulation>[a-zA-Z0-9\.\s]+),'
                        ' *medium *is *(?P<medium>[a-zA-Z]+)$')

        # Encapsulation <encap>, Vlan ID <n>, medium is <medium>
        p8_1 = re.compile(r'^Encapsulation *(?P<encapsulation>[a-zA-Z0-9\.\s]+),'
                          ' *Vlan *ID *(?P<first_dot1q>[0-9]+),'
                          ' *medium *is *(?P<medium>[a-z0-9]+)$')

        # Encapsulation <encap>, <anything else>
        p8_2 = re.compile(r'^Encapsulation *(?P<encapsulation>[a-zA-Z0-9\.\s]+),'
                          ' *([\w\s]+)$')

        # Port mode is <mode>
        p9 = re.compile(r'^Port *mode *is *(?P<port_mode>[a-z]+)$')

        # auto-duplex, auto-speed (intentionally ignored)
        p10_1 = re.compile(r'^auto-duplex, +auto-speed$')

        # <mode>-duplex, <speed>[ Gb/s|Mb/s][, media type is <type>]
        p10 = re.compile(r'^(?P<duplex_mode>[a-z]+)-duplex, *(?P<port_speed>[a-z0-9\-]+)(?: '
                         '*[G|M]b/s)?(?:, +media +type +is (?P<media_type>\w+))?$')

        # Beacon is turned <on|off>
        p11 = re.compile(r'^Beacon *is *turned *(?P<beacon>[a-z]+)$')

        # Auto-Negotiation is turned off
        p12 = re.compile(r'^Auto-Negotiation *is *turned'
                         ' *(?P<auto_negotiate>(off))$')

        # Auto-Negotiation is turned on
        p12_1 = re.compile(r'^Auto-Negotiation *is *turned'
                           ' *(?P<auto_negotiate>(on))$')

        # Input flow-control is off, output flow-control is off
        p13 = re.compile(r'^Input *flow-control *is *(?P<receive>(off)+),'
                         ' *output *flow-control *is *(?P<send>(off)+)$')

        # Input flow-control is on, output flow-control is on
        p13_1 = re.compile(r'^Input *flow-control *is *(?P<receive>(on)+),'
                           ' *output *flow-control *is *(?P<send>(on)+)$')

        # Auto-mdix is turned <on|off>
        p14 = re.compile(r'^Auto-mdix *is *turned *(?P<auto_mdix>[a-z]+)$')

        # Switchport monitor is <on|off>
        p15 = re.compile(r'^Switchport *monitor *is *(?P<switchport_monitor>[a-z]+)$')

        # EtherType is <value>
        p16 = re.compile(r'^EtherType *is *(?P<ethertype>[a-z0-9]+)$')

        # EEE (efficient-ethernet) : <value>
        p17 = re.compile(r'^EEE *\(efficient-ethernet\) *:'
                         ' *(?P<efficient_ethernet>[A-Za-z\/]+)$')

        # Last link flapped <time>
        p18 = re.compile(r'^Last *link *flapped'
                         ' *(?P<last_link_flapped>[a-z0-9\:]+)$')

        # Last clearing of "show interface" counters <time>
        p19 = re.compile(r'^Last *clearing *of *\"show *interface\"'
                         ' *counters *(?P<last_clear>[a-z0-9\:]+)$')

        # Last clearing of " " counters <time>
        p19_1 = re.compile(r'^Last *clearing *of *\" *\"'
                           ' *counters *(?P<last_clear>[a-z0-9\:]+)$')

        # <n> interface resets
        p20 = re.compile(r'^(?P<interface_reset>[0-9]+) *interface'
                         ' *resets$')

        # <interval> minute/second input rate <n> bits/sec, <n> packets/sec
        p21 = re.compile(r'^(?P<load_interval>[0-9\#]+)'
                         ' *(minute|second|minutes|seconds) *input *rate'
                         ' *(?P<in_rate>[0-9]+) *bits/sec,'
                         ' *(?P<in_rate_pkts>[0-9]+) *packets/sec$')

        # <interval> minute/second output rate <n> bits/sec, <n> packets/sec
        p22 = re.compile(r'^(?P<load_interval>[0-9\#]+)'
                         ' *(minute|second|minutes|seconds) *output'
                         ' *rate *(?P<out_rate>[0-9]+)'
                         ' *bits/sec, *(?P<out_rate_pkts>[0-9]+)'
                         ' *packets/sec$')

        # input rate <n> bps, <n> pps; output rate <n> bps, <n> pps
        p23 = re.compile(r'^input *rate *(?P<in_rate_bps>[0-9]+) *bps,'
                         ' *(?P<in_rate_pps>[0-9]+) *pps; *output *rate'
                         ' *(?P<out_rate_bps>[0-9]+) *bps,'
                         ' *(?P<out_rate_pps>[0-9]+) *pps$')

        # RX  (marks the start of the receive-counters section)
        p23_1 = re.compile(r'^(?P<rx>(RX))$')

        # <n> unicast packets <n> multicast packets <n> broadcast packets
        p24 = re.compile(r'^(?P<in_unicast_pkts>[0-9]+) +unicast +packets'
                         ' +(?P<in_multicast_pkts>[0-9]+) +multicast +packets'
                         ' +(?P<in_broadcast_pkts>[0-9]+) +broadcast +packets$')

        # <n> input packets [<n> bytes] [<n> unicast packets <n> multicast packets]
        p25 = re.compile(r'^(?P<in_pkts>[0-9]+) +input +packets(?: '
                         '+(?P<in_octets>[0-9]+) +bytes)?(?: +(?P<in_unicast_pkts>[0-9]+) '
                         '+unicast +packets +(?P<in_multicast_pkts>[0-9]+) +multicast +packets)?$')

        # <n> jumbo packets <n> storm suppression packets
        p26 = re.compile(r'^(?P<in_jumbo_packets>[0-9]+) +jumbo +packets'
                         ' *(?P<in_storm_suppression_packets>[0-9]+)'
                         ' *storm *suppression *packets$')

        # <n> runts <n> giants <n> CRC[/FCS] <n> no buffer
        p27 = re.compile(r'^(?P<in_runts>[0-9]+) *runts'
                         ' *(?P<in_oversize_frame>[0-9]+) *giants'
                         ' *(?P<in_crc_errors>[0-9]+) *CRC(/FCS)?'
                         ' *(?P<in_no_buffer>[0-9]+) *no *buffer$')

        # <n> input error <n> short frame <n> overrun <n> underrun <n> ignored
        p28 = re.compile(r'^(?P<in_errors>[0-9]+) *input *error'
                         ' *(?P<in_short_frame>[0-9]+) *short *frame'
                         ' *(?P<in_overrun>[0-9]+) *overrun *(?P<in_underrun>[0-9]+)'
                         ' *underrun *(?P<in_ignored>[0-9]+) *ignored$')

        # <n> watchdog <n> bad etype drop <n> bad proto drop <n> if down drop
        p29 = re.compile(r'^(?P<in_watchdog>[0-9]+) *watchdog'
                         ' *(?P<in_bad_etype_drop>[0-9]+)'
                         ' *bad *etype *drop *(?P<in_unknown_protos>[0-9]+)'
                         ' *bad *proto'
                         ' *drop *(?P<in_if_down_drop>[0-9]+) *if *down *drop$')

        # <n> input with dribble <n> input discard
        p30 = re.compile(r'^(?P<in_with_dribble>[0-9]+) *input *with'
                         ' *dribble *(?P<in_discard>[0-9]+) *input *discard$')

        # <n> Rx pause
        p31 = re.compile(r'^(?P<in_mac_pause_frames>[0-9]+) *Rx *pause$')

        # TX  (marks the start of the transmit-counters section)
        p31_1 = re.compile(r'^(?P<tx>(TX))$')

        # <n> unicast packets <n> multicast packets <n> broadcast packets
        p32 = re.compile(r'^(?P<out_unicast_pkts>[0-9]+) *unicast *packets'
                         ' *(?P<out_multicast_pkts>[0-9]+) *multicast *packets'
                         ' *(?P<out_broadcast_pkts>[0-9]+) *broadcast *packets$')

        # <n> output packets <n> bytes
        p33 = re.compile(r'^(?P<out_pkts>[0-9]+) *output *packets'
                         ' *(?P<out_octets>[0-9]+) *bytes$')

        # <n> jumbo packets
        p34 = re.compile(r'^(?P<out_jumbo_packets>[0-9]+) *jumbo *packets$')

        # <n> output error <n> collision <n> deferred <n> late collision
        p35 = re.compile(r'^(?P<out_errors>[0-9]+) *output *error'
                         ' *(?P<out_collision>[0-9]+) *collision'
                         ' *(?P<out_deferred>[0-9]+) *deferred'
                         ' *(?P<out_late_collision>[0-9]+)'
                         ' *late *collision$')

        # <n> lost carrier <n> no carrier <n> babble <n> output discard
        p36 = re.compile(r'^(?P<out_lost_carrier>[0-9]+) *lost *carrier'
                         ' *(?P<out_no_carrier>[0-9]+) *no *carrier'
                         ' *(?P<out_babble>[0-9]+) *babble'
                         ' *(?P<out_discard>[0-9]+) *output *discard$')

        # <n> Tx pause
        p37 = re.compile(r'^(?P<out_mac_pause_frames>[0-9]+) *Tx *pause$')

        # Members in this channel: <intf list>
        p38 = re.compile(r'^Members +in +this +channel *: *'
                         '(?P<port_channel_member_intfs>[\w\/\.\-\,\s]+)$')

        # <n> broadcast packets <n> bytes
        p39 = re.compile(r'^(?P<in_broadcast_pkts>[0-9]+) +broadcast +packets +(?P<in_octets>[0-9]+) +bytes$')

        interface_dict = {}

        # rx/tx track whether we are inside the RX or TX counter section,
        # since the per-line counter formats are ambiguous between the two.
        rx = False
        tx = False

        for line in out.splitlines():
            line = line.replace('\t', ' ')
            line = line.strip()

            # <intf> is down (<link state>)
            m = p1.match(line)
            if m:
                interface = m.groupdict()['interface']
                enabled = m.groupdict()['enabled']
                link_state = m.groupdict()['link_state']

                if interface not in interface_dict:
                    interface_dict[interface] = {}
                    interface_dict[interface]['port_channel'] = {}
                    interface_dict[interface]['port_channel']\
                        ['port_channel_member'] = False
                if link_state:
                    interface_dict[interface]\
                        ['link_state'] = link_state

                interface_dict[interface]['enabled'] = False
                interface_dict[interface]['oper_status'] = 'down'
                continue

            # <intf> is <state>, line protocol is <proto>[, autostate <x>]
            m = p1_1.match(line)
            if m:
                interface = m.groupdict()['interface']
                enabled = m.groupdict()['enabled']
                link_state = m.groupdict()['link_state']
                line_protocol = m.groupdict()['line_protocol']
                autostate = m.groupdict()['autostate']

                if interface not in interface_dict:
                    interface_dict[interface] = {}
                    interface_dict[interface]['port_channel'] = {}
                    interface_dict[interface]['port_channel']\
                        ['port_channel_member'] = False
                if link_state:
                    interface_dict[interface]\
                        ['link_state'] = link_state

                if enabled:
                    enabled = enabled.lower()
                    interface_dict[interface]['enabled'] = False if 'down' in enabled else True
                    interface_dict[interface]['oper_status'] = enabled.strip()

                if line_protocol:
                    interface_dict[interface]['line_protocol'] = line_protocol.lower()

                if autostate:
                    interface_dict[interface]['autostate'] = True if \
                        autostate.lower() == 'enabled' else False
                continue

            # <intf> is up (<link state>)
            m = p1_2.match(line)
            if m:
                interface = m.groupdict()['interface']
                enabled = m.groupdict()['enabled']
                link_state = m.groupdict()['link_state']

                if interface not in interface_dict:
                    interface_dict[interface] = {}
                    interface_dict[interface]['port_channel'] = {}
                    interface_dict[interface]['port_channel']\
                        ['port_channel_member'] = False
                if link_state:
                    interface_dict[interface]\
                        ['link_state'] = link_state

                interface_dict[interface]['enabled'] = True
                interface_dict[interface]['oper_status'] = 'up'
                continue

            # admin state is <state>[, Dedicated Interface][, [parent interface is <intf>]]
            m = p2.match(line)
            if m:
                interface_dict[interface]['admin_state'] = \
                    m.groupdict()['admin_state']
                if m.groupdict()['dedicated_intf']:
                    interface_dict[interface]['dedicated_intface'] = True
                if m.groupdict()['parent_intf']:
                    interface_dict[interface]['parent_interface'] = \
                        m.groupdict()['parent_intf']
                continue

            # Dedicated Interface
            m = p2_1.match(line)
            if m:
                interface_dict[interface]['dedicated_intface'] = True
                continue

            # Belongs to <port-channel>
            m = p2_2.match(line)
            if m:
                port_channel_int = str(m.groupdict()['port_channel_int'])
                if 'port_channel' not in interface_dict[interface]:
                    interface_dict[interface]['port_channel'] = {}
                interface_dict[interface]['port_channel']\
                    ['port_channel_member'] = True
                interface_dict[interface]['port_channel']\
                    ['port_channel_int'] = Common.convert_intf_name(port_channel_int)
                continue

            # Hardware: <types>, address: <mac> (bia <mac>)
            m = p3.match(line)
            if m:
                types = m.groupdict()['types']
                mac_address = m.groupdict()['mac_address']
                phys_address = m.groupdict()['phys_address']

                interface_dict[interface]['types'] = types
                interface_dict[interface]\
                    ['mac_address'] = mac_address
                interface_dict[interface]\
                    ['phys_address'] = phys_address
                continue

            # Description: <text>
            m = p4.match(line)
            if m:
                description = m.groupdict()['description']
                interface_dict[interface]['description'] = description
                continue

            # Internet Address is <ip>/<len> [secondary] [tag <n>]
            m = p5.match(line)
            if m:
                ip = m.groupdict()['ip']
                prefix_length = str(m.groupdict()['prefix_length'])
                secondary = m.groupdict()['secondary']
                route_tag = m.groupdict()['route_tag']

                # Addresses are keyed as '<ip>/<prefix_length>'.
                address = ip + '/' + prefix_length

                if 'ipv4' not in interface_dict[interface]:
                    interface_dict[interface]['ipv4'] = {}
                if address not in interface_dict[interface]['ipv4']:
                    interface_dict[interface]['ipv4'][address] = {}

                interface_dict[interface]['ipv4'][address]\
                    ['ip'] = ip
                interface_dict[interface]['ipv4'][address]\
                    ['prefix_length'] = prefix_length

                if secondary:
                    interface_dict[interface]['ipv4'][address]\
                        ['secondary'] = True
                if route_tag:
                    interface_dict[interface]['ipv4'][address]\
                        ['route_tag'] = route_tag
                continue

            # MTU <n> bytes, BW <n> Kbit[, DLY <n> usec]
            m = p6.match(line)
            if m:
                mtu = int(m.groupdict()['mtu'])
                bandwidth = int(m.groupdict()['bandwidth'])
                if m.groupdict()['delay']:
                    interface_dict[interface]['delay'] = int(m.groupdict()['delay'])

                interface_dict[interface]['mtu'] = mtu
                interface_dict[interface]['bandwidth'] = bandwidth
                continue

            # MTU <n> bytes, BW <n> Kbit, , BW <n> Kbit, DLY <n> usec
            m = p6_1.match(line)
            if m:
                mtu = int(m.groupdict()['mtu'])
                bandwidth = int(m.groupdict()['bandwidth'])

                interface_dict[interface]['mtu'] = mtu
                interface_dict[interface]['bandwidth'] = bandwidth
                interface_dict[interface]['delay'] = int(m.groupdict()['delay'])
                continue

            # reliability <x>, txload <x>, rxload <x>
            m = p7.match(line)
            if m:
                reliability = m.groupdict()['reliability']
                txload = m.groupdict()['txload']
                rxload = m.groupdict()['rxload']

                interface_dict[interface]['reliability'] = reliability
                interface_dict[interface]['txload'] = txload
                interface_dict[interface]['rxload'] = rxload
                continue

            # Encapsulation <encap>, medium is <medium>
            m = p8.match(line)
            if m:
                encapsulation = m.groupdict()['encapsulation'].lower()
                encapsulation = encapsulation.replace("802.1q virtual lan","dot1q")
                medium = m.groupdict()['medium']

                if 'encapsulations' not in interface_dict[interface]:
                    interface_dict[interface]['encapsulations'] = {}

                interface_dict[interface]['encapsulations']\
                    ['encapsulation'] = encapsulation
                interface_dict[interface]['medium'] = medium
                continue

            # Encapsulation <encap>, Vlan ID <n>, medium is <medium>
            m = p8_1.match(line)
            if m:
                encapsulation = m.groupdict()['encapsulation'].lower()
                encapsulation = encapsulation.replace("802.1q virtual lan","dot1q")
                first_dot1q = str(m.groupdict()['first_dot1q'])
                medium = m.groupdict()['medium']

                if 'encapsulations' not in interface_dict[interface]:
                    interface_dict[interface]['encapsulations'] = {}

                interface_dict[interface]['encapsulations']\
                    ['encapsulation'] = encapsulation
                interface_dict[interface]['encapsulations']\
                    ['first_dot1q'] = first_dot1q
                interface_dict[interface]['medium'] = medium
                continue

            # Encapsulation <encap>, <anything else>
            m = p8_2.match(line)
            if m:
                encapsulation = m.groupdict()['encapsulation'].lower()

                if 'encapsulations' not in interface_dict[interface]:
                    interface_dict[interface]['encapsulations'] = {}

                interface_dict[interface]['encapsulations']\
                    ['encapsulation'] = encapsulation
                continue

            # Port mode is <mode>
            m = p9.match(line)
            if m:
                port_mode = m.groupdict()['port_mode']
                interface_dict[interface]['port_mode'] = port_mode
                continue

            # auto-duplex, auto-speed -- nothing to record
            m = p10_1.match(line)
            if m:
                continue

            # <mode>-duplex, <speed>[, media type is <type>]
            m = p10.match(line)
            if m:
                duplex_mode = m.groupdict()['duplex_mode'].lower()
                port_speed = m.groupdict()['port_speed']
                # media type is optional in the output
                if m.groupdict()['media_type']:
                    interface_dict[interface]['media_type'] = m.groupdict()['media_type']

                interface_dict[interface]['duplex_mode'] = duplex_mode
                interface_dict[interface]['port_speed'] = port_speed
                continue

            # Beacon is turned <on|off>
            m = p11.match(line)
            if m:
                beacon = m.groupdict()['beacon']
                interface_dict[interface]['beacon'] = beacon
                continue

            # Auto-Negotiation is turned off
            m = p12.match(line)
            if m:
                auto_negotiation = m.groupdict()['auto_negotiate']
                interface_dict[interface]['auto_negotiate'] = False
                continue

            # Auto-Negotiation is turned on
            m = p12_1.match(line)
            if m:
                auto_negotiation = m.groupdict()['auto_negotiate']
                interface_dict[interface]['auto_negotiate'] = True
                continue

            # Input flow-control is off, output flow-control is off
            m = p13.match(line)
            if m:
                receive = m.groupdict()['receive']
                send = m.groupdict()['send']

                if 'flow_control' not in interface_dict[interface]:
                    interface_dict[interface]['flow_control'] = {}

                interface_dict[interface]['flow_control']['receive'] = False
                interface_dict[interface]['flow_control']['send'] = False
                continue

            # Input flow-control is on, output flow-control is on
            m = p13_1.match(line)
            if m:
                receive = m.groupdict()['receive']
                send = m.groupdict()['send']

                if 'flow_control' not in interface_dict[interface]:
                    interface_dict[interface]['flow_control'] = {}

                interface_dict[interface]['flow_control']['receive'] = True
                interface_dict[interface]['flow_control']['send'] = True
                continue

            # Auto-mdix is turned <on|off>
            m = p14.match(line)
            if m:
                auto_mdix = m.groupdict()['auto_mdix']
                interface_dict[interface]['auto_mdix'] = auto_mdix
                continue

            # Switchport monitor is <on|off>
            m = p15.match(line)
            if m:
                switchport_monitor = m.groupdict()['switchport_monitor']
                interface_dict[interface]['switchport_monitor'] = switchport_monitor
                continue

            # EtherType is <value>
            m = p16.match(line)
            if m:
                ethertype = m.groupdict()['ethertype']
                interface_dict[interface]['ethertype'] = ethertype
                continue

            # Members in this channel: <intf list>
            m = p38.match(line)
            if m:
                port_channel_member_intfs = m.groupdict()['port_channel_member_intfs']
                if port_channel_member_intfs:
                    if 'port_channel' not in interface_dict[interface]:
                        interface_dict[interface]['port_channel'] = {}
                    interface_dict[interface]['port_channel']\
                        ['port_channel_member'] = True
                    interface_dict[interface]['port_channel']\
                        ['port_channel_member_intfs'] = [Common.convert_intf_name(item) \
                            for item in port_channel_member_intfs.split(',')]
                continue

            # EEE (efficient-ethernet) : <value>
            m = p17.match(line)
            if m:
                efficient_ethernet = m.groupdict()['efficient_ethernet']
                interface_dict[interface]['efficient_ethernet'] = efficient_ethernet
                continue

            # Last link flapped <time>
            m = p18.match(line)
            if m:
                last_link_flapped = m.groupdict()['last_link_flapped']
                interface_dict[interface]['last_link_flapped']\
                    = last_link_flapped
                continue

            # Last clearing of "show interface" counters <time>
            # last_clear is stored into 'counters' later (see p24 handler).
            m = p19.match(line)
            if m:
                last_clear = m.groupdict()['last_clear']
                continue

            # Last clearing of " " counters <time>
            m = p19_1.match(line)
            if m:
                last_clear = m.groupdict()['last_clear']
                continue

            # <n> interface resets
            m = p20.match(line)
            if m:
                interface_reset = int(m.groupdict()['interface_reset'])
                interface_dict[interface]['interface_reset'] = interface_reset
                continue

            # <interval> input rate <n> bits/sec, <n> packets/sec
            m = p21.match(line)
            if m:
                load_interval = int(m.groupdict()['load_interval'])
                in_rate = int(m.groupdict()['in_rate'])
                in_rate_pkts = int(m.groupdict()['in_rate_pkts'])

                if 'counters' not in interface_dict[interface]:
                    interface_dict[interface]['counters'] = {}
                if 'rate' not in interface_dict[interface]['counters']:
                    interface_dict[interface]['counters']['rate'] = {}

                interface_dict[interface]['counters']['rate']\
                    ['load_interval'] = load_interval
                interface_dict[interface]['counters']['rate']\
                    ['in_rate'] = in_rate
                interface_dict[interface]['counters']['rate']\
                    ['in_rate_pkts'] = in_rate_pkts
                continue

            # <interval> output rate <n> bits/sec, <n> packets/sec
            m = p22.match(line)
            if m:
                load_interval = int(m.groupdict()['load_interval'])
                out_rate = int(m.groupdict()['out_rate'])
                out_rate_pkts = int(m.groupdict()['out_rate_pkts'])

                # Guard against an output-rate line appearing without a
                # preceding input-rate line (mirrors the p21 handler).
                if 'counters' not in interface_dict[interface]:
                    interface_dict[interface]['counters'] = {}
                if 'rate' not in interface_dict[interface]['counters']:
                    interface_dict[interface]['counters']['rate'] = {}

                interface_dict[interface]['counters']['rate']\
                    ['load_interval'] = load_interval
                interface_dict[interface]['counters']['rate']\
                    ['out_rate'] = out_rate
                interface_dict[interface]['counters']['rate']\
                    ['out_rate_pkts'] = out_rate_pkts
                continue

            # input rate <n> bps, <n> pps; output rate <n> bps, <n> pps
            m = p23.match(line)
            if m:
                in_rate_bps = int(m.groupdict()['in_rate_bps'])
                in_rate_pps = int(m.groupdict()['in_rate_pps'])
                out_rate_bps = int(m.groupdict()['out_rate_bps'])
                out_rate_pps = int(m.groupdict()['out_rate_pps'])

                if 'counters' not in interface_dict[interface]:
                    interface_dict[interface]['counters'] = {}
                if 'rate' not in interface_dict[interface]['counters']:
                    interface_dict[interface]['counters']['rate'] = {}

                interface_dict[interface]['counters']['rate']\
                    ['in_rate_bps'] = in_rate_bps
                interface_dict[interface]['counters']['rate']\
                    ['in_rate_pps'] = in_rate_pps
                interface_dict[interface]['counters']['rate']\
                    ['out_rate_bps'] = out_rate_bps
                interface_dict[interface]['counters']['rate']\
                    ['out_rate_pps'] = out_rate_pps
                continue

            # RX -- enter the receive-counters section
            m = p23_1.match(line)
            if m:
                rx = m.groupdict()['rx']
                if 'counters' not in interface_dict[interface]:
                    interface_dict[interface]['counters'] = {}
                interface_dict[interface]['counters']['rx'] = True
                continue

            if rx:
                # <n> unicast packets <n> multicast packets <n> broadcast packets
                m = p24.match(line)
                if m:
                    in_unicast_pkts = int(m.groupdict()['in_unicast_pkts'])
                    in_multicast_pkts = int(m.groupdict()['in_multicast_pkts'])
                    in_broadcast_pkts = int(m.groupdict()['in_broadcast_pkts'])

                    interface_dict[interface]['counters']['in_unicast_pkts'] = in_unicast_pkts
                    interface_dict[interface]['counters']['in_multicast_pkts'] = in_multicast_pkts
                    interface_dict[interface]['counters']['in_broadcast_pkts'] = in_broadcast_pkts
                    # last_clear only exists if a 'Last clearing' line was seen
                    # earlier; NameError is swallowed deliberately.
                    try:
                        interface_dict[interface]['counters']['last_clear'] = last_clear
                    except Exception:
                        pass
                    continue

                # <n> input packets [<n> bytes] [...]
                m = p25.match(line)
                if m:
                    group = m.groupdict()
                    if 'counters' not in interface_dict[interface]:
                        interface_dict[interface]['counters'] = {}
                    interface_dict[interface]['counters']['in_pkts'] = int(group['in_pkts'])
                    if group['in_octets']:
                        interface_dict[interface]['counters']['in_octets'] = int(group['in_octets'])
                    if group['in_unicast_pkts']:
                        interface_dict[interface]['counters']['in_unicast_pkts'] = int(group['in_unicast_pkts'])
                    if group['in_multicast_pkts']:
                        interface_dict[interface]['counters']['in_multicast_pkts'] = int(group['in_multicast_pkts'])
                    continue

                # <n> broadcast packets <n> bytes
                m = p39.match(line)
                if m:
                    in_octets = int(m.groupdict()['in_octets'])
                    interface_dict[interface]['counters']['in_octets'] = in_octets
                    in_broadcast_pkts = int(m.groupdict()['in_broadcast_pkts'])
                    interface_dict[interface]['counters']['in_broadcast_pkts'] = in_broadcast_pkts
                    continue

                # <n> jumbo packets <n> storm suppression packets
                m = p26.match(line)
                if m:
                    in_jumbo_packets = int(m.groupdict()['in_jumbo_packets'])
                    in_storm_suppression_packets = int(m.groupdict()['in_storm_suppression_packets'])

                    interface_dict[interface]['counters']['in_jumbo_packets']= in_jumbo_packets
                    interface_dict[interface]['counters']\
                        ['in_storm_suppression_packets'] = in_storm_suppression_packets
                    continue

                # <n> runts <n> giants <n> CRC <n> no buffer
                m = p27.match(line)
                if m:
                    interface_dict[interface]['counters']['in_runts'] = int(m.groupdict()['in_runts'])
                    interface_dict[interface]['counters']['in_oversize_frame'] = int(m.groupdict()['in_oversize_frame'])
                    interface_dict[interface]['counters']['in_crc_errors'] = int(m.groupdict()['in_crc_errors'])
                    interface_dict[interface]['counters']['in_no_buffer'] = int(m.groupdict()['in_no_buffer'])
                    continue

                # <n> input error <n> short frame <n> overrun <n> underrun <n> ignored
                m = p28.match(line)
                if m:
                    interface_dict[interface]['counters']['in_errors'] = int(m.groupdict()['in_errors'])
                    interface_dict[interface]['counters']['in_short_frame'] = int(m.groupdict()['in_short_frame'])
                    interface_dict[interface]['counters']['in_overrun'] = int(m.groupdict()['in_overrun'])
                    interface_dict[interface]['counters']['in_underrun'] = int(m.groupdict()['in_underrun'])
                    interface_dict[interface]['counters']['in_ignored'] = int(m.groupdict()['in_ignored'])
                    continue

                # <n> watchdog <n> bad etype drop <n> bad proto drop <n> if down drop
                m = p29.match(line)
                if m:
                    interface_dict[interface]['counters']['in_watchdog'] = int(m.groupdict()['in_watchdog'])
                    interface_dict[interface]['counters']['in_bad_etype_drop'] = int(m.groupdict()['in_bad_etype_drop'])
                    interface_dict[interface]['counters']['in_unknown_protos'] = int(m.groupdict()['in_unknown_protos'])
                    interface_dict[interface]['counters']['in_if_down_drop'] = int(m.groupdict()['in_if_down_drop'])
                    continue

                # <n> input with dribble <n> input discard
                m = p30.match(line)
                if m:
                    in_with_dribble = int(m.groupdict()['in_with_dribble'])
                    in_discard = int(m.groupdict()['in_discard'])

                    interface_dict[interface]['counters']['in_with_dribble'] = in_with_dribble
                    interface_dict[interface]['counters']['in_discard'] = in_discard
                    continue

                # <n> Rx pause
                m = p31.match(line)
                if m:
                    in_mac_pause_frames = int(m.groupdict()['in_mac_pause_frames'])
                    interface_dict[interface]['counters']['in_mac_pause_frames'] = in_mac_pause_frames
                    continue

            # TX -- leave RX section, enter the transmit-counters section
            m = p31_1.match(line)
            if m:
                rx = False
                tx = m.groupdict()['tx']
                if 'counters' not in interface_dict[interface]:
                    interface_dict[interface]['counters'] = {}
                interface_dict[interface]['counters']['tx'] = True
                continue

            if tx:
                # <n> unicast packets <n> multicast packets <n> broadcast packets
                m = p32.match(line)
                if m:
                    interface_dict[interface]['counters']['out_unicast_pkts'] = int(m.groupdict()['out_unicast_pkts'])
                    interface_dict[interface]['counters']['out_multicast_pkts'] = int(m.groupdict()['out_multicast_pkts'])
                    interface_dict[interface]['counters']['out_broadcast_pkts'] = int(m.groupdict()['out_broadcast_pkts'])
                    continue

                # <n> output packets <n> bytes
                m = p33.match(line)
                if m:
                    out_pkts = int(m.groupdict()['out_pkts'])
                    out_octets = int(m.groupdict()['out_octets'])

                    interface_dict[interface]['counters']['out_pkts'] = out_pkts
                    interface_dict[interface]['counters']['out_octets'] = out_octets
                    continue

                # <n> jumbo packets
                m = p34.match(line)
                if m:
                    out_jumbo_packets = int(m.groupdict()['out_jumbo_packets'])
                    interface_dict[interface]['counters']['out_jumbo_packets'] = out_jumbo_packets
                    continue

                # <n> output error <n> collision <n> deferred <n> late collision
                m = p35.match(line)
                if m:
                    interface_dict[interface]['counters']['out_errors'] = int(m.groupdict()['out_errors'])
                    interface_dict[interface]['counters']['out_collision'] = int(m.groupdict()['out_collision'])
                    interface_dict[interface]['counters']['out_deferred'] = int(m.groupdict()['out_deferred'])
                    interface_dict[interface]['counters']['out_late_collision'] = int(m.groupdict()['out_late_collision'])
                    continue

                # <n> lost carrier <n> no carrier <n> babble <n> output discard
                m = p36.match(line)
                if m:
                    interface_dict[interface]['counters']['out_lost_carrier'] = int(m.groupdict()['out_lost_carrier'])
                    interface_dict[interface]['counters']['out_no_carrier'] = int(m.groupdict()['out_no_carrier'])
                    interface_dict[interface]['counters']['out_babble'] = int(m.groupdict()['out_babble'])
                    interface_dict[interface]['counters']['out_discard'] = int(m.groupdict()['out_discard'])
                    continue

                # <n> Tx pause
                m = p37.match(line)
                if m:
                    out_mac_pause_frames = int(m.groupdict()['out_mac_pause_frames'])
                    interface_dict[interface]['counters']['out_mac_pause_frames'] = out_mac_pause_frames
                    continue

        return interface_dict
class ShowIpInterfaceVrfAllSchema(MetaParser):
    """Schema for:
        * 'show ip interface vrf all'
        * 'show ip interface vrf {vrf}'
        * 'show ip interface {interface} vrf all'
        * 'show ip interface {interface} vrf {vrf}'

    Top-level keys are interface names; each entry records the VRF,
    interface status, per-address IPv4 data, traffic counters, and the
    various per-interface IP feature states.
    """
    schema = {
    Any():
        {'vrf': str,
         'interface_status': str,
         'iod': int,
         # Per-address IPv4 information, keyed by '<ip>/<prefix_length>'
         Optional('ipv4'):
            {Any():
                {Optional('ip'): str,
                 Optional('prefix_length'): str,
                 Optional('secondary'): bool,
                 Optional('route_tag'): str,
                 Optional('ip_subnet'): str,
                 Optional('broadcast_address'): str,
                 Optional('route_preference'): str,
                },
             # Present when the interface borrows its address (ip unnumbered)
             Optional('unnumbered'):
                {'interface_ref': str,
            },
             # Unicast/multicast/broadcast/labeled packet and byte counters
            'counters':
                {'unicast_packets_sent': int,
                 'unicast_packets_received': int,
                 'unicast_packets_forwarded': int,
                 'unicast_packets_originated': int,
                 'unicast_packets_consumed': int,
                 'unicast_bytes_sent': int,
                 'unicast_bytes_received': int,
                 'unicast_bytes_forwarded': int,
                 'unicast_bytes_originated': int,
                 'unicast_bytes_consumed': int,
                 'multicast_packets_sent': int,
                 'multicast_packets_received': int,
                 'multicast_packets_forwarded': int,
                 'multicast_packets_originated': int,
                 'multicast_packets_consumed': int,
                 'multicast_bytes_sent': int,
                 'multicast_bytes_received': int,
                 'multicast_bytes_forwarded': int,
                 'multicast_bytes_originated': int,
                 'multicast_bytes_consumed': int,
                 'broadcast_packets_sent': int,
                 'broadcast_packets_received': int,
                 'broadcast_packets_forwarded': int,
                 'broadcast_packets_originated': int,
                 'broadcast_packets_consumed': int,
                 'broadcast_bytes_sent': int,
                 'broadcast_bytes_received': int,
                 'broadcast_bytes_forwarded': int,
                 'broadcast_bytes_originated': int,
                 'broadcast_bytes_consumed': int,
                 'labeled_packets_sent': int,
                 'labeled_packets_received': int,
                 'labeled_packets_forwarded': int,
                 'labeled_packets_originated': int,
                 'labeled_packets_consumed': int,
                },
            },
         # Locally joined multicast group addresses
         Optional('multicast_groups'): list,
         Optional('multicast_groups_address'): str,
         'ip_mtu': int,
         # Per-interface IP feature states (enabled/disabled strings)
         'proxy_arp': str,
         'local_proxy_arp': str,
         'multicast_routing': str,
         'icmp_redirects': str,
         'directed_broadcast': str,
         Optional('ip_forwarding'): str,
         'icmp_unreachable': str,
         'icmp_port_unreachable': str,
         'unicast_reverse_path': str,
         'load_sharing': str,
         'int_stat_last_reset': str,
         'wccp_redirect_outbound': str,
         'wccp_redirect_inbound': str,
         'wccp_redirect_exclude': str
        },
    }
class ShowIpInterfaceVrfAll(ShowIpInterfaceVrfAllSchema):
cli_command = ['show ip interface {interface} vrf {vrf}', 'show ip interface {interface} vrf all',
'show ip interface vrf {vrf}', 'show ip interface vrf all']
exclude = [
'multicast_bytes_consumed',
'multicast_bytes_received',
'unicast_bytes_consumed',
'unicast_packets_consumed',
'unicast_bytes_originated',
'unicast_packets_originated',
'unicast_bytes_received',
'unicast_bytes_sent',
'unicast_packets_received',
'unicast_packets_sent',
'multicast_packets_consumed',
'multicast_packets_received',
'multicast_bytes_originated',
'multicast_bytes_sent',
'multicast_packets_originated',
'multicast_packets_sent',
'broadcast_bytes_consumed',
'broadcast_bytes_received',
'broadcast_packets_consumed',
'broadcast_packets_received',
'multicast_groups',
'int_stat_last_reset',
'unicast_bytes_forwarded',
'unicast_packets_forwarded',
'oil_uptime',
'iod',
'(tunnel.*)',
'multicast_groups_address']
def cli(self, interface='', vrf='', output=None):
if interface and vrf:
cmd = self.cli_command[0].format(interface=interface, vrf=vrf)
elif interface:
cmd = self.cli_command[1].format(interface=interface)
elif vrf:
cmd = self.cli_command[2].format(vrf=vrf)
else:
cmd = self.cli_command[3]
if output is None:
out = self.device.execute(cmd)
else:
out = output
del interface
ip_interface_vrf_all_dict = {}
temp_intf = []
for line in out.splitlines():
line = line.rstrip()
p1 = re.compile(r'^\s*IP *Interface *Status *for *VRF'
' *(?P<vrf>\S+)$')
m = p1.match(line)
if m:
vrf = m.groupdict()['vrf']
vrf = vrf.replace('"',"")
continue
#Ethernet2/1, Interface status: protocol-up/link-up/admin-up, iod: 36,
p2 = re.compile(r'^\s*(?P<interface>[a-zA-Z0-9\/\-\.]+), *Interface'
' *status: *(?P<interface_status>[a-z\-\/\s]+),'
' *iod: *(?P<iod>[0-9]+),$')
m = p2.match(line)
if m:
interface = m.groupdict()['interface']
interface_status = m.groupdict()['interface_status']
iod = int(m.groupdict()['iod'])
if interface not in ip_interface_vrf_all_dict:
ip_interface_vrf_all_dict[interface] = {}
ip_interface_vrf_all_dict[interface]['interface_status']\
= interface_status
ip_interface_vrf_all_dict[interface]['iod'] = iod
ip_interface_vrf_all_dict[interface]['vrf'] = vrf
#init multicast groups list to empty for this interface
multicast_groups = []
unnumbered_intf = None
# unnumbered interface didn't share the same information
temp_intf = None
# check if the ipv4 and address already assgined during the unnumbered block
if 'ipv4' in ip_interface_vrf_all_dict[interface]:
for key in ip_interface_vrf_all_dict[interface]['ipv4'].keys():
if re.match('^\d+.\d+.\d+.\d+\/\d+', key):
address = key
continue
# Unnumbered interfaces of loopback0: first iod 46
p2_1 = re.compile(r'^\s*Unnumbered +interfaces +of +(?P<unnumbered_intf>[\w\.\/]+): *'
'first +iod +(?P<first_iod>\d+)$')
m = p2_1.match(line)
if m:
unnumbered_intf = m.groupdict()['unnumbered_intf']
continue
# Ethernet2/11:
# mti18: tunnel-te11: tunnel-te12:
p2_2 = re.compile(r'(([E|e]thernet|[L|l]oopback|[T|t]unnel|[V|v]lan|mti|[t|T]unnel-te|[p|P]ort-channel)[\d\/\.]+):')
m = p2_2.findall(line)
if m and unnumbered_intf:
temp_intf = []
temp_intf = [i[0] for i in m]
for intf in temp_intf:
if intf not in ip_interface_vrf_all_dict:
ip_interface_vrf_all_dict[intf] = {}
continue
# IP address: 10.4.4.4, IP subnet: 10.4.4.0/24 secondary
# IP address: 10.64.4.4, IP subnet: 10.64.4.0/24
p3 = re.compile(r'^\s*IP *address: *(?P<ip>[0-9\.]+), *IP'
' *subnet: *(?P<ip_subnet>[a-z0-9\.]+)\/'
'(?P<prefix_length>[0-9]+)'
' *(?P<secondary>(secondary))?$')
m = p3.match(line)
if m:
ip = m.groupdict()['ip']
ip_subnet = m.groupdict()['ip_subnet']
prefix_length = m.groupdict()['prefix_length']
secondary = m.groupdict()['secondary']
address = ip + '/' + prefix_length
if temp_intf:
temp_intf.append(interface)
intf_lst = temp_intf
else:
intf_lst = [interface]
for intf in intf_lst:
if 'ipv4' not in ip_interface_vrf_all_dict[intf]:
ip_interface_vrf_all_dict[intf]['ipv4'] = {}
if address not in ip_interface_vrf_all_dict[intf]['ipv4']:
ip_interface_vrf_all_dict[intf]['ipv4'][address] = {}
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['ip'] = ip
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['ip_subnet'] = ip_subnet
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['prefix_length'] = prefix_length
if secondary:
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['secondary'] = True
else:
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['secondary'] = False
continue
# IP address: 192.168.106.1, IP subnet: 192.168.106.0/24 route-preference: 0, tag: 0
p3_1 = re.compile(r'^\s*IP *address: *(?P<ip>[0-9\.]+), *IP'
' *subnet: *(?P<ip_subnet>[a-z0-9\.]+)\/'
'(?P<prefix_length>[0-9\,]+)(?: *route-preference:'
' *(?P<route_preference>[0-9]+),)?(?: *tag:'
' *(?P<route_tag>[0-9]+))?$')
m = p3_1.match(line)
if m:
ip = m.groupdict()['ip']
ip_subnet = m.groupdict()['ip_subnet']
prefix_length = m.groupdict()['prefix_length']
route_tag = m.groupdict()['route_tag']
route_preference = m.groupdict()['route_preference']
address = ip + '/' + prefix_length
if temp_intf:
temp_intf.append(interface)
intf_lst = temp_intf
# unnumbered interface didn't share the same information
temp_intf = None
else:
intf_lst = [interface]
for intf in intf_lst:
if 'ipv4' not in ip_interface_vrf_all_dict[intf]:
ip_interface_vrf_all_dict[intf]['ipv4'] = {}
if address not in ip_interface_vrf_all_dict[intf]['ipv4']:
ip_interface_vrf_all_dict[intf]['ipv4'][address] = {}
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['ip'] = ip
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['ip_subnet'] = ip_subnet
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['prefix_length'] = prefix_length
if route_tag:
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['route_tag'] = route_tag
if route_preference:
ip_interface_vrf_all_dict[intf]['ipv4'][address]\
['route_preference'] = route_preference
continue
#IP broadcast address: 255.255.255.255
p4 = re.compile(r'^\s*IP *broadcast *address:'
' *(?P<broadcast_address>[0-9\.]+)$')
m = p4.match(line)
if m:
broadcast_address = str(m.groupdict()['broadcast_address'])
if 'ipv4' in ip_interface_vrf_all_dict[interface]:
ip_interface_vrf_all_dict[interface]['ipv4'][address]['broadcast_address'] = broadcast_address
continue
#IP multicast groups locally joined: none
#224.0.0.6 224.0.0.5 224.0.0.2
p5 = re.compile(r'^\s*IP *multicast *groups *locally *joined:'
' *(?P<multicast_groups_address>[a-z]+)$')
m = p5.match(line)
if m:
multicast_groups_address = m.groupdict()['multicast_groups_address']
ip_interface_vrf_all_dict[interface]['multicast_groups_address']\
= multicast_groups_address
continue
#224.0.0.6 224.0.0.5 224.0.0.2
p5_1 = re.compile(r'^\s*(?P<multicast_groups_address>[a-z0-9\.\s]+)$')
m = p5_1.match(line)
if m:
multicast_groups_address = str(m.groupdict()['multicast_groups_address'])
#Split string of addressed into a list
multicast_groups_address = [str(i) for i in multicast_groups_address.split()]
#Add to previous created list
for mgroup in multicast_groups_address:
multicast_groups.append(mgroup)
ip_interface_vrf_all_dict[interface]['multicast_groups']\
= sorted(multicast_groups)
continue
#IP MTU: 1600 bytes (using link MTU)
p6 = re.compile(r'^\s*IP *MTU: *(?P<ip_mtu>[0-9]+)'
' *bytes *\(using *link *MTU\)$')
m = p6.match(line)
if m:
ip_mtu = int(m.groupdict()['ip_mtu'])
ip_interface_vrf_all_dict[interface]['ip_mtu'] = ip_mtu
continue
#IP primary address route-preference: 0, tag: 0
p7 = re.compile(r'^\s*IP *primary *address *route-preference:'
' *(?P<route_preference>[0-9]+), *tag:'
' *(?P<route_tag>[0-9]+)$')
m = p7.match(line)
if m:
route_preference = m.groupdict()['route_preference']
route_tag = m.groupdict()['route_tag']
if route_preference:
ip_interface_vrf_all_dict[interface]['ipv4'][address]['route_preference']\
= route_preference
if route_tag:
ip_interface_vrf_all_dict[interface]['ipv4'][address]\
['route_tag'] = route_tag
continue
#IP proxy ARP : disabled
p8 = re.compile(r'^\s*IP *proxy *ARP *: *(?P<proxy_arp>[a-z]+)$')
m = p8.match(line)
if m:
proxy_arp = m.groupdict()['proxy_arp']
ip_interface_vrf_all_dict[interface]['proxy_arp'] = proxy_arp
continue
#IP Local Proxy ARP : disabled
p9 = re.compile(r'^\s*IP *Local *Proxy *ARP *:'
' *(?P<local_proxy_arp>[a-z]+)$')
m = p9.match(line)
if m:
local_proxy_arp = m.groupdict()['local_proxy_arp']
ip_interface_vrf_all_dict[interface]['local_proxy_arp']\
= local_proxy_arp
continue
#IP multicast routing: disabled
p10 = re.compile(r'^\s*IP *multicast *routing:'
' *(?P<multicast_routing>[a-z]+)$')
m = p10.match(line)
if m:
multicast_routing = m.groupdict()['multicast_routing']
ip_interface_vrf_all_dict[interface]['multicast_routing']\
= multicast_routing
continue
#IP icmp redirects: disabled
p11 = re.compile(r'^\s*IP *icmp *redirects:'
' *(?P<icmp_redirects>[a-z]+)$')
m = p11.match(line)
if m:
icmp_redirects = m.groupdict()['icmp_redirects']
ip_interface_vrf_all_dict[interface]['icmp_redirects']\
= icmp_redirects
continue
#IP directed-broadcast: disabled
p12 = re.compile(r'^\s*IP directed-broadcast:'
' *(?P<directed_broadcast>[a-z]+)$')
m = p12.match(line)
if m:
directed_broadcast = m.groupdict()['directed_broadcast']
ip_interface_vrf_all_dict[interface]['directed_broadcast']\
= directed_broadcast
continue
#IP Forwarding: disabled
p13 = re.compile(r'^\s*IP *Forwarding: *(?P<ip_forwarding>[a-z]+)$')
m = p13.match(line)
if m:
ip_forwarding = m.groupdict()['ip_forwarding']
ip_interface_vrf_all_dict[interface]['ip_forwarding']\
= ip_forwarding
continue
#IP icmp unreachables (except port): disabled
p14 = re.compile(r'^\s*IP *icmp *unreachables *\(except *port\):'
' *(?P<icmp_unreachable>[a-z]+)$')
m = p14.match(line)
if m:
icmp_unreachable = m.groupdict()['icmp_unreachable']
ip_interface_vrf_all_dict[interface]['icmp_unreachable']\
= icmp_unreachable
continue
#IP icmp port-unreachable: enabled
p15 = re.compile(r'^\s*IP *icmp *port-unreachable:'
' *(?P<icmp_port_unreachable>[a-z]+)$')
m = p15.match(line)
if m:
icmp_port_unreachable = m.groupdict()['icmp_port_unreachable']
ip_interface_vrf_all_dict[interface]['icmp_port_unreachable']\
= icmp_port_unreachable
continue
#IP unicast reverse path forwarding: none
p16 = re.compile(r'^\s*IP *unicast *reverse *path *forwarding:'
' *(?P<unicast_reverse_path>\w+)$')
m = p16.match(line)
if m:
unicast_reverse_path = m.groupdict()['unicast_reverse_path']
ip_interface_vrf_all_dict[interface]['unicast_reverse_path']\
= unicast_reverse_path
continue
#IP load sharing: none
p17 = re.compile(r'^\s*IP *load *sharing: *(?P<load_sharing>\w+)$')
m = p17.match(line)
if m:
load_sharing = m.groupdict()['load_sharing']
ip_interface_vrf_all_dict[interface]['load_sharing']\
= load_sharing
continue
#IP interface statistics last reset: never
p18 = re.compile(r'^\s*IP *interface *statistics *last *reset:'
' *(?P<int_stat_last_reset>[a-zA-Z0-9\:]+)')
m = p18.match(line)
if m:
int_stat_last_reset = m.groupdict()['int_stat_last_reset']
ip_interface_vrf_all_dict[interface]['int_stat_last_reset']\
= int_stat_last_reset
continue
# IP interface software stats: (sent/received/forwarded/originated/consumed)
# Unicast packets : 0/0/0/0/0
# Unicast bytes : 0/0/0/0/0
# Multicast packets : 0/0/0/0/0
# Multicast bytes : 0/0/0/0/0
# Broadcast packets : 0/0/0/0/0
# Broadcast bytes : 0/0/0/0/0
# Labeled packets : 0/0/0/0/0
# Labeled bytes : 0/0/0/0/0
try:
interface
except Exception:
continue
if 'ipv4' in ip_interface_vrf_all_dict[interface]:
#Unicast packets : 0/0/0/0/0
p20 = re.compile(r'^\s*Unicast *packets *:'
' *(?P<unicast_packets_sent>[0-9]+)\/'
'(?P<unicast_packets_received>[0-9]+)\/'
'(?P<unicast_packets_forwarded>[0-9]+)\/'
'(?P<unicast_packets_originated>[0-9]+)\/'
'(?P<unicast_packets_consumed>[0-9]+)$')
m = p20.match(line)
if m:
if 'counters' not in ip_interface_vrf_all_dict[interface]['ipv4'][address]:
ip_interface_vrf_all_dict[interface]['ipv4']['counters'] = {}
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_sent']= int(m.groupdict()['unicast_packets_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_received']= int(m.groupdict()['unicast_packets_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_forwarded']= int(m.groupdict()['unicast_packets_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_originated']= int(m.groupdict()['unicast_packets_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_packets_consumed']= int(m.groupdict()['unicast_packets_consumed'])
continue
#Unicast bytes : 0/0/0/0/0
p21 = re.compile(r'^\s*Unicast *bytes *:'
' *(?P<unicast_bytes_sent>[0-9]+)\/'
'(?P<unicast_bytes_received>[0-9]+)\/'
'(?P<unicast_bytes_forwarded>[0-9]+)\/'
'(?P<unicast_bytes_originated>[0-9]+)\/'
'(?P<unicast_bytes_consumed>[0-9]+)$')
m = p21.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_sent']= int(m.groupdict()['unicast_bytes_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_received']= int(m.groupdict()['unicast_bytes_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_forwarded']= int(m.groupdict()['unicast_bytes_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_originated']= int(m.groupdict()['unicast_bytes_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['unicast_bytes_consumed']= int(m.groupdict()['unicast_bytes_consumed'])
continue
#Multicast packets : 0/0/0/0/0
p22 = re.compile(r'^\s*Multicast *packets *:'
' *(?P<multicast_packets_sent>[0-9]+)\/'
'(?P<multicast_packets_received>[0-9]+)\/'
'(?P<multicast_packets_forwarded>[0-9]+)\/'
'(?P<multicast_packets_originated>[0-9]+)\/'
'(?P<multicast_packets_consumed>[0-9]+)$')
m = p22.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_sent']= int(m.groupdict()['multicast_packets_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_received']= int(m.groupdict()['multicast_packets_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_forwarded']= int(m.groupdict()['multicast_packets_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_originated']= int(m.groupdict()['multicast_packets_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_packets_consumed']= int(m.groupdict()['multicast_packets_consumed'])
continue
#Multicast bytes : 0/0/0/0/0
p23 = re.compile(r'^\s*Multicast *bytes *:'
' *(?P<multicast_bytes_sent>[0-9]+)\/'
'(?P<multicast_bytes_received>[0-9]+)\/'
'(?P<multicast_bytes_forwarded>[0-9]+)\/'
'(?P<multicast_bytes_originated>[0-9]+)\/'
'(?P<multicast_bytes_consumed>[0-9]+)$')
m = p23.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_sent']= int(m.groupdict()['multicast_bytes_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_received']= int(m.groupdict()['multicast_bytes_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_forwarded']= int(m.groupdict()['multicast_bytes_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_originated']= int(m.groupdict()['multicast_bytes_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['multicast_bytes_consumed']= int(m.groupdict()['multicast_bytes_consumed'])
continue
#Broadcast packets : 0/0/0/0/0
p24 = re.compile(r'^\s*Broadcast *packets *:'
' *(?P<broadcast_packets_sent>[0-9]+)\/'
'(?P<broadcast_packets_received>[0-9]+)\/'
'(?P<broadcast_packets_forwarded>[0-9]+)\/'
'(?P<broadcast_packets_originated>[0-9]+)\/'
'(?P<broadcast_packets_consumed>[0-9]+)$')
m = p24.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_sent']= int(m.groupdict()['broadcast_packets_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_received']= int(m.groupdict()['broadcast_packets_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_forwarded']= int(m.groupdict()['broadcast_packets_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_originated']= int(m.groupdict()['broadcast_packets_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_packets_consumed']= int(m.groupdict()['broadcast_packets_consumed'])
continue
#Broadcast bytes : 0/0/0/0/0
p25 = re.compile(r'^\s*Broadcast *bytes *:'
' *(?P<broadcast_bytes_sent>[0-9]+)\/'
'(?P<broadcast_bytes_received>[0-9]+)\/'
'(?P<broadcast_bytes_forwarded>[0-9]+)\/'
'(?P<broadcast_bytes_originated>[0-9]+)\/'
'(?P<broadcast_bytes_consumed>[0-9]+)$')
m = p25.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_sent']= int(m.groupdict()['broadcast_bytes_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_received']= int(m.groupdict()['broadcast_bytes_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_forwarded']= int(m.groupdict()['broadcast_bytes_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_originated']= int(m.groupdict()['broadcast_bytes_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['broadcast_bytes_consumed']= int(m.groupdict()['broadcast_bytes_consumed'])
continue
#Labeled packets : 0/0/0/0/0
p26 = re.compile(r'^\s*Labeled *packets *:'
' *(?P<labeled_packets_sent>[0-9]+)\/'
'(?P<labeled_packets_received>[0-9]+)\/'
'(?P<labeled_packets_forwarded>[0-9]+)\/'
'(?P<labeled_packets_originated>[0-9]+)\/'
'(?P<labeled_packets_consumed>[0-9]+)$')
m = p26.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_sent']= int(m.groupdict()['labeled_packets_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_received']= int(m.groupdict()['labeled_packets_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_forwarded']= int(m.groupdict()['labeled_packets_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_originated']= int(m.groupdict()['labeled_packets_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_packets_consumed']= int(m.groupdict()['labeled_packets_consumed'])
continue
#Labeled bytes : 0/0/0/0/0
p27 = re.compile(r'^\s*Labeled *bytes *:'
' *(?P<labeled_bytes_sent>[0-9]+)\/'
'(?P<labeled_bytes_received>[0-9]+)\/'
'(?P<labeled_bytes_forwarded>[0-9]+)\/'
'(?P<labeled_bytes_originated>[0-9]+)\/'
'(?P<labeled_bytes_consumed>[0-9]+)$')
m = p27.match(line)
if m:
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_sent']= int(m.groupdict()['labeled_bytes_sent'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_received']= int(m.groupdict()['labeled_bytes_received'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_forwarded']= int(m.groupdict()['labeled_bytes_forwarded'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_originated']= int(m.groupdict()['labeled_bytes_originated'])
ip_interface_vrf_all_dict[interface]['ipv4']['counters']\
['labeled_bytes_consumed']= int(m.groupdict()['labeled_bytes_consumed'])
continue
#WCCP Redirect outbound: disabled
p28 = re.compile(r'^\s*WCCP *Redirect *outbound:'
' *(?P<wccp_redirect_outbound>[a-z]+)$')
m = p28.match(line)
if m:
wccp_redirect_outbound = m.groupdict()['wccp_redirect_outbound']
ip_interface_vrf_all_dict[interface]['wccp_redirect_outbound']\
= wccp_redirect_outbound
continue
#WCCP Redirect inbound: disabled
p29 = re.compile(r'^\s*WCCP *Redirect *inbound:'
' *(?P<wccp_redirect_inbound>[a-z]+)$')
m = p29.match(line)
if m:
wccp_redirect_inbound = m.groupdict()['wccp_redirect_inbound']
ip_interface_vrf_all_dict[interface]['wccp_redirect_inbound']\
= wccp_redirect_inbound
continue
#WCCP Redirect exclude: disabled
p30 = re.compile(r'^\s*WCCP *Redirect *exclude:'
' *(?P<wccp_redirect_exclude>[a-z]+)$')
m = p30.match(line)
if m:
wccp_redirect_exclude = m.groupdict()['wccp_redirect_exclude']
ip_interface_vrf_all_dict[interface]['wccp_redirect_exclude']\
= wccp_redirect_exclude
continue
# IP unnumbered interface (loopback0)
p31 = re.compile(r'^\s*IP +unnumbered +interface +\((?P<unnum_intf>[\w\/\.]+)\)$')
m = p31.match(line)
if m:
unnum_intf = m.groupdict()['unnum_intf']
if 'ipv4' in ip_interface_vrf_all_dict[interface]:
ip_interface_vrf_all_dict[interface]['ipv4']['unnumbered'] = {}
ip_interface_vrf_all_dict[interface]['ipv4']['unnumbered']['interface_ref']\
= unnum_intf
continue
return ip_interface_vrf_all_dict
# ===================================
# Schema for 'show vrf all interface'
# ===================================
class ShowVrfAllInterfaceSchema(MetaParser):
    """Schema for 'show vrf all interface'.

    Top-level keys are interface names; each maps to the interface's
    VRF membership details.
    """

    schema = {
        Any(): {
            'vrf': str,             # name of the VRF the interface belongs to
            'vrf_id': int,          # numeric VRF identifier
            'site_of_origin': str,  # site-of-origin column text
        },
    }
# ===================================
# Parser for 'show vrf all interface'
# ===================================
class ShowVrfAllInterface(ShowVrfAllInterfaceSchema):
    """Parser for:
        * 'show vrf {vrf} interface {interface}'
        * 'show vrf all interface {interface}'
        * 'show vrf {vrf} interface'
        * 'show vrf all interface'
    """

    cli_command = ['show vrf {vrf} interface {interface}',
                   'show vrf all interface {interface}',
                   'show vrf {vrf} interface', 'show vrf all interface']
    exclude = [
        '(Null.*)']

    def cli(self, interface='', vrf='', output=None):
        """Run the narrowest matching command (unless *output* is supplied)
        and return a dict keyed by interface name with 'vrf', 'vrf_id' and
        'site_of_origin' entries.

        Args:
            interface: optional interface filter.
            vrf: optional VRF filter.
            output: pre-collected device output; when None the command is
                executed on the device.
        """
        # Pick the most specific command form for the given filters.
        if interface and vrf:
            cmd = self.cli_command[0].format(interface=interface, vrf=vrf)
        elif interface:
            cmd = self.cli_command[1].format(interface=interface)
        elif vrf:
            cmd = self.cli_command[2].format(vrf=vrf)
        else:
            cmd = self.cli_command[3]

        out = self.device.execute(cmd) if output is None else output

        vrf_all_interface_dict = {}

        # Interface VRF-Name VRF-ID Site-of-Origin
        # Ethernet2/1 VRF1 3 --
        # Null0 default 1 --
        # Compiled once, outside the per-line loop.  '\w' (vs the previous
        # '[a-zA-Z0-9]') also accepts underscores in VRF names; the header
        # row still cannot match because <vrf_id> must be numeric.
        p1 = re.compile(r'^\s*(?P<interface>[a-zA-Z0-9\.\/]+)'
                        r' *(?P<vrf>[\w\-]+)'
                        r' *(?P<vrf_id>[0-9]+)'
                        r' *(?P<site_of_origin>[a-zA-Z\-]+)$')

        for line in out.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                intf = m.groupdict()['interface']
                intf_dict = vrf_all_interface_dict.setdefault(intf, {})
                intf_dict['vrf'] = m.groupdict()['vrf']
                intf_dict['vrf_id'] = int(m.groupdict()['vrf_id'])
                intf_dict['site_of_origin'] = m.groupdict()['site_of_origin']
                continue

        return vrf_all_interface_dict
# ======================================
# Schema for 'show interface switchport'
# ======================================
class ShowInterfaceSwitchportSchema(MetaParser):
    """Schema for 'show interface switchport'.

    Top-level keys are interface names; each maps to the interface's
    switchport configuration.
    """

    schema = {
        Any(): {
            'switchport_status': str,
            Optional('switchport_monitor'): str,
            Optional('switchport_mode'): str,
            Optional('access_vlan'): int,
            'switchport_enable': bool,
            Optional('access_vlan_mode'): str,
            Optional('native_vlan'): int,
            Optional('native_vlan_mode'): str,
            Optional('trunk_vlans'): str,
            # Private-VLAN administrative settings
            Optional('admin_priv_vlan_primary_host_assoc'): str,
            Optional('admin_priv_vlan_secondary_host_assoc'): str,
            Optional('admin_priv_vlan_primary_mapping'): str,
            Optional('admin_priv_vlan_secondary_mapping'): str,
            Optional('admin_priv_vlan_trunk_native_vlan'): str,
            Optional('admin_priv_vlan_trunk_encapsulation'): str,
            Optional('admin_priv_vlan_trunk_normal_vlans'): str,
            Optional('admin_priv_vlan_trunk_private_vlans'): str,
            Optional('operational_private_vlan'): str,
        },
    }
# ======================================
# Parser for 'show interface switchport'
# ======================================
class ShowInterfaceSwitchport(ShowInterfaceSwitchportSchema):
    """Parser for:
        * 'show interface switchport'
        * 'show interface {interface} switchport'
    """

    cli_command = ['show interface switchport', 'show interface {interface} switchport']

    def cli(self, interface="", output=None):
        """Parse 'show interface switchport' output into a dict keyed by
        interface name (see ShowInterfaceSwitchportSchema).

        Args:
            interface: optional interface to restrict the command to.
            output: pre-collected device output; when None the command is
                executed on the device.
        """
        if output is None:
            if interface:
                cmd = self.cli_command[1].format(interface=interface)
            else:
                cmd = self.cli_command[0]
            out = self.device.execute(cmd)
        else:
            out = output

        interface_switchport_dict = {}

        # All patterns compiled once, outside the per-line loop (the original
        # recompiled each of them for every output line).

        # Name: Ethernet2/2
        p1 = re.compile(r'^\s*Name: *(?P<interface>[a-zA-Z0-9\/\-\.]+)$')
        # Switchport: Enabled
        p2 = re.compile(r'^\s*Switchport: *(?P<switchport_status>[a-zA-Z\s]+)$')
        # Switchport Monitor: Not enabled
        p3 = re.compile(r'^\s*Switchport *Monitor: *(?P<switchport_monitor>[a-zA-Z\s]+)$')
        # Operational Mode: Private-vlan host
        p4 = re.compile(r'^\s*Operational *Mode: *(?P<switchport_mode>[\w\s-]+)$')
        # Access Mode VLAN: 1 (default)
        # Access Mode VLAN: 7 (server-vlan7)
        # Access Mode VLAN: 551 (Test_VM_192.168.1.0/24)
        p5 = re.compile(r'^\s*Access *Mode *VLAN: *(?P<access_vlan>[0-9]+)'
                        r'(?: *\((?P<access_vlan_mode>[\S\s]+)\))?$')
        # Trunking Native Mode VLAN: 1 (default)
        # Trunking Native Mode VLAN: 200 (VLAN0200)
        # Trunking Native Mode VLAN: 3967 (Vlan not created)
        # Trunking Native Mode VLAN: 451 (VM_Machines_192.168.1.0/24)
        p6 = re.compile(r'^\s*Trunking *Native *Mode *VLAN:'
                        r' *(?P<native_vlan>[0-9]+)'
                        r' *\((?P<native_vlan_mode>[\S\s]+)\)$')
        # Trunking VLANs Allowed: 100,300
        p7 = re.compile(r'^\s*Trunking *VLANs *Allowed: *(?P<trunk_vlans>[0-9\,\-]+)$')
        # Administrative private-vlan primary host-association: 2000
        p8 = re.compile(r'^\s*Administrative *private-vlan *primary'
                        r' *host-association:'
                        r' *(?P<admin_priv_vlan_primary_host_assoc>\w+)$')
        # Administrative private-vlan secondary host-association: 110
        p9 = re.compile(r'^\s*Administrative *private-vlan *secondary'
                        r' *host-association:'
                        r' *(?P<admin_priv_vlan_secondary_host_assoc>\w+)$')
        # Administrative private-vlan primary mapping: none
        p10 = re.compile(r'^\s*Administrative *private-vlan *primary'
                         r' *mapping:'
                         r' *(?P<admin_priv_vlan_primary_mapping>\w+)$')
        # Administrative private-vlan secondary mapping: none
        p11 = re.compile(r'^\s*Administrative *private-vlan *secondary'
                         r' *mapping:'
                         r' *(?P<admin_priv_vlan_secondary_mapping>\w+)$')
        # Administrative private-vlan trunk native VLAN: 1
        p12 = re.compile(r'^\s*Administrative *private-vlan *trunk *native'
                         r' *VLAN:'
                         r' *(?P<admin_priv_vlan_trunk_native_vlan>\w+)$')
        # Administrative private-vlan trunk encapsulation: dot1q
        p13 = re.compile(r'^\s*Administrative *private-vlan *trunk'
                         r' *encapsulation:'
                         r' *(?P<admin_priv_vlan_trunk_encapsulation>[a-z0-9]+)$')
        # Administrative private-vlan trunk normal VLANs: none
        p14 = re.compile(r'^\s*Administrative *private-vlan *trunk'
                         r' *normal VLANs:'
                         r' *(?P<admin_priv_vlan_trunk_normal_vlans>\w+)$')
        # Administrative private-vlan trunk private VLANs: none
        # Administrative private-vlan trunk private VLANs: none(0 none)
        p15 = re.compile(r'^\s*Administrative *private-vlan *trunk'
                         r' *private VLANs:'
                         r' *(?P<admin_priv_vlan_trunk_private_vlans>\w+)(?P<dummy>.*)?$')
        # Operational private-vlan: (2500,101)
        p16 = re.compile(r'^\s*Operational *private-vlan:'
                         r' *(?P<operational_private_vlan>\S+)$')

        for line in out.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                # Start of a new per-interface section.
                interface = m.groupdict()['interface']
                if interface not in interface_switchport_dict:
                    interface_switchport_dict[interface] = {}
                continue

            m = p2.match(line)
            if m:
                switchport_status = m.groupdict()['switchport_status'].lower()
                intf_dict = interface_switchport_dict[interface]
                intf_dict['switchport_status'] = switchport_status
                # Any status text containing 'enable' counts as enabled.
                intf_dict['switchport_enable'] = 'enable' in switchport_status
                continue

            m = p3.match(line)
            if m:
                interface_switchport_dict[interface]['switchport_monitor'] = \
                    m.groupdict()['switchport_monitor']
                continue

            m = p4.match(line)
            if m:
                interface_switchport_dict[interface]['switchport_mode'] = \
                    m.groupdict()['switchport_mode']
                continue

            m = p5.match(line)
            if m:
                intf_dict = interface_switchport_dict[interface]
                intf_dict['access_vlan'] = int(m.groupdict()['access_vlan'])
                # May be None when no parenthesized mode is present.
                intf_dict['access_vlan_mode'] = m.groupdict()['access_vlan_mode']
                continue

            m = p6.match(line)
            if m:
                intf_dict = interface_switchport_dict[interface]
                intf_dict['native_vlan'] = int(m.groupdict()['native_vlan'])
                intf_dict['native_vlan_mode'] = m.groupdict()['native_vlan_mode']
                continue

            m = p7.match(line)
            if m:
                interface_switchport_dict[interface]['trunk_vlans'] = \
                    m.groupdict()['trunk_vlans']
                continue

            m = p8.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_primary_host_assoc'] = \
                    m.groupdict()['admin_priv_vlan_primary_host_assoc']
                continue

            m = p9.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_secondary_host_assoc'] = \
                    m.groupdict()['admin_priv_vlan_secondary_host_assoc']
                continue

            m = p10.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_primary_mapping'] = \
                    m.groupdict()['admin_priv_vlan_primary_mapping']
                continue

            m = p11.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_secondary_mapping'] = \
                    m.groupdict()['admin_priv_vlan_secondary_mapping']
                continue

            m = p12.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_trunk_native_vlan'] = \
                    m.groupdict()['admin_priv_vlan_trunk_native_vlan']
                continue

            m = p13.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_trunk_encapsulation'] = \
                    m.groupdict()['admin_priv_vlan_trunk_encapsulation']
                continue

            m = p14.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_trunk_normal_vlans'] = \
                    m.groupdict()['admin_priv_vlan_trunk_normal_vlans']
                continue

            m = p15.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['admin_priv_vlan_trunk_private_vlans'] = \
                    m.groupdict()['admin_priv_vlan_trunk_private_vlans']
                continue

            m = p16.match(line)
            if m:
                interface_switchport_dict[interface]\
                    ['operational_private_vlan'] = \
                    m.groupdict()['operational_private_vlan']
                continue

        return interface_switchport_dict
# ========================================
# Schema for 'show ipv6 interface vrf all'
# ========================================
class ShowIpv6InterfaceVrfAllSchema(MetaParser):
    """Schema for 'show ipv6 interface vrf all'.

    Top-level keys are interface names; the 'ipv6' sub-dict mixes
    per-address dicts (keyed by 'ip/prefix') with interface-wide IPv6
    attributes and a 'counters' dict.
    """

    schema = {
        Any():
            {'vrf': str,
             'interface_status': str,
             'iod': int,
             'enabled': bool,
             Optional('ipv6'):
                {Any():
                    # Per-address entry, keyed by '<ip>/<prefix_length>'.
                    {Optional('ip'): str,
                     Optional('prefix_length'): str,
                     Optional('anycast'): bool,
                     Optional('status'): str,
                     },
                 # Software forwarded/originated/consumed packet and byte
                 # counters, split by unicast/multicast.
                 'counters':
                    {'unicast_packets_forwarded': int,
                     'unicast_packets_originated': int,
                     'unicast_packets_consumed': int,
                     'unicast_bytes_forwarded': int,
                     'unicast_bytes_originated': int,
                     'unicast_bytes_consumed': int,
                     'multicast_packets_forwarded': int,
                     'multicast_packets_originated': int,
                     'multicast_packets_consumed': int,
                     'multicast_bytes_forwarded': int,
                     'multicast_bytes_originated': int,
                     'multicast_bytes_consumed': int,
                     },
                 Optional('ipv6_subnet'): str,
                 'ipv6_link_local': str,
                 'ipv6_link_local_state': str,
                 'ipv6_ll_state': str,
                 Optional('ipv6_virtual_add'): str,
                 Optional('ipv6_virtual_groups'): list,
                 # Flags marking that the corresponding list sections were
                 # present in the output.
                 Optional('virtual_add'): bool,
                 Optional('multicast_groups'): bool,
                 'ipv6_multicast_routing': str,
                 'ipv6_report_link_local': str,
                 'ipv6_forwarding_feature': str,
                 Optional('ipv6_multicast_groups'): list,
                 Optional('ipv6_multicast_entries'): str,
                 'ipv6_mtu': int,
                 'ipv6_unicast_rev_path_forwarding': str,
                 'ipv6_load_sharing': str,
                 'ipv6_last_reset': str
                 },
             },
        }
# ========================================
# Parser for 'show ipv6 interface vrf all'
# ========================================
class ShowIpv6InterfaceVrfAll(ShowIpv6InterfaceVrfAllSchema):
    """Parser for:
        * 'show ipv6 interface {interface} vrf {vrf}'
        * 'show ipv6 interface {interface} vrf all'
        * 'show ipv6 interface vrf {vrf}'
        * 'show ipv6 interface vrf all'
    """

    cli_command = ['show ipv6 interface {interface} vrf {vrf}', 'show ipv6 interface {interface} vrf all',
                   'show ipv6 interface vrf {vrf}', 'show ipv6 interface vrf all']
    # Volatile keys (counters, link-local state, iod) excluded from
    # output comparison.
    exclude = [
        'multicast_bytes_consumed',
        'multicast_packets_consumed',
        'multicast_bytes_originated',
        'multicast_packets_originated',
        'unicast_bytes_consumed',
        'unicast_packets_consumed',
        'unicast_bytes_originated',
        'unicast_packets_originated',
        'ipv6_multicast_groups',
        'iod',
        'multicast_groups',
        'unicast_bytes_forwarded',
        'unicast_packets_forwarded',
        'ipv6_link_local']

    def cli(self, interface='', vrf='', output=None):
        """Run the narrowest matching command (unless *output* is supplied)
        and parse it into a dict keyed by interface name.

        The parse is stateful: section-flag variables track which
        multi-line list (addresses, anycast, virtual, multicast groups)
        subsequent bare lines belong to.
        """
        # Pick the most specific command form for the given filters.
        if interface and vrf:
            cmd = self.cli_command[0].format(interface=interface, vrf=vrf)
        elif interface:
            cmd = self.cli_command[1].format(interface=interface)
        elif vrf:
            cmd = self.cli_command[2].format(vrf=vrf)
        else:
            cmd = self.cli_command[3]
        if output is None:
            out = self.device.execute(cmd)
        else:
            out = output
        # 'interface' is re-bound below as the current-section cursor;
        # drop the argument binding so a stale value cannot leak in.
        del interface

        # Init variables
        ipv6_interface_dict = {}
        ipv6_addresses = None       # True while inside 'IPv6 address:' list
        anycast_addresses = None    # True while inside anycast address list
        virtual_add = False         # truthy while collecting virtual addresses
        multicast_groups = False    # truthy while collecting joined groups

        for line in out.splitlines():
            line = line.rstrip()

            # IPv6 Interface Status for VRF "VRF1"
            p1 = re.compile(r'^\s*IPv6 *Interface *Status *for *VRF'
                            ' *(?P<vrf>\S+)$')
            m = p1.match(line)
            if m:
                vrf = m.groupdict()['vrf']
                # Strip the surrounding double quotes from the VRF name.
                vrf = vrf.replace('"',"")
                continue

            # <interface>, Interface status: <status>, iod: <number>
            p2 = re.compile(r'^\s*(?:(?P<interface>[a-zA-Z0-9\/\-\.]+)), Interface'
                            ' *status: *(?P<interface_status>[a-z\-\/]+),'
                            ' *iod: *(?P<iod>[0-9]+)$')
            m = p2.match(line)
            if m:
                interface = str(m.groupdict()['interface'])
                interface_status = m.groupdict()['interface_status']
                iod = int(m.groupdict()['iod'])
                if interface not in ipv6_interface_dict:
                    ipv6_interface_dict[interface] = {}
                ipv6_interface_dict[interface]['iod'] = iod
                ipv6_interface_dict[interface]['interface_status'] = interface_status
                ipv6_interface_dict[interface]['vrf'] = vrf
                ipv6_interface_dict[interface]['enabled'] = True
                # Reset the per-interface accumulators and section flags.
                ipv6_multicast_groups = []
                ipv6_virtual_groups = []
                ipv6_multicast_entries = multicast_groups = False
                continue

            # IPv6 address:  (header of the address list)
            p3_1 = re.compile(r'^\s*IPv6 address:$')
            m = p3_1.match(line)
            if m:
                ipv6_addresses = True
                anycast_addresses = False
                continue

            # Anycast configured addresses:  (header of the anycast list)
            p3_2 = re.compile(r'^\s*Anycast configured addresses:$')
            m = p3_2.match(line)
            if m:
                anycast_addresses = True
                ipv6_addresses = False
                continue

            # <ip>/<prefix_length> [VALID]  (entry of whichever list is active)
            p3_3 = re.compile(r'^\s*(?P<ip>[a-z0-9\:]+)'
                              '\/(?P<prefix_length>[0-9]+)'
                              ' *\[(?P<status>[a-zA-Z]+)\]$')
            m = p3_3.match(line)
            if m:
                ip = m.groupdict()['ip']
                prefix_length = m.groupdict()['prefix_length']
                status = m.groupdict()['status'].lower()
                address = ip + '/' + prefix_length
                if 'ipv6' not in ipv6_interface_dict[interface]:
                    ipv6_interface_dict[interface]['ipv6'] = {}
                if address not in ipv6_interface_dict[interface]['ipv6']:
                    ipv6_interface_dict[interface]['ipv6'][address] = {}
                ipv6_interface_dict[interface]['ipv6'][address]\
                    ['ip'] = ip
                ipv6_interface_dict[interface]['ipv6'][address]\
                    ['prefix_length'] = prefix_length
                # The active section flag decides which key the entry gets.
                if ipv6_addresses:
                    ipv6_interface_dict[interface]['ipv6'][address]\
                        ['status'] = status
                elif anycast_addresses:
                    ipv6_interface_dict[interface]['ipv6'][address]\
                        ['anycast'] = True
                continue

            # IPv6 subnet:  <prefix>
            p4 = re.compile(r'^\s*IPv6 *subnet:'
                            ' *(?P<ipv6_subnet>[a-z0-9\:\/]+)$')
            m = p4.match(line)
            if m:
                ipv6_subnet = m.groupdict()['ipv6_subnet']
                ipv6_interface_dict[interface]['ipv6']['ipv6_subnet'] = ipv6_subnet
                continue

            # IPv6 link-local address: <addr> (<state>) [STATE]
            p5 = re.compile(r'^\s*IPv6 *link-local *address:'
                            ' *(?P<ipv6_link_local>[a-z0-9\:\s]+)'
                            ' *\((?P<ipv6_link_local_state>[a-z]+)\)'
                            ' *\[(?P<ipv6_ll_state>[A-Z]+)\]$')
            m = p5.match(line)
            if m:
                ipv6_link_local = m.groupdict()['ipv6_link_local']
                ipv6_link_local_state = m.groupdict()['ipv6_link_local_state']
                ipv6_ll_state = m.groupdict()['ipv6_ll_state'].lower()
                if 'ipv6' not in ipv6_interface_dict[interface]:
                    ipv6_interface_dict[interface]['ipv6'] = {}
                ipv6_interface_dict[interface]['ipv6']['ipv6_link_local'] = ipv6_link_local
                ipv6_interface_dict[interface]['ipv6']['ipv6_link_local_state'] = ipv6_link_local_state
                ipv6_interface_dict[interface]['ipv6']['ipv6_ll_state'] = ipv6_ll_state
                continue

            # IPv6 virtual addresses configured: <word>  (single-value form)
            p6 = re.compile(r'^\s*IPv6 *virtual *addresses *configured:'
                            ' *(?P<ipv6_virtual_add>\w+)$')
            m = p6.match(line)
            if m:
                ipv6_virtual_add = m.groupdict()['ipv6_virtual_add']
                ipv6_interface_dict[interface]['ipv6']['ipv6_virtual_add'] = ipv6_virtual_add
                continue

            # Bare 'IPv6 virtual address(es) configured:' header — the
            # addresses follow on subsequent lines (collected below).
            p6_1 = re.compile(r'^\s*(IPv6 virtual *(?P<virtual_add>(addresses|address) configured:))$')
            m = p6_1.match(line)
            if m:
                virtual_add = m.groupdict()['virtual_add']
                ipv6_interface_dict[interface]['ipv6']['virtual_add'] = True
                continue

            # Continuation lines of the virtual-address list.
            if virtual_add:
                p6_2 = re.compile(r'^\s*(?P<ipv6_virtual_addresses>[a-z0-9\:\s]+)$')
                m = p6_2.match(line)
                if m:
                    ipv6_virtual_addresses = str(m.groupdict()['ipv6_virtual_addresses'])
                    # Whitespace-separated addresses on one line.
                    ipv6_virtual_addresses = [str(j) for j in ipv6_virtual_addresses.split()]
                    for add in ipv6_virtual_addresses:
                        ipv6_virtual_groups.append(add)
                    ipv6_interface_dict[interface]['ipv6']['ipv6_virtual_groups']\
                        = sorted(ipv6_virtual_groups)
                    continue

            # IPv6 multicast routing: <state>
            p7 = re.compile(r'^\s*IPv6 *multicast *routing:'
                            ' *(?P<ipv6_multicast_routing>[a-z]+)$')
            m = p7.match(line)
            if m:
                ipv6_multicast_routing = m.groupdict()['ipv6_multicast_routing']
                ipv6_interface_dict[interface]['ipv6']['ipv6_multicast_routing'] = ipv6_multicast_routing
                continue

            # IPv6 report link local: <state>
            p8 = re.compile(r'^\s*IPv6 *report *link *local:'
                            ' *(?P<ipv6_report_link_local>[a-z]+)$')
            m = p8.match(line)
            if m:
                ipv6_report_link_local = m.groupdict()['ipv6_report_link_local']
                ipv6_interface_dict[interface]['ipv6']['ipv6_report_link_local']\
                    = ipv6_report_link_local
                continue

            # IPv6 Forwarding feature: <state>
            p9 = re.compile(r'^\s*IPv6 *Forwarding *feature:'
                            ' *(?P<ipv6_forwarding_feature>[a-z]+)$')
            m = p9.match(line)
            if m:
                ipv6_forwarding_feature = m.groupdict()['ipv6_forwarding_feature']
                ipv6_interface_dict[interface]['ipv6']['ipv6_forwarding_feature']\
                    = ipv6_forwarding_feature
                continue

            # 'IPv6 multicast group(s) locally joined:' header — the group
            # addresses follow on subsequent lines (collected below).
            p10 = re.compile(r'^\s*(?P<multicast_groups>(IPv6 *multicast *(groups|group) *locally *joined:))$')
            m = p10.match(line)
            if m:
                # Leaving the virtual-address section, entering this one.
                virtual_add = False
                multicast_groups = m.groupdict()['multicast_groups']
                ipv6_interface_dict[interface]['ipv6']['multicast_groups'] = True
                continue

            # Continuation lines of the joined-groups list.
            if multicast_groups:
                p11 = re.compile(r'^\s*(?P<ipv6_multicast_group_addresses>[a-z0-9\(\)\:\s]+)$')
                m = p11.match(line)
                if m:
                    ipv6_multicast_group_addresses = str(m.groupdict()['ipv6_multicast_group_addresses'])
                    # Whitespace-separated addresses on one line.
                    ipv6_multicast_group_addresses = [str(i) for i in ipv6_multicast_group_addresses.split()]
                    for address in ipv6_multicast_group_addresses:
                        ipv6_multicast_groups.append(address)
                    ipv6_interface_dict[interface]['ipv6']['ipv6_multicast_groups']\
                        = sorted(ipv6_multicast_groups)
                    continue

            # IPv6 multicast (S,G) entries joined:  (header)
            p12 = re.compile(r'^\s*IPv6 *multicast *\(S\,G\) *entries *joined:$')
            m = p12.match(line)
            if m:
                ipv6_multicast_entries = True
                continue

            # Parenthesized (S,G) list on the following line.
            p12_1 = re.compile(r'^\s*\((?P<ip_list>.*)\)')
            m = p12_1.match(line)
            if m and ipv6_multicast_entries:
                ipv6_multicast_entries = m.groupdict()['ip_list']
                ipv6_interface_dict[interface]['ipv6']['ipv6_multicast_entries']\
                    = ipv6_multicast_entries
                continue

            # IPv6 MTU: <n> (using link MTU)
            p13 = re.compile(r'^\s*IPv6 *MTU: *(?P<ipv6_mtu>[0-9]+)'
                             ' *\(using *link *MTU\)$')
            m = p13.match(line)
            if m:
                ipv6_mtu = int(m.groupdict()['ipv6_mtu'])
                ipv6_interface_dict[interface]['ipv6']['ipv6_mtu'] = ipv6_mtu
                continue

            # IPv6 unicast reverse path forwarding: <mode>
            p14 = re.compile(r'^\s*IPv6 *unicast *reverse *path *forwarding:'
                             ' *(?P<ipv6_unicast_rev_path_forwarding>\w+)$')
            m = p14.match(line)
            if m:
                ipv6_unicast_rev_path_forwarding = m.groupdict()\
                    ['ipv6_unicast_rev_path_forwarding']
                ipv6_interface_dict[interface]['ipv6']\
                    ['ipv6_unicast_rev_path_forwarding']\
                    = ipv6_unicast_rev_path_forwarding
                continue

            # IPv6 load sharing: <mode>
            p15 = re.compile(r'^\s*IPv6 *load *sharing:'
                             ' *(?P<ipv6_load_sharing>\w+)$')
            m = p15.match(line)
            if m:
                ipv6_load_sharing = m.groupdict()['ipv6_load_sharing']
                ipv6_interface_dict[interface]['ipv6']['ipv6_load_sharing']\
                    = ipv6_load_sharing
                continue

            # IPv6 interface statistics last reset: <when>
            p16 = re.compile(r'^\s*IPv6 *interface *statistics *last *reset:'
                             ' *(?P<ipv6_last_reset>[a-z]+)$')
            m = p16.match(line)
            if m:
                ipv6_last_reset = m.groupdict()['ipv6_last_reset']
                ipv6_interface_dict[interface]['ipv6']['ipv6_last_reset']\
                    = ipv6_last_reset
                continue

            # Unicast packets: <forwarded>/<originated>/<consumed>
            p18 = re.compile(r'^\s*Unicast *packets:'
                             ' *(?P<unicast_packets_forwarded>[0-9]+)\/'
                             '(?P<unicast_packets_originated>[0-9]+)\/'
                             '(?P<unicast_packets_consumed>[0-9]+)$')
            m = p18.match(line)
            if m:
                if 'counters' not in ipv6_interface_dict[interface]['ipv6']:
                    ipv6_interface_dict[interface]['ipv6']['counters'] = {}
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['unicast_packets_forwarded'] = int(m.groupdict()['unicast_packets_forwarded'])
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['unicast_packets_originated'] = int(m.groupdict()['unicast_packets_originated'])
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['unicast_packets_consumed'] = int(m.groupdict()['unicast_packets_consumed'])
                continue

            # Unicast bytes: <forwarded>/<originated>/<consumed>
            p19 = re.compile(r'^\s*Unicast *bytes: *(?P<unicast_bytes_forwarded>[0-9]+)'
                             '\/(?P<unicast_bytes_originated>[0-9]+)\/'
                             '(?P<unicast_bytes_consumed>[0-9]+)$')
            m = p19.match(line)
            if m:
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['unicast_bytes_forwarded'] = int(m.groupdict()['unicast_bytes_forwarded'])
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['unicast_bytes_originated'] = int(m.groupdict()['unicast_bytes_originated'])
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['unicast_bytes_consumed'] = int(m.groupdict()['unicast_bytes_consumed'])
                continue

            # Multicast packets: <forwarded>/<originated>/<consumed>
            p20 = re.compile(r'^\s*Multicast *packets: *(?P<multicast_packets_forwarded>[0-9]+)'
                             '\/(?P<multicast_packets_originated>[0-9]+)\/'
                             '(?P<multicast_packets_consumed>[0-9]+)$')
            m = p20.match(line)
            if m:
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['multicast_packets_forwarded'] = int(m.groupdict()['multicast_packets_forwarded'])
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['multicast_packets_originated'] = int(m.groupdict()['multicast_packets_originated'])
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['multicast_packets_consumed'] = int(m.groupdict()['multicast_packets_consumed'])
                continue

            # Multicast bytes: <forwarded>/<originated>/<consumed>
            p21 = re.compile(r'^\s*Multicast *bytes: *(?P<multicast_bytes_forwarded>[0-9]+)\/'
                             '(?P<multicast_bytes_originated>[0-9]+)\/'
                             '(?P<multicast_bytes_consumed>[0-9]+)$')
            m = p21.match(line)
            if m:
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['multicast_bytes_forwarded'] = int(m.groupdict()['multicast_bytes_forwarded'])
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['multicast_bytes_originated'] = int(m.groupdict()['multicast_bytes_originated'])
                ipv6_interface_dict[interface]['ipv6']['counters']\
                    ['multicast_bytes_consumed'] = int(m.groupdict()['multicast_bytes_consumed'])
                continue

        return ipv6_interface_dict
class ShowIpInterfaceBriefSchema(MetaParser):
    """Schema for 'show ip interface brief'."""

    schema = {
        'interface': {
            Any(): {
                # SVI (Vlan) interfaces are additionally keyed by vlan id
                Optional('vlan_id'): {
                    Optional(Any()): {
                        'ip_address': str,
                        'interface_status': str,
                        Optional('ipaddress_extension'): str,
                    },
                },
                Optional('ip_address'): str,
                Optional('interface_status'): str,
                Optional('ipaddress_extension'): str,
            },
        },
    }
class ShowIpInterfaceBrief(ShowIpInterfaceBriefSchema):
    """Parser for 'show ip interface brief'.

    Returns a nested dict keyed by interface name; SVI (Vlan) interfaces
    are additionally keyed under their numeric 'vlan_id'.
    """

    cli_command = 'show ip interface brief'
    exclude = [
        '(tunnel.*)']

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Keep the executed command on the instance so a subclass can
        # substitute its own cli_command (see ShowIpInterfaceBriefPipeVlan).
        self.cmd = self.cli_command

    def cli(self, output=None):
        """Parse the command output.

        Args:
            output: pre-collected CLI output; when None the command is
                executed on the connected device.

        Returns:
            dict conforming to ShowIpInterfaceBriefSchema.
        """
        if output is None:
            out = self.device.execute(self.cmd)
        else:
            out = output

        interface_dict = {}

        # Patterns are compiled once, hoisted out of the per-line loop
        # (the original recompiled all three for every line of output).
        # Table header: "Interface  IP Address  Interface Status"
        p1 = re.compile(r'^\s*Interface +IP Address +Interface Status$')
        # Data row: interface name, IP address, status triple
        p2 = re.compile(r'^\s*(?P<interface>[a-zA-Z0-9\/\.\-]+) +(?P<ip_address>[a-z0-9\.]+) +(?P<interface_status>[a-z\-\/]+)$')
        # Continuation row carrying only an address qualifier, e.g. "(secondary)"
        p3 = re.compile(r'^\s*(?P<ipaddress_extension>\([a-z0-9]+\))$')

        for line in out.splitlines():
            line = line.rstrip()

            if p1.match(line):
                # header row carries no data
                continue

            m = p2.match(line)
            if m:
                interface = m.groupdict()['interface']
                intf_dict = interface_dict.setdefault(
                    'interface', {}).setdefault(interface, {})
                if 'Vlan' in interface:
                    # key SVIs by their numeric vlan id as well
                    vlan_id = str(int(re.search(r'\d+', interface).group()))
                    vlan_dict = intf_dict.setdefault(
                        'vlan_id', {}).setdefault(vlan_id, {})
                    vlan_dict['ip_address'] = m.groupdict()['ip_address']
                    vlan_dict['interface_status'] = \
                        m.groupdict()['interface_status']
                else:
                    intf_dict['ip_address'] = m.groupdict()['ip_address']
                    intf_dict['interface_status'] = \
                        m.groupdict()['interface_status']
                continue

            m = p3.match(line)
            if m:
                # NOTE: relies on a p2 row having been matched first so that
                # 'interface' (and 'vlan_id' for SVIs) are already bound —
                # same precondition as the original implementation.
                ipaddress_extension = m.groupdict()['ipaddress_extension']
                if 'Vlan' in interface:
                    target = interface_dict['interface'][interface]\
                        ['vlan_id'][vlan_id]
                else:
                    target = interface_dict['interface'][interface]
                target['ip_address'] = target['ip_address'] + ipaddress_extension
                continue

        return interface_dict
class ShowIpInterfaceBriefPipeVlan(ShowIpInterfaceBrief):
    """Parser for 'show ip interface brief | include Vlan' (SVIs only)."""
    cli_command = 'show ip interface brief | include Vlan'
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # override the command stored by the parent __init__ with the
        # Vlan-filtered variant; parsing logic is inherited unchanged
        self.cmd = self.cli_command
class ShowInterfaceBriefSchema(MetaParser):
    """Schema for 'show interface brief'."""

    schema = {
        'interface': {
            'ethernet': {
                Any(): {
                    'vlan': str,
                    'type': str,
                    'mode': str,
                    'status': str,
                    'speed': str,
                    'reason': str,
                    'port_ch': str,
                },
            },
            Optional('port'): {
                Any(): {
                    Optional('vrf'): str,
                    Optional('status'): str,
                    Optional('ip_address'): str,
                    Optional('speed'): str,
                    Optional('mtu'): int,
                },
            },
            Optional('port_channel'): {
                Any(): {
                    Optional('vlan'): str,
                    Optional('type'): str,
                    Optional('mode'): str,
                    Optional('status'): str,
                    Optional('speed'): str,
                    Optional('reason'): str,
                    Optional('protocol'): str,
                },
            },
            Optional('loopback'): {
                Any(): {
                    Optional('status'): str,
                    Optional('description'): str,
                },
            },
        },
    }
class ShowInterfaceBrief(ShowInterfaceBriefSchema):
    """Parser for 'show interface brief' and
    'show interface <interface> brief'."""

    cli_command = ['show interface brief',
                   'show interface {interface} brief']
    exclude = ['reason']

    def cli(self, interface=None, output=None):
        """Parse the command output into a dict per ShowInterfaceBriefSchema."""
        if output is None:
            if interface:
                cmd = self.cli_command[1].format(interface=interface)
            else:
                cmd = self.cli_command[0]
            output = self.device.execute(cmd)

        result = {}

        # Section headers (p1/p3/p5/p7) select which sub-dict the rows
        # that follow them are written into.
        p1 = re.compile(r'^Port +VRF +Status +IP Address +Speed +MTU$')
        p2 = re.compile(r'^(?P<port>[a-zA-Z0-9]+) +(?P<vrf>[a-zA-Z0-9\-]+)'
                        ' +(?P<status>[a-zA-Z]+) +(?P<ip_address>(\S+))'
                        ' +(?P<speed>[0-9]+) +(?P<mtu>[0-9]+)$')
        p3 = re.compile(r'^Ethernet +VLAN +Type +Mode +Status +Reason +Speed'
                        ' +Port$')
        p4 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/]+) +(?P<vlan>[a-zA-Z0-9\-]+)'
                        ' +(?P<type>[a-zA-Z]+) +(?P<mode>[a-z]+)'
                        ' +(?P<status>[a-z]+) +(?P<reason>[a-zA-Z\s]+)'
                        ' +(?P<speed>[0-9a-zA-Z\(\)\s]+)'
                        ' +(?P<port>[0-9\-]+)$')
        p5 = re.compile(r'^Port-channel +VLAN +Type +Mode +Status +Reason'
                        ' +Speed +Protocol$')
        p6 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/]+) +(?P<vlan>[a-zA-Z0-9\-]+)'
                        ' +(?P<type>[a-zA-Z]+) +(?P<mode>[a-z]+)'
                        ' +(?P<status>[a-z]+) +(?P<reason>[a-zA-Z\s]+)'
                        ' +(?P<speed>[0-9a-zA-Z\(\)\s]+)'
                        ' +(?P<protocol>[a-zA-Z0-9\-]+)$')
        p7 = re.compile(r'^Interface +Status +Description$')
        p8 = re.compile(r'^(?P<interface>[a-zA-Z0-9\/]+) +(?P<status>[a-z]+)'
                        ' +(?P<description>[a-zA-Z\s\-]+)$')

        for raw_line in output.splitlines():
            text = raw_line.strip()

            if p1.match(text):
                port_dict = result.setdefault('interface', {}).\
                    setdefault('port', {})
                continue

            match = p2.match(text)
            if match:
                grp = match.groupdict()
                entry = port_dict.\
                    setdefault(Common.convert_intf_name(grp['port']), {})
                entry.update({
                    'vrf': grp['vrf'],
                    'status': grp['status'],
                    'ip_address': grp['ip_address'],
                    'speed': grp['speed'],
                    'mtu': int(grp['mtu']),
                })
                continue

            if p3.match(text):
                eth_dict = result.setdefault('interface', {}).\
                    setdefault('ethernet', {})
                continue

            match = p4.match(text)
            if match:
                grp = match.groupdict()
                entry = eth_dict.\
                    setdefault(Common.convert_intf_name(grp['interface']), {})
                entry.update({
                    'vlan': grp['vlan'],
                    'type': grp['type'],
                    'mode': grp['mode'],
                    'status': grp['status'],
                    'reason': grp['reason'].strip(),
                    'speed': grp['speed'],
                    'port_ch': grp['port'],
                })
                continue

            if p5.match(text):
                pch_dict = result.setdefault('interface', {}).\
                    setdefault('port_channel', {})
                continue

            match = p6.match(text)
            if match:
                grp = match.groupdict()
                entry = pch_dict.\
                    setdefault(Common.convert_intf_name(grp['interface']), {})
                entry.update({
                    'vlan': grp['vlan'],
                    'type': grp['type'],
                    'mode': grp['mode'],
                    'status': grp['status'],
                    'reason': grp['reason'].strip(),
                    'speed': grp['speed'],
                    'protocol': grp['protocol'],
                })
                continue

            if p7.match(text):
                loopback_dict = result.setdefault('interface', {}).\
                    setdefault('loopback', {})
                continue

            match = p8.match(text)
            if match:
                grp = match.groupdict()
                entry = loopback_dict.\
                    setdefault(Common.convert_intf_name(grp['interface']), {})
                entry.update({
                    'status': grp['status'],
                    'description': grp['description'],
                })
                continue

        return result
class ShowRunningConfigInterfaceSchema(MetaParser):
    """Schema for 'show running-config interface <interface>'."""

    schema = {
        'interface': {
            Any(): {
                Optional('shutdown'): bool,
                Optional('switchport'): bool,
                Optional('switchport_mode'): str,
                Optional('trunk_vlans'): str,
                Optional('port_channel'): {
                    Optional('port_channel_mode'): str,
                    Optional('port_channel_int'): str,
                },
                Optional('host_reachability_protocol'): str,
                Optional('source_interface'): str,
                Optional('member_vni'): {
                    Any(): {
                        Optional('associate_vrf'): bool,
                        Optional('mcast_group'): str,
                        Optional('suppress_arp'): bool,
                    },
                },
            },
        },
    }
class ShowRunningConfigInterface(ShowRunningConfigInterfaceSchema):
    """Parser for 'show running-config interface <interface>'."""

    cli_command = 'show running-config interface {interface}'

    def cli(self, interface, output=None):
        """Parse the running configuration of *interface*.

        Args:
            interface: interface name substituted into the command.
            output: pre-collected CLI output; when None the command is
                executed on the connected device.
        """
        if output is None:
            out = self.device.execute(self.cli_command.format(interface=interface))
        else:
            out = output

        ret_dict = {}

        # Compiled once, hoisted out of the per-line loop (the original
        # recompiled every pattern for each line of output).
        p1 = re.compile(r'^interface +(?P<intf_name>\S+)$')
        p2 = re.compile(r'^\s*no shutdown$')
        p3 = re.compile(r'^\s*host-reachability protocol +(?P<protocol>[a-zA-Z]+)$')
        p4 = re.compile(r'^\s*source-interface +(?P<src_intf>[a-zA-Z0-9\-]+)$')
        p5 = re.compile(r'^\s*member vni +(?P<vni>[0-9\-]+)( +(?P<associate_vrf>[a-zA-Z\-]+))?$')
        p6 = re.compile(r'^\s*mcast-group +(?P<ip>[0-9\.]+)$')
        p7 = re.compile(r'^\s*suppress-arp$')
        p8 = re.compile(r'^switchport$')
        p9 = re.compile(r'^switchport +mode +(?P<mode>\S+)$')
        p10 = re.compile(r'^switchport +trunk +allowed +vlan +(?P<trunk_vlans>\S+)$')
        p11 = re.compile(r'^channel-group +(?P<port_channel_int>\d+) +mode +(?P<mode>\S+)$')

        for line in out.splitlines():
            line = line.strip()

            m = p1.match(line)
            if m:
                interface = str(m.groupdict()['intf_name'])
                interface_dict = ret_dict.setdefault('interface', {}). \
                    setdefault(interface, {})
                continue

            m = p2.match(line)
            if m:
                interface_dict['shutdown'] = False
                continue

            m = p3.match(line)
            if m:
                interface_dict['host_reachability_protocol'] = \
                    str(m.groupdict()['protocol'])
                continue

            m = p4.match(line)
            if m:
                interface_dict['source_interface'] = \
                    str(m.groupdict()['src_intf'])
                continue

            m = p5.match(line)
            if m:
                if 'member_vni' not in interface_dict:
                    interface_dict['member_vni'] = {}
                vni = str(m.groupdict()['vni'])
                if '-' in vni:
                    # expand a "first-last" vni range into individual members
                    vni_range = re.findall(r'(?P<first_vni>[0-9]+)\-(?P<last_vni>[0-9]+)?$', vni)
                    members = range(int(vni_range[0][0]), int(vni_range[0][1])+1)
                else:
                    members = [vni]
                for memb in members:
                    interface_dict['member_vni'][str(memb)] = {}
                    if m.groupdict()['associate_vrf']:
                        interface_dict['member_vni'][str(memb)]['associate_vrf'] = \
                            True
                continue

            m = p6.match(line)
            if m:
                # applies to the vni members captured by the preceding p5 line
                for memb in members:
                    interface_dict['member_vni'][str(memb)]['mcast_group'] = \
                        str(m.groupdict()['ip'])
                continue

            m = p7.match(line)
            if m:
                for memb in members:
                    interface_dict['member_vni'][str(memb)]['suppress_arp'] = \
                        True
                continue

            m = p8.match(line)
            if m:
                interface_dict.update({'switchport': True})
                continue

            m = p9.match(line)
            if m:
                group = m.groupdict()
                interface_dict.update({'switchport_mode': group['mode']})
                continue

            m = p10.match(line)
            if m:
                group = m.groupdict()
                interface_dict.update({'trunk_vlans': group['trunk_vlans']})
                continue

            m = p11.match(line)
            if m:
                group = m.groupdict()
                port_channel_dict = interface_dict.setdefault('port_channel', {})
                port_channel_dict.update({'port_channel_int': group['port_channel_int']})
                port_channel_dict.update({'port_channel_mode': group['mode']})
                continue

        return ret_dict
class ShowNveInterfaceSchema(MetaParser):
    """Schema for 'show nve interface <interface> detail'."""

    schema = {
        'interface': {
            Any(): {
                'state': str,
                Optional('encapsulation'): str,
                Optional('source_interface'): {
                    Any(): {
                        Optional('primary'): str,
                        Optional('secondary'): str,
                    },
                },
                Optional('vpc_capability'): {
                    Any(): {
                        Optional('notified'): bool,
                    },
                },
            },
        },
    }
class ShowNveInterface(ShowNveInterfaceSchema):
    """Parser for 'show nve interface <interface> detail'."""

    cli_command = 'show nve interface {interface} detail'

    def cli(self, interface, output=None):
        """Parse NVE interface detail into a dict per ShowNveInterfaceSchema."""
        cmd = ""
        if output is None:
            if interface:
                cmd = self.cli_command.format(interface=interface)
            out = self.device.execute(cmd)
        else:
            out = output

        nve_dict = {}

        # "Interface: <name>, State: <state>, encapsulation: <encap>"
        p1 = re.compile(r'^\s*Interface: +(?P<intf>[\w]+)\,'
                        ' +State: +(?P<state>[\w]+)\, +encapsulation:'
                        ' +(?P<encapsulation>[\w]+)$')
        # "Source-Interface: <intf> (primary: <ip>, secondary: <ip>)"
        p2 = re.compile(r'^\s*Source-Interface: +(?P<src_intf>[a-zA-Z0-9\-]+)'
                        ' +\(primary: +(?P<primary>[a-zA-Z0-9\.]+)\, +secondary:'
                        ' +(?P<secondary>[a-zA-Z0-9\.]+)\)$')
        # "VPC Capability: <capability> [<notified flag>]"
        p3 = re.compile(r'^\s*VPC Capability: +(?P<vpc>[a-zA-Z0-9\-]+)'
                        ' +\[(?P<notified>[a-zA-Z\-]+)\]$')

        for line in out.splitlines():
            line = line.rstrip()

            m = p1.match(line)
            if m:
                grp = m.groupdict()
                intf = str(grp['intf'])
                entry = nve_dict.setdefault('interface', {}).\
                    setdefault(intf, {})
                entry['state'] = str(grp['state'])
                entry['encapsulation'] = str(grp['encapsulation'])
                continue

            m = p2.match(line)
            if m:
                grp = m.groupdict()
                src_intf = str(grp['src_intf'])
                src_entry = nve_dict['interface'][intf].\
                    setdefault('source_interface', {}).setdefault(src_intf, {})
                src_entry['primary'] = str(grp['primary'])
                src_entry['secondary'] = str(grp['secondary'])
                continue

            m = p3.match(line)
            if m:
                grp = m.groupdict()
                vpc = str(grp['vpc'])
                vpc_entry = nve_dict['interface'][intf].\
                    setdefault('vpc_capability', {}).setdefault(vpc, {})
                # True only when the bracketed flag reads exactly "notified"
                vpc_entry['notified'] = (str(grp['notified']) == 'notified')
                continue

        return nve_dict
class ShowIpInterfaceBriefVrfAllSchema(MetaParser):
    """Schema for 'show ip interface brief vrf all'."""

    schema = {
        'interface': {
            Any(): {
                Optional('ip_address'): str,
                Optional('interface_status'): str,
            },
        },
    }
class ShowIpInterfaceBriefVrfAll(ShowIpInterfaceBriefVrfAllSchema):
    """Parser for 'show ip interface brief vrf all',
    optionally filtered with '| include <ip>'."""

    cli_command = ['show ip interface brief vrf all | include {ip}', 'show ip interface brief vrf all']

    def cli(self, ip='', output=None):
        """Parse the command output into a dict per the schema."""
        if output is None:
            cmd = self.cli_command[0].format(ip=ip) if ip else self.cli_command[1]
            out = self.device.execute(cmd)
        else:
            out = output

        parsed = {}

        # interface name, IP address, status triple
        p = re.compile(r'^\s*(?P<interface>[a-zA-Z0-9\/\.\-]+) '
                       '+(?P<ip_address>[a-z0-9\.]+) +(?P<interface_status>[a-z\-\/]+)$')

        for raw_line in out.splitlines():
            m = p.match(raw_line.rstrip())
            if not m:
                continue
            grp = m.groupdict()
            entry = parsed.setdefault('interface', {}).\
                setdefault(grp['interface'], {})
            entry['ip_address'] = str(grp['ip_address'])
            entry['interface_status'] = str(grp['interface_status'])

        return parsed
| true | true |
f73d37f0383eae749e39ec21857ae0e174a2118b | 266 | py | Python | Python/grading.py | mimseyedi/Kattis | a99ea2112544e89cc466feb7d81ffe6eb017f7e2 | [
"MIT"
] | null | null | null | Python/grading.py | mimseyedi/Kattis | a99ea2112544e89cc466feb7d81ffe6eb017f7e2 | [
"MIT"
] | null | null | null | Python/grading.py | mimseyedi/Kattis | a99ea2112544e89cc466feb7d81ffe6eb017f7e2 | [
"MIT"
] | null | null | null | a, b, c, d, e = map(int, input().split())
score = int(input())
if 100 >= score >= a:
print('A')
elif a > score >= b:
print('B')
elif b > score >= c:
print('C')
elif c > score >= d:
print('D')
elif d > score >= e:
print('E')
else:
print('F')
| 16.625 | 41 | 0.484962 | a, b, c, d, e = map(int, input().split())
score = int(input())
if 100 >= score >= a:
print('A')
elif a > score >= b:
print('B')
elif b > score >= c:
print('C')
elif c > score >= d:
print('D')
elif d > score >= e:
print('E')
else:
print('F')
| true | true |
f73d380cb15edd93a744b3cbc98bfa32b1a02445 | 20,643 | py | Python | wfs.py | B0und/WaifuFileSort | 377c2c73a1dc0eaf6c6fd709b41c2b85e2658cfd | [
"MIT"
] | 1 | 2021-09-16T22:35:45.000Z | 2021-09-16T22:35:45.000Z | wfs.py | B0und/WaifuFileSort | 377c2c73a1dc0eaf6c6fd709b41c2b85e2658cfd | [
"MIT"
] | null | null | null | wfs.py | B0und/WaifuFileSort | 377c2c73a1dc0eaf6c6fd709b41c2b85e2658cfd | [
"MIT"
] | 1 | 2021-09-16T22:35:46.000Z | 2021-09-16T22:35:46.000Z | # import convert_ui_and_qrc_files
import json
import os
import pathlib
import shutil
import sys
from functools import partial
from pprint import pprint
import imagehash
from PIL import Image
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QColor, QKeySequence, QPalette
from PyQt5.QtWidgets import (QAbstractItemView, QDialog, QFileDialog,
QFileSystemModel, QShortcut)
from send2trash import send2trash
from main_ui import Ui_MainWindow
from vers import get_version
# Ensure the staging folder for "deleted" files exists before the GUI starts.
# exist_ok avoids the check-then-create race of the original
# os.path.exists() guard.
os.makedirs('delete', exist_ok=True)
class MainWindow(QtWidgets.QMainWindow):
    """Main window of the Waifu File Sort application.

    A QFileSystemModel-backed tree view browses a user-chosen source
    folder, while a table widget holds destination folders, each with an
    optional single-character hotkey (QShortcut).  Selected files are
    moved to a destination or to a local "delete" staging folder; every
    move is pushed onto ``undo_list`` so it can be reverted with Ctrl+Z.
    """
    def __init__(self):
        """Build the UI, wire all signals/hotkeys, apply the dark theme."""
        super(MainWindow, self).__init__()
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        # style window
        self.setWindowTitle("Waifu File Sort")
        app_icon = QtGui.QIcon()
        app_icon.addFile("./icons/waifu_sort.png", QtCore.QSize(256, 256))
        self.setWindowIcon(app_icon)
        # store important data
        self.path_hotkey_dict = {}   # destination path -> hotkey text
        self.hotkey_path_dict = {}   # inverse mapping: hotkey -> path
        self._shortcut_list = []     # live QShortcut objects (for disabling)
        self.undo_list = []          # (source_path, destination_dir) history
        self.delete_folder = str(pathlib.Path(find_data_file("delete")))
        self.current_file_folder = ""
        self.pic_ext_list = [".jpg", ".png", ".webp", ".JPEG", ".PNG"]
        self.default_palette = QtGui.QGuiApplication.palette()
        # initialize source directory
        self.model = QFileSystemModel()
        self.model.setNameFilters(
            ["*.jpg", "*.png", "*.webp", "*.JPEG", "*.PNG"])
        self.model.setNameFilterDisables(False)
        self.ui.treeView.setModel(self.model)
        self.ui.treeView.setSelectionMode(QAbstractItemView.SingleSelection)
        self.ui.tableWidget.setSelectionMode(
            QtWidgets.QAbstractItemView.SingleSelection
        )
        self.ui.treeView.selectionModel().selectionChanged.connect(
            self.update_image_label
        )
        # hotkeys: Delete moves to the staging folder, Ctrl+Z undoes a move
        self.delShortcut = QShortcut(QKeySequence("Delete"), self)
        self.delShortcut.activated.connect(
            partial(self.move_cb, None, self.delete_folder)
        )
        self.undoShortcut = QShortcut(QKeySequence("Ctrl+Z"), self)
        self.undoShortcut.activated.connect(self.undo_cb)
        # callbacks init
        self.ui.browseBtn.clicked.connect(self.browse_source_click)
        self.ui.addDest.clicked.connect(self.add_dest_to_table)
        self.ui.removeDest.clicked.connect(self.remove_destination)
        self.ui.actionAbout.triggered.connect(self.show_about_dialog)
        self.ui.actionSave_Preset.triggered.connect(self.save_preset_cb)
        self.ui.actionLoad_Preset.triggered.connect(self.load_preset_cb)
        self.ui.actionClear_Delete_Folder.triggered.connect(self.clear_deleted_folder)
        self.ui.unmoveBtn.clicked.connect(self.undo_cb)
        self.ui.checkDeletedBtn.clicked.connect(self.check_deleted_btn_cb)
        self.ui.actionFancy.triggered.connect(self.set_fancy_style)
        self.ui.actionLight.triggered.connect(self.set_light_style)
        self.ui.actionDark.triggered.connect(self.set_dark_style)
        self.ui.comboMode.currentTextChanged.connect(self.change_file_type)
        self.ui.actionOrange.triggered.connect(self.set_orange_style)
        self.ui.actionRemove_Duplicates.triggered.connect(
            self.remove_duplicate_pictures)
        self.ui.actionRemove_Duplicates_Recursively.triggered.connect(
            partial(self.remove_duplicate_pictures, recursive_delete=True))
        self.set_dark_style()
    def remove_duplicate_pictures(self, recursive_delete=False):
        """Move perceptual-hash duplicates out of the source folder.

        Groups pictures by imagehash.phash; within each group the file
        with the shortest stem is kept, the rest go to the staging
        folder (or the system trash if the move fails).

        Args:
            recursive_delete: when True, scan subfolders too (glob '**/*').
        """
        root_path = self.model.rootPath()
        if root_path == ".":
            return  # no source folder has been chosen yet
        # confirm, to guard against a misclick
        msg = "Are you sure you want to delete (trash bin) duplicate pictures from source folder?"
        reply = QtWidgets.QMessageBox.question(self, 'Message',
                                               msg, QtWidgets.QMessageBox.Yes,
                                               QtWidgets.QMessageBox.No)
        if reply != QtWidgets.QMessageBox.Yes:
            return
        # gather pictures from root path
        all_pictures = []
        if recursive_delete:  # recursive search
            for path in pathlib.Path(root_path).glob(r'**/*'):
                if path.suffix in self.pic_ext_list:
                    all_pictures.append(path)
        else:  # non recursive
            for path in pathlib.Path(root_path).glob(r'*'):
                if path.suffix in self.pic_ext_list:
                    all_pictures.append(path)
        # add phash of picture to dictionary, replace with shorter filename if same hash found
        result_pics = {}
        for path in all_pictures:
            with Image.open(path) as img:
                img_hash = str(imagehash.phash(img))
            if img_hash in result_pics:
                dict_fname = result_pics[img_hash].stem
                if len(path.stem) < len(dict_fname):
                    result_pics[img_hash] = path
            else:
                result_pics[img_hash] = path
        # invert so kept paths become keys for a fast membership test below
        result_pics = {value: key for key, value in result_pics.items()}
        # delete all pictures that are not in a result_pics dict
        for path in all_pictures:
            if path not in result_pics:
                try:
                    shutil.move(str(path), self.delete_folder)
                except shutil.Error:
                    # e.g. a same-named file already in the staging folder
                    send2trash(str(path))
        QtWidgets.QMessageBox.about(self, "Info", "Done")
    def add_text_to_buttons(self):
        """Restore button captions (used by the text-based themes)."""
        self.ui.addDest.setText("Add")
        self.ui.removeDest.setText("Remove")
        self.ui.browseBtn.setText("Browse")
        self.ui.checkDeletedBtn.setText("Deleted")
        self.ui.unmoveBtn.setText("Undo")
    def remove_text_from_buttons(self):
        """Clear button captions (the 'Fancy' theme styles them itself)."""
        self.ui.addDest.setText("")
        self.ui.removeDest.setText("")
        self.ui.browseBtn.setText("")
        self.ui.checkDeletedBtn.setText("")
        self.ui.unmoveBtn.setText("")
    def set_orange_style(self):
        """Apply the orange stylesheet theme."""
        QtGui.QGuiApplication.setPalette(self.default_palette)
        with open("./styles/orange.css") as f:
            style_text = f.read()
        self.setStyleSheet(style_text)
        self.add_text_to_buttons()
    def set_fancy_style(self):
        """Apply the 'fancy' stylesheet theme (icon-only buttons)."""
        QtGui.QGuiApplication.setPalette(self.default_palette)
        with open("./styles/fancy.css") as f:
            style_text = f.read()
        self.setStyleSheet(style_text)
        self.remove_text_from_buttons()
    def set_light_style(self):
        """Revert to the default (light) palette with no stylesheet."""
        QtGui.QGuiApplication.setPalette(self.default_palette)
        self.setStyleSheet(" ")
        self.add_text_to_buttons()
    def set_dark_style(self):
        """Apply a hand-built dark QPalette (default theme at startup)."""
        self.setStyleSheet(" ")
        self.add_text_to_buttons()
        dark_palette = QPalette()
        dark_palette.setColor(QPalette.Window, QColor(53, 53, 53))
        dark_palette.setColor(QPalette.WindowText, Qt.white)
        dark_palette.setColor(QPalette.Base, QColor(25, 25, 25))
        dark_palette.setColor(QPalette.AlternateBase, QColor(53, 53, 53))
        dark_palette.setColor(QPalette.ToolTipBase, Qt.white)
        dark_palette.setColor(QPalette.ToolTipText, Qt.white)
        dark_palette.setColor(QPalette.Text, Qt.white)
        dark_palette.setColor(QPalette.Button, QColor(53, 53, 53))
        dark_palette.setColor(QPalette.ButtonText, Qt.white)
        dark_palette.setColor(QPalette.BrightText, Qt.red)
        dark_palette.setColor(QPalette.Link, QColor(42, 130, 218))
        dark_palette.setColor(QPalette.Highlight, QColor(42, 130, 218))
        dark_palette.setColor(QPalette.HighlightedText, Qt.black)
        QtGui.QGuiApplication.setPalette(dark_palette)
    def change_file_type(self):
        """
        Change source directory display between pictures and files
        """
        mode = self.ui.comboMode.currentText()
        if mode == "Files":
            self.model.setNameFilters(["*.*"])
        elif mode == "Pictures":
            self.model.setNameFilters(
                ["*.jpg", "*.png", "*.webp", ".JPEG", ".PNG"])
    def check_deleted_btn_cb(self):
        """
        This is supposed to change model view to the deleted folder,
        and second press is supposed to bring you back to the previous
        folder, but it doesnt work if you dont select an image in deleted folder.
        """
        ind = self.ui.treeView.currentIndex()
        file_path = self.model.filePath(ind)
        try:
            file_path = pathlib.Path(file_path).parents[0].resolve()
        except IndexError:
            # current index has no parent (e.g. a drive root) — nothing to do
            return
        if file_path != pathlib.Path(self.delete_folder).resolve():
            self.model.setRootPath(self.delete_folder)
            self.ui.treeView.setRootIndex(self.model.index(self.delete_folder))
        else:
            self.model.setRootPath(self.current_file_folder)
            self.ui.treeView.setRootIndex(
                self.model.index(self.current_file_folder))
    def clear_deleted_folder(self):
        """Send everything in the 'delete' staging folder to the system trash
        (after user confirmation)."""
        msg = "Are you sure you want to clear folder with deleted files?"
        reply = QtWidgets.QMessageBox.question(self, 'Message',
                                               msg, QtWidgets.QMessageBox.Yes,
                                               QtWidgets.QMessageBox.No)
        if reply == QtWidgets.QMessageBox.Yes:
            p = pathlib.Path(self.delete_folder)
            for filename in p.glob("*"):
                send2trash(str(filename))
            QtWidgets.QMessageBox.about(
                self, "Delete folder cleared", "Delete folder cleared"
            )
    def undo_cb(self):
        """
        Store actions in a list, revert them 1 by 1
        """
        try:
            last_operation = self.undo_list[-1]
        except IndexError:
            # nothing to undo
            return
        pic_path, dest_path = last_operation
        pic_path = pathlib.Path(pic_path)
        dest_path = pathlib.Path(dest_path, pic_path.name)
        # swap: move the file back from its destination to where it came from
        pic_path, dest_path = dest_path, pic_path
        # print(pic_path.parents[0], dest_path)
        try:
            shutil.move(pic_path, str(dest_path))
        except shutil.Error:
            QtWidgets.QMessageBox.warning(
                self, "Warning", "File already exists")
        except AttributeError:
            return
        except FileNotFoundError:
            return
        del self.undo_list[-1]
    def load_preset_cb(self):
        """
        Load user settings from file
        """
        dialog = QFileDialog()
        dialog.setFilter(dialog.filter() | QtCore.QDir.Hidden)
        dialog.setDefaultSuffix("json")
        dialog.setAcceptMode(QFileDialog.AcceptOpen)
        dialog.setNameFilters(["JSON (*.json)"])
        if dialog.exec_() == QDialog.Accepted:
            preset_path = dialog.selectedFiles()[0]
            self.path_hotkey_dict = load_json(preset_path)
            # drop entries whose hotkey is null in the JSON
            self.path_hotkey_dict = {
                k: v for k, v in self.path_hotkey_dict.items() if v is not None
            }
            print("loaded dict: ", self.path_hotkey_dict)
            self.hotkey_path_dict = {
                value: key for key, value in self.path_hotkey_dict.items()
            }
            self.restore_table_from_dict()
        else:
            print("Cancelled")
    def save_preset_cb(self):
        """
        Save user settings to file
        """
        dialog = QFileDialog()
        dialog.setFilter(dialog.filter() | QtCore.QDir.Hidden)
        dialog.setDefaultSuffix("json")
        dialog.setAcceptMode(QFileDialog.AcceptSave)
        dialog.setNameFilters(["JSON (*.json)"])
        if dialog.exec_() == QDialog.Accepted:
            preset_path = dialog.selectedFiles()[0]
            save_json(self.path_hotkey_dict, preset_path)
            QtWidgets.QMessageBox.information(
                self, "Saved", f"Saved hotkey preset: {preset_path}"
            )
        else:
            print("Cancelled")
    def show_about_dialog(self):
        """Show the About box with the current application version."""
        text = (
            "<center>"
            "<h1>Waifu File Sort</h1>"
            "⁣"
            "</center>"
            f"<p>Version {get_version()}<br/>"
        )
        QtWidgets.QMessageBox.about(self, "About Waifu File Sort", text)
    def restore_table_from_dict(self):
        """Rebuild the destination table and all hotkeys from
        ``path_hotkey_dict`` (used after preset load / row removal)."""
        self.clear_table_widget()
        row_counter = 0
        # disable stale shortcuts before re-creating them
        for shortcut in self._shortcut_list:
            shortcut.setEnabled(False)
        self._shortcut_list = []
        for path, hotkey in self.path_hotkey_dict.items():
            path = pathlib.Path(path)
            self.add_dest_to_table(dest_path=path.name, hotkey=hotkey)
            # tooltip stores the full path; the cell shows only the name
            self.ui.tableWidget.item(row_counter, 0).setToolTip(str(path))
            shortcut = QShortcut(QKeySequence(hotkey), self)
            # bind path as a default argument to avoid late-binding issues
            shortcut.activated.connect(
                lambda mypath=path: self.move_cb(input_path=mypath))
            self._shortcut_list.append(shortcut)
            row_counter += 1
    def clear_table_widget(self):
        """Remove all rows from the destination table."""
        self.ui.tableWidget.clearContents()
        self.ui.tableWidget.setRowCount(0)
    def remove_destination(self):
        """Remove the currently selected destination row.

        Deletes its entry from both mapping dicts, disables its shortcut,
        then rebuilds the table from the dict.
        """
        # get selected row or return
        # delete info from both dicts
        # reconstruct table widget from dict
        current_row = self.ui.tableWidget.currentRow()
        # print(f"{current_row=}")
        try:
            dest_path = self.ui.tableWidget.item(current_row, 0).toolTip()
        except AttributeError:
            # no row selected (item() returned None)
            return
        hotkey = self.ui.tableWidget.cellWidget(current_row, 2).text()
        # print("deleting hotkey: ", hotkey)
        self.delete_hotkey(hotkey)
        try:
            del self.path_hotkey_dict[dest_path]
        except KeyError:
            pass
        try:
            del self.hotkey_path_dict[hotkey]
        except KeyError:
            pass
        self.restore_table_from_dict()
    def delete_hotkey(self, name):
        """Disable every registered QShortcut whose key matches *name*
        (comparison is against the upper-cased key string)."""
        for shortcut in self._shortcut_list:
            key_name = shortcut.key().toString()
            # print("k-name: ", key_name)
            if key_name == name.upper():
                # print("DELETED hotkey: ", name)
                shortcut.setEnabled(False)
    def add_dest_to_table(self, dest_path=None, hotkey=None):
        """Append a destination row: path label, Browse button, hotkey
        line-edit and Send button.

        Args:
            dest_path: display text for the path cell (placeholder when None).
            hotkey: initial hotkey text for the line edit.
        """
        self.ui.tableWidget.setEditTriggers(
            self.ui.tableWidget.NoEditTriggers
        )  # disable editing and sorting
        self.ui.tableWidget.setSortingEnabled(False)
        row_counter = self.ui.tableWidget.rowCount()
        self.ui.tableWidget.insertRow(row_counter)
        ########################################################
        # add path label
        dest_path = QtWidgets.QTableWidgetItem(
            dest_path or "Press browse to specify destination directory"
        )
        self.ui.tableWidget.setItem(row_counter, 0, dest_path)
        ########################################################
        # add browse button
        browse_btn = QtWidgets.QPushButton("Browse")
        browse_btn.clicked.connect(
            lambda *args, row_ind=row_counter: self.browse_dest_click(row_ind)
        )
        self.ui.tableWidget.setCellWidget(row_counter, 1, browse_btn)
        ########################################################
        # add hotkey line edit
        hotkey_line = QtWidgets.QLineEdit()
        hotkey_line.setPlaceholderText("Add hotkey")
        hotkey_line.setText(hotkey)
        hotkey_line.setMaxLength(1)
        hotkey_line.textChanged.connect(
            lambda *args, row_ind=row_counter: self.hotkey_line_text_changed_cb(
                hotkey_line, row_ind
            )
        )
        self.ui.tableWidget.setCellWidget(row_counter, 2, hotkey_line)
        ########################################################
        # add send button
        send_btn = QtWidgets.QPushButton("Send")
        send_btn.clicked.connect(
            lambda *args, row_ind=row_counter: self.move_cb(row=row_ind)
        )
        self.ui.tableWidget.setCellWidget(row_counter, 3, send_btn)
    def move_cb(self, row=None, input_path=None):
        """Move the file selected in the tree view.

        Args:
            row: destination table row whose tooltip holds the target dir
                (used by the per-row Send button).
            input_path: explicit destination directory (used by hotkeys and
                the Delete shortcut); takes precedence over *row*.
        """
        ind = self.ui.treeView.currentIndex()
        pic_path = self.model.filePath(ind)
        dest_path = input_path or self.ui.tableWidget.item(row, 0).toolTip()
        dest_path = pathlib.Path(dest_path)
        if dest_path.is_dir() and str(dest_path) != ".":
            try:
                shutil.move(pic_path, str(dest_path))
            except shutil.Error:
                QtWidgets.QMessageBox.warning(
                    self, "Warning", "File already exists")
                return
            # record for undo only after a successful move
            self.undo_list.append((pic_path, str(dest_path)))
        else:
            # notify user
            QtWidgets.QMessageBox.warning(
                self, "Warning", "Press Browse to add destination folder"
            )
    def hotkey_line_text_changed_cb(self, hotkey_line, row_ind):
        """React to hotkey edits in a table row: validate, update both
        mapping dicts and (re)register the QShortcut."""
        hotkey = hotkey_line.text()
        path = self.ui.tableWidget.item(row_ind, 0).toolTip()
        if not path and len(hotkey) > 0:
            # a hotkey without a destination path is meaningless
            QtWidgets.QMessageBox.warning(
                self, "Warning", "Press Browse to add destination folder, add hotkey after"
            )
            hotkey_line.clear()
            hotkey_line.clearFocus()
            return
        # check if hotkey line edit is empty and delete hotkey
        if len(hotkey) == 0 and path != "":
            hotkey_to_del = self.path_hotkey_dict[path]
            self.delete_hotkey(hotkey_to_del)
        self.path_hotkey_dict[path] = hotkey
        self.hotkey_path_dict = {
            value: key for key, value in self.path_hotkey_dict.items()
        }
        shortcut = QShortcut(QKeySequence(hotkey), self)
        # self._shortcut_list.append(shortcut.key().toString())
        self._shortcut_list.append(shortcut)
        dest_path = self.hotkey_path_dict[hotkey]
        shortcut.activated.connect(lambda: self.move_cb(input_path=dest_path))
        if len(hotkey) > 0:
            hotkey_line.clearFocus()
    def browse_dest_click(self, caller_row):
        """Pick a destination directory for the given table row; the cell
        shows the folder name, the tooltip keeps the full path."""
        # print(caller_row)
        dialog = QFileDialog()
        folder_path = dialog.getExistingDirectory(None, "Select Folder")
        p = pathlib.Path(folder_path)
        if folder_path:
            self.ui.tableWidget.item(caller_row, 0).setText(p.name)
            self.ui.tableWidget.item(caller_row, 0).setToolTip(str(p))
            self.path_hotkey_dict[str(p)] = self.ui.tableWidget.cellWidget(
                caller_row, 2
            ).text()
    def browse_source_click(self):
        """Pick the source directory and point the file-system model at it."""
        dialog = QFileDialog()
        folder_path = dialog.getExistingDirectory(None, "Select Folder")
        if folder_path:
            self.model.setRootPath(folder_path)
            self.ui.treeView.setRootIndex(self.model.index(folder_path))
    def update_image_label(self):
        """Show the currently selected file as a scaled preview image."""
        ind = self.ui.treeView.currentIndex()
        file_path = self.model.filePath(ind)
        # keep track of current folder for check button return location
        try:
            path_to_current_folder = pathlib.Path(file_path).parents[0]
        except IndexError:
            return  # fix click on C drive crash
        if str(path_to_current_folder.resolve()) != str(
            pathlib.Path(self.delete_folder).resolve()
        ):
            self.current_file_folder = str(path_to_current_folder)
        pixmap = QtGui.QPixmap(file_path)
        pixmap = pixmap.scaled(
            self.ui.imageLabel.width(),
            self.ui.imageLabel.height(),
            QtCore.Qt.KeepAspectRatio,
        )
        self.ui.imageLabel.setPixmap(pixmap)
        self.ui.imageLabel.setAlignment(QtCore.Qt.AlignCenter)
def save_json(data, result_name_with_ext):
    """Serialize *data* as JSON text to the file *result_name_with_ext*."""
    serialized = json.dumps(data)
    with open(result_name_with_ext, "w") as sink:
        sink.write(serialized)
def load_json(input_name_and_ext):
    """Read and return the JSON object stored in *input_name_and_ext*."""
    with open(input_name_and_ext, "r") as source:
        return json.load(source)
def find_data_file(folder, filename=None):
    """Return the path of *folder* (and optional *filename*) relative to the
    application location.

    When running frozen (sys.frozen is set by freezers such as cx_Freeze),
    paths are resolved next to the executable; otherwise next to this module.
    """
    frozen = getattr(sys, "frozen", False)
    base_dir = os.path.dirname(sys.executable if frozen else __file__)
    parts = [base_dir, folder] + ([filename] if filename else [])
    return os.path.join(*parts)
if __name__ == "__main__":
    # Launch the Qt application with the Fusion widget style, show the
    # main window, and propagate the event-loop exit code to the shell.
    app = QtWidgets.QApplication(sys.argv)
    app.setStyle("Fusion")
    w = MainWindow()
    w.show()
    sys.exit(app.exec_())
| 37.877064 | 98 | 0.614688 |
import json
import os
import pathlib
import shutil
import sys
from functools import partial
from pprint import pprint
import imagehash
from PIL import Image
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QColor, QKeySequence, QPalette
from PyQt5.QtWidgets import (QAbstractItemView, QDialog, QFileDialog,
QFileSystemModel, QShortcut)
from send2trash import send2trash
from main_ui import Ui_MainWindow
from vers import get_version
# Ensure the staging folder for "deleted" files exists; exist_ok avoids the
# check-then-create race of the original os.path.exists() guard.
os.makedirs('delete', exist_ok=True)
class MainWindow(QtWidgets.QMainWindow):
    def __init__(self):
        """Build the main window: UI, file model, shortcuts and styling."""
        super(MainWindow, self).__init__()
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.setWindowTitle("Waifu File Sort")
        app_icon = QtGui.QIcon()
        app_icon.addFile("./icons/waifu_sort.png", QtCore.QSize(256, 256))
        self.setWindowIcon(app_icon)
        # Two-way mapping between destination folder paths and hotkeys.
        self.path_hotkey_dict = {}
        self.hotkey_path_dict = {}
        self._shortcut_list = []  # live QShortcut objects for destinations
        self.undo_list = []  # history of past moves, consumed by undo_cb
        self.delete_folder = str(pathlib.Path(find_data_file("delete")))
        # Folder to return to when toggling back from the "Deleted" view.
        self.current_file_folder = ""
        self.pic_ext_list = [".jpg", ".png", ".webp", ".JPEG", ".PNG"]
        self.default_palette = QtGui.QGuiApplication.palette()
        # File-system model restricted to picture files by default.
        self.model = QFileSystemModel()
        self.model.setNameFilters(
            ["*.jpg", "*.png", "*.webp", "*.JPEG", "*.PNG"])
        self.model.setNameFilterDisables(False)
        self.ui.treeView.setModel(self.model)
        self.ui.treeView.setSelectionMode(QAbstractItemView.SingleSelection)
        self.ui.tableWidget.setSelectionMode(
            QtWidgets.QAbstractItemView.SingleSelection
        )
        self.ui.treeView.selectionModel().selectionChanged.connect(
            self.update_image_label
        )
        # Global shortcuts: Delete -> move to delete folder, Ctrl+Z -> undo.
        self.delShortcut = QShortcut(QKeySequence("Delete"), self)
        self.delShortcut.activated.connect(
            partial(self.move_cb, None, self.delete_folder)
        )
        self.undoShortcut = QShortcut(QKeySequence("Ctrl+Z"), self)
        self.undoShortcut.activated.connect(self.undo_cb)
        # Wire buttons and menu actions to their callbacks.
        self.ui.browseBtn.clicked.connect(self.browse_source_click)
        self.ui.addDest.clicked.connect(self.add_dest_to_table)
        self.ui.removeDest.clicked.connect(self.remove_destination)
        self.ui.actionAbout.triggered.connect(self.show_about_dialog)
        self.ui.actionSave_Preset.triggered.connect(self.save_preset_cb)
        self.ui.actionLoad_Preset.triggered.connect(self.load_preset_cb)
        self.ui.actionClear_Delete_Folder.triggered.connect(self.clear_deleted_folder)
        self.ui.unmoveBtn.clicked.connect(self.undo_cb)
        self.ui.checkDeletedBtn.clicked.connect(self.check_deleted_btn_cb)
        self.ui.actionFancy.triggered.connect(self.set_fancy_style)
        self.ui.actionLight.triggered.connect(self.set_light_style)
        self.ui.actionDark.triggered.connect(self.set_dark_style)
        self.ui.comboMode.currentTextChanged.connect(self.change_file_type)
        self.ui.actionOrange.triggered.connect(self.set_orange_style)
        self.ui.actionRemove_Duplicates.triggered.connect(
            self.remove_duplicate_pictures)
        self.ui.actionRemove_Duplicates_Recursively.triggered.connect(
            partial(self.remove_duplicate_pictures, recursive_delete=True))
        self.set_dark_style()
def remove_duplicate_pictures(self, recursive_delete=False):
root_path = self.model.rootPath()
if root_path == ".":
return
msg = "Are you sure you want to delete (trash bin) duplicate pictures from source folder?"
reply = QtWidgets.QMessageBox.question(self, 'Message',
msg, QtWidgets.QMessageBox.Yes,
QtWidgets.QMessageBox.No)
if reply != QtWidgets.QMessageBox.Yes:
return
all_pictures = []
if recursive_delete:
for path in pathlib.Path(root_path).glob(r'**/*'):
if path.suffix in self.pic_ext_list:
all_pictures.append(path)
else:
for path in pathlib.Path(root_path).glob(r'*'):
if path.suffix in self.pic_ext_list:
all_pictures.append(path)
result_pics = {}
for path in all_pictures:
with Image.open(path) as img:
img_hash = str(imagehash.phash(img))
if img_hash in result_pics:
dict_fname = result_pics[img_hash].stem
if len(path.stem) < len(dict_fname):
result_pics[img_hash] = path
else:
result_pics[img_hash] = path
result_pics = {value: key for key, value in result_pics.items()}
for path in all_pictures:
if path not in result_pics:
try:
shutil.move(str(path), self.delete_folder)
except shutil.Error:
send2trash(str(path))
QtWidgets.QMessageBox.about(self, "Info", "Done")
def add_text_to_buttons(self):
self.ui.addDest.setText("Add")
self.ui.removeDest.setText("Remove")
self.ui.browseBtn.setText("Browse")
self.ui.checkDeletedBtn.setText("Deleted")
self.ui.unmoveBtn.setText("Undo")
def remove_text_from_buttons(self):
self.ui.addDest.setText("")
self.ui.removeDest.setText("")
self.ui.browseBtn.setText("")
self.ui.checkDeletedBtn.setText("")
self.ui.unmoveBtn.setText("")
def set_orange_style(self):
QtGui.QGuiApplication.setPalette(self.default_palette)
with open("./styles/orange.css") as f:
style_text = f.read()
self.setStyleSheet(style_text)
self.add_text_to_buttons()
def set_fancy_style(self):
QtGui.QGuiApplication.setPalette(self.default_palette)
with open("./styles/fancy.css") as f:
style_text = f.read()
self.setStyleSheet(style_text)
self.remove_text_from_buttons()
    def set_light_style(self):
        """Reset to the default (light) palette with no stylesheet."""
        QtGui.QGuiApplication.setPalette(self.default_palette)
        self.setStyleSheet(" ")
        self.add_text_to_buttons()
def set_dark_style(self):
self.setStyleSheet(" ")
self.add_text_to_buttons()
dark_palette = QPalette()
dark_palette.setColor(QPalette.Window, QColor(53, 53, 53))
dark_palette.setColor(QPalette.WindowText, Qt.white)
dark_palette.setColor(QPalette.Base, QColor(25, 25, 25))
dark_palette.setColor(QPalette.AlternateBase, QColor(53, 53, 53))
dark_palette.setColor(QPalette.ToolTipBase, Qt.white)
dark_palette.setColor(QPalette.ToolTipText, Qt.white)
dark_palette.setColor(QPalette.Text, Qt.white)
dark_palette.setColor(QPalette.Button, QColor(53, 53, 53))
dark_palette.setColor(QPalette.ButtonText, Qt.white)
dark_palette.setColor(QPalette.BrightText, Qt.red)
dark_palette.setColor(QPalette.Link, QColor(42, 130, 218))
dark_palette.setColor(QPalette.Highlight, QColor(42, 130, 218))
dark_palette.setColor(QPalette.HighlightedText, Qt.black)
QtGui.QGuiApplication.setPalette(dark_palette)
def change_file_type(self):
mode = self.ui.comboMode.currentText()
if mode == "Files":
self.model.setNameFilters(["*.*"])
elif mode == "Pictures":
self.model.setNameFilters(
["*.jpg", "*.png", "*.webp", ".JPEG", ".PNG"])
    def check_deleted_btn_cb(self):
        """Toggle the tree view between the delete folder and the last
        browsed folder."""
        ind = self.ui.treeView.currentIndex()
        file_path = self.model.filePath(ind)
        try:
            file_path = pathlib.Path(file_path).parents[0].resolve()
        except IndexError:
            return  # path has no parent (e.g. a drive root) - nothing to do
        if file_path != pathlib.Path(self.delete_folder).resolve():
            # Currently browsing a normal folder: jump into the delete view.
            self.model.setRootPath(self.delete_folder)
            self.ui.treeView.setRootIndex(self.model.index(self.delete_folder))
        else:
            # Already in the delete view: jump back to the remembered folder.
            self.model.setRootPath(self.current_file_folder)
            self.ui.treeView.setRootIndex(
                self.model.index(self.current_file_folder))
def clear_deleted_folder(self):
msg = "Are you sure you want to clear folder with deleted files?"
reply = QtWidgets.QMessageBox.question(self, 'Message',
msg, QtWidgets.QMessageBox.Yes,
QtWidgets.QMessageBox.No)
if reply == QtWidgets.QMessageBox.Yes:
p = pathlib.Path(self.delete_folder)
for filename in p.glob("*"):
send2trash(str(filename))
QtWidgets.QMessageBox.about(
self, "Delete folder cleared", "Delete folder cleared"
)
    def undo_cb(self):
        """Undo the most recent move by moving the file back to where it
        came from. No-op when the history is empty."""
        try:
            last_operation = self.undo_list[-1]
        except IndexError:
            return  # nothing to undo
        # Entry layout is presumably (original file path, destination
        # folder) - TODO confirm against move_cb, which is not visible here.
        pic_path, dest_path = last_operation
        pic_path = pathlib.Path(pic_path)
        dest_path = pathlib.Path(dest_path, pic_path.name)
        # Swap so we move the file from its current location back to origin.
        pic_path, dest_path = dest_path, pic_path
        try:
            shutil.move(pic_path, str(dest_path))
        except shutil.Error:
            # NOTE(review): after this warning the history entry is still
            # popped below, so the failed undo cannot be retried - confirm
            # this is intended.
            QtWidgets.QMessageBox.warning(
                self, "Warning", "File already exists")
        except AttributeError:
            return
        except FileNotFoundError:
            return
        del self.undo_list[-1]
    def load_preset_cb(self):
        """Load a hotkey preset from a user-chosen JSON file and rebuild
        the destination table from it."""
        dialog = QFileDialog()
        dialog.setFilter(dialog.filter() | QtCore.QDir.Hidden)
        dialog.setDefaultSuffix("json")
        dialog.setAcceptMode(QFileDialog.AcceptOpen)
        dialog.setNameFilters(["JSON (*.json)"])
        if dialog.exec_() == QDialog.Accepted:
            preset_path = dialog.selectedFiles()[0]
            self.path_hotkey_dict = load_json(preset_path)
            # Drop destinations that were saved without a hotkey.
            self.path_hotkey_dict = {
                k: v for k, v in self.path_hotkey_dict.items() if v is not None
            }
            print("loaded dict: ", self.path_hotkey_dict)
            # Keep the reverse mapping (hotkey -> path) in sync.
            self.hotkey_path_dict = {
                value: key for key, value in self.path_hotkey_dict.items()
            }
            self.restore_table_from_dict()
        else:
            print("Cancelled")
    def save_preset_cb(self):
        """Save the current hotkey mapping to a user-chosen JSON file."""
        dialog = QFileDialog()
        dialog.setFilter(dialog.filter() | QtCore.QDir.Hidden)
        dialog.setDefaultSuffix("json")
        dialog.setAcceptMode(QFileDialog.AcceptSave)
        dialog.setNameFilters(["JSON (*.json)"])
        if dialog.exec_() == QDialog.Accepted:
            preset_path = dialog.selectedFiles()[0]
            save_json(self.path_hotkey_dict, preset_path)
            QtWidgets.QMessageBox.information(
                self, "Saved", f"Saved hotkey preset: {preset_path}"
            )
        else:
            print("Cancelled")
    def show_about_dialog(self):
        """Show the About box with the application name and version."""
        text = (
            "<center>"
            "<h1>Waifu File Sort</h1>"
            "⁣"
            "</center>"
            f"<p>Version {get_version()}<br/>"
        )
        QtWidgets.QMessageBox.about(self, "About Waifu File Sort", text)
    def restore_table_from_dict(self):
        """Rebuild the destination table and its shortcuts from
        ``path_hotkey_dict``."""
        self.clear_table_widget()
        row_counter = 0
        # Disable the stale shortcuts before registering fresh ones.
        for shortcut in self._shortcut_list:
            shortcut.setEnabled(False)
        self._shortcut_list = []
        for path, hotkey in self.path_hotkey_dict.items():
            path = pathlib.Path(path)
            self.add_dest_to_table(dest_path=path.name, hotkey=hotkey)
            # The tooltip of column 0 carries the full destination path.
            self.ui.tableWidget.item(row_counter, 0).setToolTip(str(path))
            shortcut = QShortcut(QKeySequence(hotkey), self)
            # Bind the loop variable as a default argument to avoid the
            # late-binding closure pitfall.
            shortcut.activated.connect(
                lambda mypath=path: self.move_cb(input_path=mypath))
            self._shortcut_list.append(shortcut)
            row_counter += 1
    def clear_table_widget(self):
        """Remove all contents and rows from the destination table."""
        self.ui.tableWidget.clearContents()
        self.ui.tableWidget.setRowCount(0)
    def remove_destination(self):
        """Remove the selected destination row plus its hotkey mappings."""
        current_row = self.ui.tableWidget.currentRow()
        try:
            # Column 0's tooltip stores the full destination path.
            dest_path = self.ui.tableWidget.item(current_row, 0).toolTip()
        except AttributeError:
            return  # no row selected
        hotkey = self.ui.tableWidget.cellWidget(current_row, 2).text()
        self.delete_hotkey(hotkey)
        # Both dictionaries may already lack the entry; ignore misses.
        try:
            del self.path_hotkey_dict[dest_path]
        except KeyError:
            pass
        try:
            del self.hotkey_path_dict[hotkey]
        except KeyError:
            pass
        self.restore_table_from_dict()
    def delete_hotkey(self, name):
        """Disable the registered shortcut whose key sequence equals *name*.

        NOTE(review): the comparison uses ``name.upper()`` against the
        QKeySequence string - presumably hotkeys are single characters;
        confirm this matches multi-key sequences as well.
        """
        for shortcut in self._shortcut_list:
            key_name = shortcut.key().toString()
            if key_name == name.upper():
                shortcut.setEnabled(False)
def add_dest_to_table(self, dest_path=None, hotkey=None):
self.ui.tableWidget.setEditTriggers(
self.ui.tableWidget.NoEditTriggers
)
self.ui.tableWidget.setSortingEnabled(False)
row_counter = self.ui.tableWidget.rowCount()
self.ui.tableWidget.insertRow(row_counter)
| true | true |
f73d390476569b26a584abed301f23bccace7db5 | 4,340 | py | Python | helps/deap/deap-master/examples/ga/xkcd.py | GrimRanger/GeneticAlgorithm | 93fa476e82d610f8622276526baa269303a058e0 | [
"MIT"
] | 1 | 2019-03-21T15:32:19.000Z | 2019-03-21T15:32:19.000Z | helps/deap/deap-master/examples/ga/xkcd.py | GrimRanger/GeneticAlgorithm | 93fa476e82d610f8622276526baa269303a058e0 | [
"MIT"
] | null | null | null | helps/deap/deap-master/examples/ga/xkcd.py | GrimRanger/GeneticAlgorithm | 93fa476e82d610f8622276526baa269303a058e0 | [
"MIT"
] | null | null | null | # This file is part of DEAP.
#
# DEAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# DEAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with DEAP. If not, see <http://www.gnu.org/licenses/>.
"""This example shows a possible answer to a problem that can be found in this
xkcd comics: http://xkcd.com/287/. In the comic, the characters want to get
exactly 15.05$ worth of appetizers, as fast as possible."""
import random
from operator import attrgetter
from collections import Counter
# We delete the reduction function of the Counter because it doesn't copy added
# attributes. Because we create a class that inherit from the Counter, the
# fitness attribute was not copied by the deepcopy.
del Counter.__reduce__
import numpy
from deap import algorithms
from deap import base
from deap import creator
from deap import tools
IND_INIT_SIZE = 3
# Create the item dictionary: item id is an integer, and value is
# a (name, weight, value) 3-uple. Since the comic didn't specified a time for
# each menu item, random was called to generate a time.
ITEMS_NAME = "Mixed Fruit", "French Fries", "Side Salad", "Hot Wings", "Mozzarella Sticks", "Sampler Plate"
ITEMS_PRICE = 2.15, 2.75, 3.35, 3.55, 4.2, 5.8
ITEMS = dict((name, (price, random.uniform(1, 5))) for name, price in zip(ITEMS_NAME, ITEMS_PRICE))
creator.create("Fitness", base.Fitness, weights=(-1.0, -1.0))
creator.create("Individual", Counter, fitness=creator.Fitness)
toolbox = base.Toolbox()
toolbox.register("attr_item", random.choice, ITEMS_NAME)
toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_item, IND_INIT_SIZE)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
def evalXKCD(individual, target_price):
    """Return the fitness of an order: (price error, cooking time).

    The price error is the absolute difference to *target_price*; the
    time is the longest single item time, assuming the chef cooks every
    item in parallel.
    """
    total_price = sum(ITEMS[item][0] * count
                      for item, count in individual.items())
    longest_time = max(ITEMS[item][1] for item in individual)
    return abs(total_price - target_price), longest_time
def cxCounter(ind1, ind2, indpb):
    """Swap the count of each particular item between the two
    individuals with probability *indpb* (uniform crossover)."""
    for item in ITEMS:
        if random.random() < indpb:
            ind1[item], ind2[item] = ind2[item], ind1[item]
    return ind1, ind2
def mutCounter(individual):
    """Mutate an order by adding or removing one random item.

    Items whose count would drop below zero are deleted outright.
    """
    if random.random() > 0.5:
        individual.update([random.choice(ITEMS_NAME)])
        return individual,
    item = random.choice(ITEMS_NAME)
    individual.subtract([item])
    if individual[item] < 0:
        del individual[item]
    return individual,
toolbox.register("evaluate", evalXKCD, target_price=15.05)
toolbox.register("mate", cxCounter, indpb=0.5)
toolbox.register("mutate", mutCounter)
toolbox.register("select", tools.selNSGA2)
def main():
    """Run a (mu + lambda) evolution and return (population, stats, hof)."""
    NGEN = 40
    MU = 100
    LAMBDA = 200
    CXPB = 0.3
    MUTPB = 0.6
    pop = toolbox.population(n=MU)
    # Pareto front keeps every non-dominated (price error, time) solution.
    hof = tools.ParetoFront()
    price_stats = tools.Statistics(key=lambda ind: ind.fitness.values[0])
    time_stats = tools.Statistics(key=lambda ind: ind.fitness.values[1])
    stats = tools.MultiStatistics(price=price_stats, time=time_stats)
    stats.register("avg", numpy.mean, axis=0)
    stats.register("std", numpy.std, axis=0)
    stats.register("min", numpy.min, axis=0)
    algorithms.eaMuPlusLambda(pop, toolbox, MU, LAMBDA, CXPB, MUTPB, NGEN,
                              stats, halloffame=hof)
    return pop, stats, hof
if __name__ == "__main__":
    _, _, hof = main()
    # Plot the resulting Pareto front: price error vs. total time.
    from matplotlib import pyplot as plt
    error_price = [i.fitness.values[0] for i in hof]
    time = [i.fitness.values[1] for i in hof]
    plt.plot(error_price, time, 'bo')
    plt.xlabel("Price difference")
    plt.ylabel("Total time")
    plt.show()
| 37.73913 | 107 | 0.700461 |
import random
from operator import attrgetter
from collections import Counter
# attributes. Because we create a class that inherit from the Counter, the
# fitness attribute was not copied by the deepcopy.
del Counter.__reduce__
import numpy
from deap import algorithms
from deap import base
from deap import creator
from deap import tools
IND_INIT_SIZE = 3
# Create the item dictionary: item id is an integer, and value is
# a (name, weight, value) 3-uple. Since the comic didn't specified a time for
ITEMS_NAME = "Mixed Fruit", "French Fries", "Side Salad", "Hot Wings", "Mozzarella Sticks", "Sampler Plate"
ITEMS_PRICE = 2.15, 2.75, 3.35, 3.55, 4.2, 5.8
ITEMS = dict((name, (price, random.uniform(1, 5))) for name, price in zip(ITEMS_NAME, ITEMS_PRICE))
creator.create("Fitness", base.Fitness, weights=(-1.0, -1.0))
creator.create("Individual", Counter, fitness=creator.Fitness)
toolbox = base.Toolbox()
toolbox.register("attr_item", random.choice, ITEMS_NAME)
toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_item, IND_INIT_SIZE)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
def evalXKCD(individual, target_price):
    """Return (|price - target_price|, max item cooking time)."""
    price = 0.0
    times = list()
    for item, number in individual.items():
        price += ITEMS[item][0] * number
        times.append(ITEMS[item][1])
    return abs(price - target_price), max(times)
def cxCounter(ind1, ind2, indpb):
    """Swap each item's count between the individuals with prob. *indpb*."""
    for key in ITEMS.keys():
        if random.random() < indpb:
            ind1[key], ind2[key] = ind2[key], ind1[key]
    return ind1, ind2
def mutCounter(individual):
    """Add or remove a random item; purge counts that go negative."""
    if random.random() > 0.5:
        individual.update([random.choice(ITEMS_NAME)])
    else:
        val = random.choice(ITEMS_NAME)
        individual.subtract([val])
        if individual[val] < 0:
            del individual[val]
    return individual,
toolbox.register("evaluate", evalXKCD, target_price=15.05)
toolbox.register("mate", cxCounter, indpb=0.5)
toolbox.register("mutate", mutCounter)
toolbox.register("select", tools.selNSGA2)
def main():
    """Evolve the menu orders and return (population, stats, hall of fame)."""
    NGEN = 40
    MU = 100
    LAMBDA = 200
    CXPB = 0.3
    MUTPB = 0.6
    pop = toolbox.population(n=MU)
    hof = tools.ParetoFront()
    price_stats = tools.Statistics(key=lambda ind: ind.fitness.values[0])
    time_stats = tools.Statistics(key=lambda ind: ind.fitness.values[1])
    stats = tools.MultiStatistics(price=price_stats, time=time_stats)
    stats.register("avg", numpy.mean, axis=0)
    stats.register("std", numpy.std, axis=0)
    stats.register("min", numpy.min, axis=0)
    algorithms.eaMuPlusLambda(pop, toolbox, MU, LAMBDA, CXPB, MUTPB, NGEN,
                              stats, halloffame=hof)
    return pop, stats, hof
if __name__ == "__main__":
    _, _, hof = main()
    # Visualize the Pareto front of price error against total time.
    from matplotlib import pyplot as plt
    error_price = [i.fitness.values[0] for i in hof]
    time = [i.fitness.values[1] for i in hof]
    plt.plot(error_price, time, 'bo')
    plt.xlabel("Price difference")
    plt.ylabel("Total time")
    plt.show()
| true | true |
f73d3af72113a5cab3585ebe6b786dc9ff9d06cd | 818 | py | Python | mher/common/tests/test_schedules.py | YangRui2015/Modular_HER | 77acca83d6849d140ab893ec1b472b71e1da08d4 | [
"MIT"
] | 13 | 2020-10-31T15:01:44.000Z | 2021-11-30T06:51:15.000Z | mher/common/tests/test_schedules.py | YangRui2015/Modular_HER | 77acca83d6849d140ab893ec1b472b71e1da08d4 | [
"MIT"
] | 1 | 2021-06-22T14:25:07.000Z | 2021-06-23T11:47:37.000Z | mher/common/tests/test_schedules.py | YangRui2015/Modular_HER | 77acca83d6849d140ab893ec1b472b71e1da08d4 | [
"MIT"
] | 2 | 2020-11-24T03:26:32.000Z | 2021-05-14T01:20:57.000Z | import numpy as np
from mher.common.schedules import ConstantSchedule, PiecewiseSchedule
def test_piecewise_schedule():
    """PiecewiseSchedule interpolates linearly between the (time, value)
    endpoints and returns ``outside_value`` outside the covered range."""
    ps = PiecewiseSchedule([(-5, 100), (5, 200), (10, 50), (100, 50), (200, -50)], outside_value=500)
    assert np.isclose(ps.value(-10), 500)  # before the first endpoint
    assert np.isclose(ps.value(0), 150)  # midway between 100 and 200
    assert np.isclose(ps.value(5), 200)
    assert np.isclose(ps.value(9), 80)
    assert np.isclose(ps.value(50), 50)  # flat segment
    assert np.isclose(ps.value(80), 50)
    assert np.isclose(ps.value(150), 0)
    assert np.isclose(ps.value(175), -25)
    assert np.isclose(ps.value(201), 500)  # past the last endpoint
    assert np.isclose(ps.value(500), 500)
    assert np.isclose(ps.value(200 - 1e-10), -50)
def test_constant_schedule():
    """A ConstantSchedule must return its constant for every time step."""
    schedule = ConstantSchedule(5)
    for step in range(-100, 100):
        assert np.isclose(schedule.value(step), 5)
| 30.296296 | 101 | 0.661369 | import numpy as np
from mher.common.schedules import ConstantSchedule, PiecewiseSchedule
def test_piecewise_schedule():
ps = PiecewiseSchedule([(-5, 100), (5, 200), (10, 50), (100, 50), (200, -50)], outside_value=500)
assert np.isclose(ps.value(-10), 500)
assert np.isclose(ps.value(0), 150)
assert np.isclose(ps.value(5), 200)
assert np.isclose(ps.value(9), 80)
assert np.isclose(ps.value(50), 50)
assert np.isclose(ps.value(80), 50)
assert np.isclose(ps.value(150), 0)
assert np.isclose(ps.value(175), -25)
assert np.isclose(ps.value(201), 500)
assert np.isclose(ps.value(500), 500)
assert np.isclose(ps.value(200 - 1e-10), -50)
def test_constant_schedule():
cs = ConstantSchedule(5)
for i in range(-100, 100):
assert np.isclose(cs.value(i), 5)
| true | true |
f73d3bccaa66ceecd5d19e2515f4373f729193e3 | 18,073 | py | Python | homeassistant/components/here_travel_time/sensor.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 4 | 2020-07-29T17:47:10.000Z | 2020-09-16T13:39:13.000Z | homeassistant/components/here_travel_time/sensor.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 6 | 2020-11-08T19:40:10.000Z | 2022-03-01T11:11:07.000Z | homeassistant/components/here_travel_time/sensor.py | klauern/home-assistant-core | c18ba6aec0627e6afb6442c678edb5ff2bb17db6 | [
"Apache-2.0"
] | 5 | 2020-03-29T00:29:13.000Z | 2021-09-06T20:58:40.000Z | """Support for HERE travel time sensors."""
from datetime import datetime, timedelta
import logging
from typing import Callable, Dict, Optional, Union
import herepy
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_MODE,
CONF_MODE,
CONF_NAME,
CONF_UNIT_SYSTEM,
CONF_UNIT_SYSTEM_IMPERIAL,
CONF_UNIT_SYSTEM_METRIC,
EVENT_HOMEASSISTANT_START,
TIME_MINUTES,
)
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.helpers import location
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import DiscoveryInfoType
import homeassistant.util.dt as dt
_LOGGER = logging.getLogger(__name__)
CONF_DESTINATION_LATITUDE = "destination_latitude"
CONF_DESTINATION_LONGITUDE = "destination_longitude"
CONF_DESTINATION_ENTITY_ID = "destination_entity_id"
CONF_ORIGIN_LATITUDE = "origin_latitude"
CONF_ORIGIN_LONGITUDE = "origin_longitude"
CONF_ORIGIN_ENTITY_ID = "origin_entity_id"
CONF_API_KEY = "api_key"
CONF_TRAFFIC_MODE = "traffic_mode"
CONF_ROUTE_MODE = "route_mode"
CONF_ARRIVAL = "arrival"
CONF_DEPARTURE = "departure"
DEFAULT_NAME = "HERE Travel Time"
TRAVEL_MODE_BICYCLE = "bicycle"
TRAVEL_MODE_CAR = "car"
TRAVEL_MODE_PEDESTRIAN = "pedestrian"
TRAVEL_MODE_PUBLIC = "publicTransport"
TRAVEL_MODE_PUBLIC_TIME_TABLE = "publicTransportTimeTable"
TRAVEL_MODE_TRUCK = "truck"
TRAVEL_MODE = [
TRAVEL_MODE_BICYCLE,
TRAVEL_MODE_CAR,
TRAVEL_MODE_PEDESTRIAN,
TRAVEL_MODE_PUBLIC,
TRAVEL_MODE_PUBLIC_TIME_TABLE,
TRAVEL_MODE_TRUCK,
]
TRAVEL_MODES_PUBLIC = [TRAVEL_MODE_PUBLIC, TRAVEL_MODE_PUBLIC_TIME_TABLE]
TRAVEL_MODES_VEHICLE = [TRAVEL_MODE_CAR, TRAVEL_MODE_TRUCK]
TRAVEL_MODES_NON_VEHICLE = [TRAVEL_MODE_BICYCLE, TRAVEL_MODE_PEDESTRIAN]
TRAFFIC_MODE_ENABLED = "traffic_enabled"
TRAFFIC_MODE_DISABLED = "traffic_disabled"
ROUTE_MODE_FASTEST = "fastest"
ROUTE_MODE_SHORTEST = "shortest"
ROUTE_MODE = [ROUTE_MODE_FASTEST, ROUTE_MODE_SHORTEST]
ICON_BICYCLE = "mdi:bike"
ICON_CAR = "mdi:car"
ICON_PEDESTRIAN = "mdi:walk"
ICON_PUBLIC = "mdi:bus"
ICON_TRUCK = "mdi:truck"
UNITS = [CONF_UNIT_SYSTEM_METRIC, CONF_UNIT_SYSTEM_IMPERIAL]
ATTR_DURATION = "duration"
ATTR_DISTANCE = "distance"
ATTR_ROUTE = "route"
ATTR_ORIGIN = "origin"
ATTR_DESTINATION = "destination"
ATTR_UNIT_SYSTEM = CONF_UNIT_SYSTEM
ATTR_TRAFFIC_MODE = CONF_TRAFFIC_MODE
ATTR_DURATION_IN_TRAFFIC = "duration_in_traffic"
ATTR_ORIGIN_NAME = "origin_name"
ATTR_DESTINATION_NAME = "destination_name"
SCAN_INTERVAL = timedelta(minutes=5)
NO_ROUTE_ERROR_MESSAGE = "HERE could not find a route based on the input"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Inclusive(
CONF_DESTINATION_LATITUDE, "destination_coordinates"
): cv.latitude,
vol.Inclusive(
CONF_DESTINATION_LONGITUDE, "destination_coordinates"
): cv.longitude,
vol.Exclusive(CONF_DESTINATION_LATITUDE, "destination"): cv.latitude,
vol.Exclusive(CONF_DESTINATION_ENTITY_ID, "destination"): cv.entity_id,
vol.Inclusive(CONF_ORIGIN_LATITUDE, "origin_coordinates"): cv.latitude,
vol.Inclusive(CONF_ORIGIN_LONGITUDE, "origin_coordinates"): cv.longitude,
vol.Exclusive(CONF_ORIGIN_LATITUDE, "origin"): cv.latitude,
vol.Exclusive(CONF_ORIGIN_ENTITY_ID, "origin"): cv.entity_id,
vol.Optional(CONF_DEPARTURE): cv.time,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MODE, default=TRAVEL_MODE_CAR): vol.In(TRAVEL_MODE),
vol.Optional(CONF_ROUTE_MODE, default=ROUTE_MODE_FASTEST): vol.In(ROUTE_MODE),
vol.Optional(CONF_TRAFFIC_MODE, default=False): cv.boolean,
vol.Optional(CONF_UNIT_SYSTEM): vol.In(UNITS),
}
)
PLATFORM_SCHEMA = vol.All(
cv.has_at_least_one_key(CONF_DESTINATION_LATITUDE, CONF_DESTINATION_ENTITY_ID),
cv.has_at_least_one_key(CONF_ORIGIN_LATITUDE, CONF_ORIGIN_ENTITY_ID),
cv.key_value_schemas(
CONF_MODE,
{
None: PLATFORM_SCHEMA,
TRAVEL_MODE_BICYCLE: PLATFORM_SCHEMA,
TRAVEL_MODE_CAR: PLATFORM_SCHEMA,
TRAVEL_MODE_PEDESTRIAN: PLATFORM_SCHEMA,
TRAVEL_MODE_PUBLIC: PLATFORM_SCHEMA,
TRAVEL_MODE_TRUCK: PLATFORM_SCHEMA,
TRAVEL_MODE_PUBLIC_TIME_TABLE: PLATFORM_SCHEMA.extend(
{
vol.Exclusive(CONF_ARRIVAL, "arrival_departure"): cv.time,
vol.Exclusive(CONF_DEPARTURE, "arrival_departure"): cv.time,
}
),
},
),
)
async def async_setup_platform(
    hass: HomeAssistant,
    config: Dict[str, Union[str, bool]],
    async_add_entities: Callable,
    discovery_info: Optional[DiscoveryInfoType] = None,
) -> None:
    """Set up the HERE travel time platform."""
    api_key = config[CONF_API_KEY]
    here_client = herepy.RoutingApi(api_key)
    # Validate the API key with a throw-away request before creating
    # any entity; a bad key aborts the setup with a logged error.
    if not await hass.async_add_executor_job(
        _are_valid_client_credentials, here_client
    ):
        _LOGGER.error(
            "Invalid credentials. This error is returned if the specified token was invalid or no contract could be found for this token."
        )
        return
    # Origin and destination each come either as fixed coordinates or as
    # an entity id that is resolved to coordinates on every update.
    if config.get(CONF_ORIGIN_LATITUDE) is not None:
        origin = f"{config[CONF_ORIGIN_LATITUDE]},{config[CONF_ORIGIN_LONGITUDE]}"
        origin_entity_id = None
    else:
        origin = None
        origin_entity_id = config[CONF_ORIGIN_ENTITY_ID]
    if config.get(CONF_DESTINATION_LATITUDE) is not None:
        destination = (
            f"{config[CONF_DESTINATION_LATITUDE]},{config[CONF_DESTINATION_LONGITUDE]}"
        )
        destination_entity_id = None
    else:
        destination = None
        destination_entity_id = config[CONF_DESTINATION_ENTITY_ID]
    travel_mode = config[CONF_MODE]
    traffic_mode = config[CONF_TRAFFIC_MODE]
    route_mode = config[CONF_ROUTE_MODE]
    name = config[CONF_NAME]
    # Default to the Home Assistant instance's unit system when not set.
    units = config.get(CONF_UNIT_SYSTEM, hass.config.units.name)
    arrival = config.get(CONF_ARRIVAL)
    departure = config.get(CONF_DEPARTURE)
    here_data = HERETravelTimeData(
        here_client, travel_mode, traffic_mode, route_mode, units, arrival, departure
    )
    sensor = HERETravelTimeSensor(
        name, origin, destination, origin_entity_id, destination_entity_id, here_data
    )
    async_add_entities([sensor])
def _are_valid_client_credentials(here_client: herepy.RoutingApi) -> bool:
    """Check if the provided credentials are correct using defaults."""
    # Request a route between two known-good coordinates; only an
    # authentication failure matters here, the route itself is discarded.
    known_working_origin = [38.9, -77.04833]
    known_working_destination = [39.0, -77.1]
    try:
        here_client.car_route(
            known_working_origin,
            known_working_destination,
            [
                herepy.RouteMode[ROUTE_MODE_FASTEST],
                herepy.RouteMode[TRAVEL_MODE_CAR],
                herepy.RouteMode[TRAFFIC_MODE_DISABLED],
            ],
        )
    except herepy.InvalidCredentialsError:
        return False
    return True
class HERETravelTimeSensor(Entity):
    """Representation of a HERE travel time sensor."""

    def __init__(
        self,
        name: str,
        origin: str,
        destination: str,
        origin_entity_id: str,
        destination_entity_id: str,
        here_data: "HERETravelTimeData",
    ) -> None:
        """Initialize the sensor.

        Exactly one of *origin*/*origin_entity_id* (and likewise for the
        destination) is expected to be set; entity ids are resolved to
        coordinates on each update instead.
        """
        self._name = name
        self._origin_entity_id = origin_entity_id
        self._destination_entity_id = destination_entity_id
        self._here_data = here_data
        self._unit_of_measurement = TIME_MINUTES
        self._attrs = {
            ATTR_UNIT_SYSTEM: self._here_data.units,
            ATTR_MODE: self._here_data.travel_mode,
            ATTR_TRAFFIC_MODE: self._here_data.traffic_mode,
        }
        # Fixed coordinates can be pushed to the data object right away.
        if self._origin_entity_id is None:
            self._here_data.origin = origin
        if self._destination_entity_id is None:
            self._here_data.destination = destination

    async def async_added_to_hass(self) -> None:
        """Delay the sensor update to avoid entity not found warnings."""
        @callback
        def delayed_sensor_update(event):
            """Update sensor after Home Assistant started."""
            self.async_schedule_update_ha_state(True)
        self.hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_START, delayed_sensor_update
        )

    @property
    def state(self) -> Optional[str]:
        """Return the state of the sensor (travel time in whole minutes)."""
        # Prefer the traffic-adjusted time when traffic mode is enabled.
        if self._here_data.traffic_mode:
            if self._here_data.traffic_time is not None:
                return str(round(self._here_data.traffic_time / 60))
        if self._here_data.base_time is not None:
            return str(round(self._here_data.base_time / 60))
        return None

    @property
    def name(self) -> str:
        """Get the name of the sensor."""
        return self._name

    @property
    def device_state_attributes(
        self,
    ) -> Optional[Dict[str, Union[None, float, str, bool]]]:
        """Return the state attributes (durations in minutes)."""
        # No attributes until the first successful update has run.
        if self._here_data.base_time is None:
            return None
        res = self._attrs
        if self._here_data.attribution is not None:
            res[ATTR_ATTRIBUTION] = self._here_data.attribution
        res[ATTR_DURATION] = self._here_data.base_time / 60
        res[ATTR_DISTANCE] = self._here_data.distance
        res[ATTR_ROUTE] = self._here_data.route
        res[ATTR_DURATION_IN_TRAFFIC] = self._here_data.traffic_time / 60
        res[ATTR_ORIGIN] = self._here_data.origin
        res[ATTR_DESTINATION] = self._here_data.destination
        res[ATTR_ORIGIN_NAME] = self._here_data.origin_name
        res[ATTR_DESTINATION_NAME] = self._here_data.destination_name
        return res

    @property
    def unit_of_measurement(self) -> str:
        """Return the unit this state is expressed in."""
        return self._unit_of_measurement

    @property
    def icon(self) -> str:
        """Icon to use in the frontend depending on travel_mode."""
        if self._here_data.travel_mode == TRAVEL_MODE_BICYCLE:
            return ICON_BICYCLE
        if self._here_data.travel_mode == TRAVEL_MODE_PEDESTRIAN:
            return ICON_PEDESTRIAN
        if self._here_data.travel_mode in TRAVEL_MODES_PUBLIC:
            return ICON_PUBLIC
        if self._here_data.travel_mode == TRAVEL_MODE_TRUCK:
            return ICON_TRUCK
        return ICON_CAR

    async def async_update(self) -> None:
        """Update Sensor Information."""
        # Convert device_trackers to HERE friendly location
        if self._origin_entity_id is not None:
            self._here_data.origin = await self._get_location_from_entity(
                self._origin_entity_id
            )
        if self._destination_entity_id is not None:
            self._here_data.destination = await self._get_location_from_entity(
                self._destination_entity_id
            )
        # The HERE request is blocking, so run it in the executor pool.
        await self.hass.async_add_executor_job(self._here_data.update)

    async def _get_location_from_entity(self, entity_id: str) -> Optional[str]:
        """Get the location from the entity state or attributes."""
        entity = self.hass.states.get(entity_id)
        if entity is None:
            _LOGGER.error("Unable to find entity %s", entity_id)
            return None
        # Check if the entity has location attributes
        if location.has_location(entity):
            return self._get_location_from_attributes(entity)
        # Check if device is in a zone
        zone_entity = self.hass.states.get(f"zone.{entity.state}")
        if location.has_location(zone_entity):
            _LOGGER.debug(
                "%s is in %s, getting zone location", entity_id, zone_entity.entity_id
            )
            return self._get_location_from_attributes(zone_entity)
        # Check if state is valid coordinate set
        if self._entity_state_is_valid_coordinate_set(entity.state):
            return entity.state
        _LOGGER.error(
            "The state of %s is not a valid set of coordinates: %s",
            entity_id,
            entity.state,
        )
        return None

    @staticmethod
    def _entity_state_is_valid_coordinate_set(state: str) -> bool:
        """Check that the given string is a valid set of coordinates."""
        schema = vol.Schema(cv.gps)
        try:
            coordinates = state.split(",")
            schema(coordinates)
            return True
        except (vol.MultipleInvalid):
            return False

    @staticmethod
    def _get_location_from_attributes(entity: State) -> str:
        """Get the lat/long string from an entities attributes."""
        attr = entity.attributes
        return f"{attr.get(ATTR_LATITUDE)},{attr.get(ATTR_LONGITUDE)}"
class HERETravelTimeData:
"""HERETravelTime data object."""
    def __init__(
        self,
        here_client: herepy.RoutingApi,
        travel_mode: str,
        traffic_mode: bool,
        route_mode: str,
        units: str,
        arrival: datetime,
        departure: datetime,
    ) -> None:
        """Initialize herepy."""
        # origin/destination are "lat,lon" strings, filled in by the sensor.
        self.origin = None
        self.destination = None
        self.travel_mode = travel_mode
        self.traffic_mode = traffic_mode
        self.route_mode = route_mode
        self.arrival = arrival
        self.departure = departure
        # Result fields, populated by update().
        self.attribution = None
        self.traffic_time = None
        self.distance = None
        self.route = None
        self.base_time = None
        self.origin_name = None
        self.destination_name = None
        self.units = units
        self._client = here_client
        self.combine_change = True
def update(self) -> None:
"""Get the latest data from HERE."""
if self.traffic_mode:
traffic_mode = TRAFFIC_MODE_ENABLED
else:
traffic_mode = TRAFFIC_MODE_DISABLED
if self.destination is not None and self.origin is not None:
# Convert location to HERE friendly location
destination = self.destination.split(",")
origin = self.origin.split(",")
arrival = self.arrival
if arrival is not None:
arrival = convert_time_to_isodate(arrival)
departure = self.departure
if departure is not None:
departure = convert_time_to_isodate(departure)
if departure is None and arrival is None:
departure = "now"
_LOGGER.debug(
"Requesting route for origin: %s, destination: %s, route_mode: %s, mode: %s, traffic_mode: %s, arrival: %s, departure: %s",
origin,
destination,
herepy.RouteMode[self.route_mode],
herepy.RouteMode[self.travel_mode],
herepy.RouteMode[traffic_mode],
arrival,
departure,
)
try:
response = self._client.public_transport_timetable(
origin,
destination,
self.combine_change,
[
herepy.RouteMode[self.route_mode],
herepy.RouteMode[self.travel_mode],
herepy.RouteMode[traffic_mode],
],
arrival=arrival,
departure=departure,
)
except herepy.NoRouteFoundError:
# Better error message for cryptic no route error codes
_LOGGER.error(NO_ROUTE_ERROR_MESSAGE)
return
_LOGGER.debug("Raw response is: %s", response.response)
# pylint: disable=no-member
source_attribution = response.response.get("sourceAttribution")
if source_attribution is not None:
self.attribution = self._build_hass_attribution(source_attribution)
# pylint: disable=no-member
route = response.response["route"]
summary = route[0]["summary"]
waypoint = route[0]["waypoint"]
self.base_time = summary["baseTime"]
if self.travel_mode in TRAVEL_MODES_VEHICLE:
self.traffic_time = summary["trafficTime"]
else:
self.traffic_time = self.base_time
distance = summary["distance"]
if self.units == CONF_UNIT_SYSTEM_IMPERIAL:
# Convert to miles.
self.distance = distance / 1609.344
else:
# Convert to kilometers
self.distance = distance / 1000
# pylint: disable=no-member
self.route = response.route_short
self.origin_name = waypoint[0]["mappedRoadName"]
self.destination_name = waypoint[1]["mappedRoadName"]
@staticmethod
def _build_hass_attribution(source_attribution: Dict) -> Optional[str]:
"""Build a hass frontend ready string out of the sourceAttribution."""
suppliers = source_attribution.get("supplier")
if suppliers is not None:
supplier_titles = []
for supplier in suppliers:
title = supplier.get("title")
if title is not None:
supplier_titles.append(title)
joined_supplier_titles = ",".join(supplier_titles)
attribution = f"With the support of {joined_supplier_titles}. All information is provided without warranty of any kind."
return attribution
def convert_time_to_isodate(timestr: str) -> str:
    """Combine a wall-clock time string such as "08:00:00" with today's date.

    If the resulting moment already lies in the past, roll it over to the
    same time tomorrow, and return it in ISO 8601 format.
    """
    moment = datetime.combine(dt.start_of_local_day(), dt.parse_time(timestr))
    if moment < datetime.now():
        moment += timedelta(days=1)
    return moment.isoformat()
| 35.646943 | 139 | 0.655176 | from datetime import datetime, timedelta
import logging
from typing import Callable, Dict, Optional, Union
import herepy
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_MODE,
CONF_MODE,
CONF_NAME,
CONF_UNIT_SYSTEM,
CONF_UNIT_SYSTEM_IMPERIAL,
CONF_UNIT_SYSTEM_METRIC,
EVENT_HOMEASSISTANT_START,
TIME_MINUTES,
)
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.helpers import location
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import DiscoveryInfoType
import homeassistant.util.dt as dt
_LOGGER = logging.getLogger(__name__)

# Configuration option names.
CONF_DESTINATION_LATITUDE = "destination_latitude"
CONF_DESTINATION_LONGITUDE = "destination_longitude"
CONF_DESTINATION_ENTITY_ID = "destination_entity_id"
CONF_ORIGIN_LATITUDE = "origin_latitude"
CONF_ORIGIN_LONGITUDE = "origin_longitude"
CONF_ORIGIN_ENTITY_ID = "origin_entity_id"
CONF_API_KEY = "api_key"
CONF_TRAFFIC_MODE = "traffic_mode"
CONF_ROUTE_MODE = "route_mode"
CONF_ARRIVAL = "arrival"
CONF_DEPARTURE = "departure"

DEFAULT_NAME = "HERE Travel Time"

# Travel mode identifiers understood by the HERE routing API.
TRAVEL_MODE_BICYCLE = "bicycle"
TRAVEL_MODE_CAR = "car"
TRAVEL_MODE_PEDESTRIAN = "pedestrian"
TRAVEL_MODE_PUBLIC = "publicTransport"
TRAVEL_MODE_PUBLIC_TIME_TABLE = "publicTransportTimeTable"
TRAVEL_MODE_TRUCK = "truck"
TRAVEL_MODE = [
    TRAVEL_MODE_BICYCLE,
    TRAVEL_MODE_CAR,
    TRAVEL_MODE_PEDESTRIAN,
    TRAVEL_MODE_PUBLIC,
    TRAVEL_MODE_PUBLIC_TIME_TABLE,
    TRAVEL_MODE_TRUCK,
]

# Mode groupings used when interpreting the routing response.
TRAVEL_MODES_PUBLIC = [TRAVEL_MODE_PUBLIC, TRAVEL_MODE_PUBLIC_TIME_TABLE]
TRAVEL_MODES_VEHICLE = [TRAVEL_MODE_CAR, TRAVEL_MODE_TRUCK]
TRAVEL_MODES_NON_VEHICLE = [TRAVEL_MODE_BICYCLE, TRAVEL_MODE_PEDESTRIAN]

TRAFFIC_MODE_ENABLED = "traffic_enabled"
TRAFFIC_MODE_DISABLED = "traffic_disabled"

ROUTE_MODE_FASTEST = "fastest"
ROUTE_MODE_SHORTEST = "shortest"
ROUTE_MODE = [ROUTE_MODE_FASTEST, ROUTE_MODE_SHORTEST]

# Frontend icons per travel mode.
ICON_BICYCLE = "mdi:bike"
ICON_CAR = "mdi:car"
ICON_PEDESTRIAN = "mdi:walk"
ICON_PUBLIC = "mdi:bus"
ICON_TRUCK = "mdi:truck"

UNITS = [CONF_UNIT_SYSTEM_METRIC, CONF_UNIT_SYSTEM_IMPERIAL]

# Extra state attribute names exposed by the sensor.
ATTR_DURATION = "duration"
ATTR_DISTANCE = "distance"
ATTR_ROUTE = "route"
ATTR_ORIGIN = "origin"
ATTR_DESTINATION = "destination"
ATTR_UNIT_SYSTEM = CONF_UNIT_SYSTEM
ATTR_TRAFFIC_MODE = CONF_TRAFFIC_MODE
ATTR_DURATION_IN_TRAFFIC = "duration_in_traffic"
ATTR_ORIGIN_NAME = "origin_name"
ATTR_DESTINATION_NAME = "destination_name"

SCAN_INTERVAL = timedelta(minutes=5)

NO_ROUTE_ERROR_MESSAGE = "HERE could not find a route based on the input"
# Base option schema. vol.Inclusive requires latitude/longitude to appear
# together; vol.Exclusive makes coordinates and entity ids mutually
# exclusive per endpoint.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_API_KEY): cv.string,
        vol.Inclusive(
            CONF_DESTINATION_LATITUDE, "destination_coordinates"
        ): cv.latitude,
        vol.Inclusive(
            CONF_DESTINATION_LONGITUDE, "destination_coordinates"
        ): cv.longitude,
        vol.Exclusive(CONF_DESTINATION_LATITUDE, "destination"): cv.latitude,
        vol.Exclusive(CONF_DESTINATION_ENTITY_ID, "destination"): cv.entity_id,
        vol.Inclusive(CONF_ORIGIN_LATITUDE, "origin_coordinates"): cv.latitude,
        vol.Inclusive(CONF_ORIGIN_LONGITUDE, "origin_coordinates"): cv.longitude,
        vol.Exclusive(CONF_ORIGIN_LATITUDE, "origin"): cv.latitude,
        vol.Exclusive(CONF_ORIGIN_ENTITY_ID, "origin"): cv.entity_id,
        vol.Optional(CONF_DEPARTURE): cv.time,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_MODE, default=TRAVEL_MODE_CAR): vol.In(TRAVEL_MODE),
        vol.Optional(CONF_ROUTE_MODE, default=ROUTE_MODE_FASTEST): vol.In(ROUTE_MODE),
        vol.Optional(CONF_TRAFFIC_MODE, default=False): cv.boolean,
        vol.Optional(CONF_UNIT_SYSTEM): vol.In(UNITS),
    }
)

# Wrap the base schema: an origin and a destination (coordinates or entity
# id) are mandatory, and "arrival" is only accepted for the public
# transport time table mode (exclusive with "departure").
PLATFORM_SCHEMA = vol.All(
    cv.has_at_least_one_key(CONF_DESTINATION_LATITUDE, CONF_DESTINATION_ENTITY_ID),
    cv.has_at_least_one_key(CONF_ORIGIN_LATITUDE, CONF_ORIGIN_ENTITY_ID),
    cv.key_value_schemas(
        CONF_MODE,
        {
            None: PLATFORM_SCHEMA,
            TRAVEL_MODE_BICYCLE: PLATFORM_SCHEMA,
            TRAVEL_MODE_CAR: PLATFORM_SCHEMA,
            TRAVEL_MODE_PEDESTRIAN: PLATFORM_SCHEMA,
            TRAVEL_MODE_PUBLIC: PLATFORM_SCHEMA,
            TRAVEL_MODE_TRUCK: PLATFORM_SCHEMA,
            TRAVEL_MODE_PUBLIC_TIME_TABLE: PLATFORM_SCHEMA.extend(
                {
                    vol.Exclusive(CONF_ARRIVAL, "arrival_departure"): cv.time,
                    vol.Exclusive(CONF_DEPARTURE, "arrival_departure"): cv.time,
                }
            ),
        },
    ),
)
async def async_setup_platform(
    hass: HomeAssistant,
    config: Dict[str, Union[str, bool]],
    async_add_entities: Callable,
    discovery_info: Optional[DiscoveryInfoType] = None,
) -> None:
    """Set up the HERE travel time platform."""
    api_key = config[CONF_API_KEY]
    here_client = herepy.RoutingApi(api_key)
    # Probe the credentials with a known-good request before doing anything.
    if not await hass.async_add_executor_job(
        _are_valid_client_credentials, here_client
    ):
        _LOGGER.error(
            "Invalid credentials. This error is returned if the specified token was invalid or no contract could be found for this token."
        )
        return
    # Static coordinates take precedence; otherwise track an entity.
    if config.get(CONF_ORIGIN_LATITUDE) is not None:
        origin = f"{config[CONF_ORIGIN_LATITUDE]},{config[CONF_ORIGIN_LONGITUDE]}"
        origin_entity_id = None
    else:
        origin = None
        origin_entity_id = config[CONF_ORIGIN_ENTITY_ID]
    if config.get(CONF_DESTINATION_LATITUDE) is not None:
        destination = (
            f"{config[CONF_DESTINATION_LATITUDE]},{config[CONF_DESTINATION_LONGITUDE]}"
        )
        destination_entity_id = None
    else:
        destination = None
        destination_entity_id = config[CONF_DESTINATION_ENTITY_ID]
    travel_mode = config[CONF_MODE]
    traffic_mode = config[CONF_TRAFFIC_MODE]
    route_mode = config[CONF_ROUTE_MODE]
    name = config[CONF_NAME]
    # Fall back to the Home Assistant instance's configured unit system.
    units = config.get(CONF_UNIT_SYSTEM, hass.config.units.name)
    arrival = config.get(CONF_ARRIVAL)
    departure = config.get(CONF_DEPARTURE)
    here_data = HERETravelTimeData(
        here_client, travel_mode, traffic_mode, route_mode, units, arrival, departure
    )
    sensor = HERETravelTimeSensor(
        name, origin, destination, origin_entity_id, destination_entity_id, here_data
    )
    async_add_entities([sensor])
def _are_valid_client_credentials(here_client: herepy.RoutingApi) -> bool:
    """Check the credentials by requesting a route known to exist."""
    # Two points in the Washington, D.C. area; only used to probe whether
    # the API key is accepted, the actual route result is discarded.
    known_working_origin = [38.9, -77.04833]
    known_working_destination = [39.0, -77.1]
    try:
        here_client.car_route(
            known_working_origin,
            known_working_destination,
            [
                herepy.RouteMode[ROUTE_MODE_FASTEST],
                herepy.RouteMode[TRAVEL_MODE_CAR],
                herepy.RouteMode[TRAFFIC_MODE_DISABLED],
            ],
        )
    except herepy.InvalidCredentialsError:
        return False
    return True
class HERETravelTimeSensor(Entity):
    """Sensor exposing the HERE travel time between origin and destination."""

    def __init__(
        self,
        name: str,
        origin: str,
        destination: str,
        origin_entity_id: str,
        destination_entity_id: str,
        here_data: "HERETravelTimeData",
    ) -> None:
        """Initialize the sensor."""
        self._name = name
        self._origin_entity_id = origin_entity_id
        self._destination_entity_id = destination_entity_id
        self._here_data = here_data
        self._unit_of_measurement = TIME_MINUTES
        self._attrs = {
            ATTR_UNIT_SYSTEM: self._here_data.units,
            ATTR_MODE: self._here_data.travel_mode,
            ATTR_TRAFFIC_MODE: self._here_data.traffic_mode,
        }
        # Static coordinates are known now; entity-based ones are resolved
        # on every update instead.
        if self._origin_entity_id is None:
            self._here_data.origin = origin
        if self._destination_entity_id is None:
            self._here_data.destination = destination

    async def async_added_to_hass(self) -> None:
        """Delay the first update until Home Assistant has started."""

        @callback
        def delayed_sensor_update(event):
            """Trigger a sensor update once startup is complete."""
            self.async_schedule_update_ha_state(True)

        self.hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_START, delayed_sensor_update
        )

    @property
    def state(self) -> Optional[str]:
        """Return the travel time in whole minutes, or None if unknown."""
        if self._here_data.traffic_mode:
            if self._here_data.traffic_time is not None:
                return str(round(self._here_data.traffic_time / 60))
        if self._here_data.base_time is not None:
            return str(round(self._here_data.base_time / 60))
        return None

    @property
    def name(self) -> str:
        """Return the name of the sensor."""
        return self._name

    @property
    def device_state_attributes(
        self,
    ) -> Optional[Dict[str, Union[None, float, str, bool]]]:
        """Return the state attributes, or None before the first update."""
        if self._here_data.base_time is None:
            return None
        res = self._attrs
        if self._here_data.attribution is not None:
            res[ATTR_ATTRIBUTION] = self._here_data.attribution
        # Durations are reported in minutes, distance in the unit system
        # chosen at setup time.
        res[ATTR_DURATION] = self._here_data.base_time / 60
        res[ATTR_DISTANCE] = self._here_data.distance
        res[ATTR_ROUTE] = self._here_data.route
        res[ATTR_DURATION_IN_TRAFFIC] = self._here_data.traffic_time / 60
        res[ATTR_ORIGIN] = self._here_data.origin
        res[ATTR_DESTINATION] = self._here_data.destination
        res[ATTR_ORIGIN_NAME] = self._here_data.origin_name
        res[ATTR_DESTINATION_NAME] = self._here_data.destination_name
        return res

    @property
    def unit_of_measurement(self) -> str:
        """Return the unit of measurement (minutes)."""
        return self._unit_of_measurement

    @property
    def icon(self) -> str:
        """Return an icon matching the configured travel mode."""
        if self._here_data.travel_mode == TRAVEL_MODE_BICYCLE:
            return ICON_BICYCLE
        if self._here_data.travel_mode == TRAVEL_MODE_PEDESTRIAN:
            return ICON_PEDESTRIAN
        if self._here_data.travel_mode in TRAVEL_MODES_PUBLIC:
            return ICON_PUBLIC
        if self._here_data.travel_mode == TRAVEL_MODE_TRUCK:
            return ICON_TRUCK
        return ICON_CAR

    async def async_update(self) -> None:
        """Refresh origin/destination and fetch a new route from HERE."""
        # Resolve tracked entities to coordinates just before querying.
        if self._origin_entity_id is not None:
            self._here_data.origin = await self._get_location_from_entity(
                self._origin_entity_id
            )
        if self._destination_entity_id is not None:
            self._here_data.destination = await self._get_location_from_entity(
                self._destination_entity_id
            )
        # The HERE request is blocking; run it in the executor pool.
        await self.hass.async_add_executor_job(self._here_data.update)

    async def _get_location_from_entity(self, entity_id: str) -> Optional[str]:
        """Get the location from the entity state or attributes."""
        entity = self.hass.states.get(entity_id)
        if entity is None:
            _LOGGER.error("Unable to find entity %s", entity_id)
            return None
        # Check if the entity has location attributes.
        if location.has_location(entity):
            return self._get_location_from_attributes(entity)
        # Check if the device is in a zone; use that zone's location.
        zone_entity = self.hass.states.get(f"zone.{entity.state}")
        if location.has_location(zone_entity):
            _LOGGER.debug(
                "%s is in %s, getting zone location", entity_id, zone_entity.entity_id
            )
            return self._get_location_from_attributes(zone_entity)
        # Check if the state itself is a valid coordinate set.
        if self._entity_state_is_valid_coordinate_set(entity.state):
            return entity.state
        _LOGGER.error(
            "The state of %s is not a valid set of coordinates: %s",
            entity_id,
            entity.state,
        )
        return None

    @staticmethod
    def _entity_state_is_valid_coordinate_set(state: str) -> bool:
        """Check that the given string is a valid set of coordinates."""
        schema = vol.Schema(cv.gps)
        try:
            coordinates = state.split(",")
            schema(coordinates)
            return True
        except (vol.MultipleInvalid):
            return False

    @staticmethod
    def _get_location_from_attributes(entity: State) -> str:
        """Get the lat/long string from an entity's attributes."""
        attr = entity.attributes
        return f"{attr.get(ATTR_LATITUDE)},{attr.get(ATTR_LONGITUDE)}"
class HERETravelTimeData:
    """Holds HERE query parameters and the latest routing results."""

    def __init__(
        self,
        here_client: herepy.RoutingApi,
        travel_mode: str,
        traffic_mode: bool,
        route_mode: str,
        units: str,
        arrival: datetime,
        departure: datetime,
    ) -> None:
        """Initialize the data object."""
        # "lat,lon" strings; filled in later by the owning sensor.
        self.origin = None
        self.destination = None
        self.travel_mode = travel_mode
        self.traffic_mode = traffic_mode
        self.route_mode = route_mode
        self.arrival = arrival
        self.departure = departure
        # Result fields, populated by update().
        self.attribution = None
        self.traffic_time = None
        self.distance = None
        self.route = None
        self.base_time = None
        self.origin_name = None
        self.destination_name = None
        self.units = units
        self._client = here_client
        # Passed through to public_transport_timetable; NOTE(review): confirm
        # the intended semantics of combineChange against the HERE API docs.
        self.combine_change = True

    def update(self) -> None:
        """Get the latest route data from HERE."""
        if self.traffic_mode:
            traffic_mode = TRAFFIC_MODE_ENABLED
        else:
            traffic_mode = TRAFFIC_MODE_DISABLED
        if self.destination is not None and self.origin is not None:
            # Convert "lat,lon" strings into HERE-friendly [lat, lon] lists.
            destination = self.destination.split(",")
            origin = self.origin.split(",")
            arrival = self.arrival
            if arrival is not None:
                arrival = convert_time_to_isodate(arrival)
            departure = self.departure
            if departure is not None:
                departure = convert_time_to_isodate(departure)
            # HERE needs some time reference; default to leaving now.
            if departure is None and arrival is None:
                departure = "now"
            _LOGGER.debug(
                "Requesting route for origin: %s, destination: %s, route_mode: %s, mode: %s, traffic_mode: %s, arrival: %s, departure: %s",
                origin,
                destination,
                herepy.RouteMode[self.route_mode],
                herepy.RouteMode[self.travel_mode],
                herepy.RouteMode[traffic_mode],
                arrival,
                departure,
            )
            try:
                # NOTE(review): public_transport_timetable is used for every
                # travel mode; the RouteMode list selects the actual mode --
                # confirm this matches the pinned herepy version.
                response = self._client.public_transport_timetable(
                    origin,
                    destination,
                    self.combine_change,
                    [
                        herepy.RouteMode[self.route_mode],
                        herepy.RouteMode[self.travel_mode],
                        herepy.RouteMode[traffic_mode],
                    ],
                    arrival=arrival,
                    departure=departure,
                )
            except herepy.NoRouteFoundError:
                # Friendlier message than the cryptic no-route error codes.
                _LOGGER.error(NO_ROUTE_ERROR_MESSAGE)
                return
            _LOGGER.debug("Raw response is: %s", response.response)
            source_attribution = response.response.get("sourceAttribution")
            if source_attribution is not None:
                self.attribution = self._build_hass_attribution(source_attribution)
            route = response.response["route"]
            summary = route[0]["summary"]
            waypoint = route[0]["waypoint"]
            self.base_time = summary["baseTime"]
            if self.travel_mode in TRAVEL_MODES_VEHICLE:
                self.traffic_time = summary["trafficTime"]
            else:
                # Non-vehicle modes carry no separate traffic time.
                self.traffic_time = self.base_time
            distance = summary["distance"]
            if self.units == CONF_UNIT_SYSTEM_IMPERIAL:
                # Meters to miles.
                self.distance = distance / 1609.344
            else:
                # Meters to kilometers.
                self.distance = distance / 1000
            self.route = response.route_short
            self.origin_name = waypoint[0]["mappedRoadName"]
            self.destination_name = waypoint[1]["mappedRoadName"]

    @staticmethod
    def _build_hass_attribution(source_attribution: Dict) -> Optional[str]:
        """Build a frontend-ready attribution string from sourceAttribution."""
        suppliers = source_attribution.get("supplier")
        if suppliers is not None:
            supplier_titles = []
            for supplier in suppliers:
                title = supplier.get("title")
                if title is not None:
                    supplier_titles.append(title)
            joined_supplier_titles = ",".join(supplier_titles)
            attribution = f"With the support of {joined_supplier_titles}. All information is provided without warranty of any kind."
            return attribution
        # Implicitly returns None when no supplier info is present.
def convert_time_to_isodate(timestr: str) -> str:
    """Take a string like 08:00:00 and combine it with the current date.

    If that moment already passed today, roll over to the same time
    tomorrow so the API always receives a future timestamp.
    """
    combined = datetime.combine(dt.start_of_local_day(), dt.parse_time(timestr))
    if combined < datetime.now():
        combined = combined + timedelta(days=1)
    return combined.isoformat()
| true | true |
f73d3dd584cecf1cb71ceacab21635e3578dcecf | 14,681 | py | Python | tensorflow_model_analysis/evaluators/aggregate.py | BioGeek/model-analysis | 03db02c21e21b092bc409c8bf263174b90c4e2ae | [
"Apache-2.0"
] | null | null | null | tensorflow_model_analysis/evaluators/aggregate.py | BioGeek/model-analysis | 03db02c21e21b092bc409c8bf263174b90c4e2ae | [
"Apache-2.0"
] | null | null | null | tensorflow_model_analysis/evaluators/aggregate.py | BioGeek/model-analysis | 03db02c21e21b092bc409c8bf263174b90c4e2ae | [
"Apache-2.0"
] | null | null | null | # Lint as: python3
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Public API for performing evaluations using the EvalMetricsGraph."""
from __future__ import absolute_import
from __future__ import division
# Standard __future__ imports
from __future__ import print_function
from typing import Any, Dict, Generator, Iterable, List, Optional, Text, Tuple, Union
import apache_beam as beam
import numpy as np
from tensorflow_model_analysis import constants
from tensorflow_model_analysis import model_util
from tensorflow_model_analysis import types
from tensorflow_model_analysis.eval_metrics_graph import eval_metrics_graph
from tensorflow_model_analysis.slicer import slicer_lib as slicer
@beam.ptransform_fn
@beam.typehints.with_input_types(Tuple[slicer.SliceKeyType, types.Extracts])
@beam.typehints.with_output_types(Tuple[slicer.SliceKeyType, Dict[Text, Any]])
def ComputePerSliceMetrics(  # pylint: disable=invalid-name
    slice_result: beam.pvalue.PCollection,
    eval_shared_model: types.EvalSharedModel,
    desired_batch_size: Optional[int] = None,
    compute_with_sampling: Optional[bool] = False,
    random_seed_for_testing: Optional[int] = None) -> beam.pvalue.PCollection:
  """PTransform for computing, aggregating and combining metrics.

  Args:
    slice_result: Incoming PCollection consisting of slice key and extracts.
    eval_shared_model: Shared model parameters for EvalSavedModel.
    desired_batch_size: Optional batch size for batching in Aggregate.
    compute_with_sampling: True to compute with Poisson bootstrap sampling.
    random_seed_for_testing: Seed to use for unit testing.

  Returns:
    PCollection of (slice key, dict of metrics).
  """
  # TODO(b/123516222): Remove this workaround per discussions in CL/227944001
  slice_result.element_type = beam.typehints.Any

  return (
      slice_result
      # _ModelLoadingIdentityFn loads the EvalSavedModel into memory
      # under a shared handle that can be used by subsequent steps.
      # Combiner lifting and producer-consumer fusion should ensure
      # that these steps run in the same process and memory space.
      # TODO(b/69566045): Remove _ModelLoadingIdentityFn and move model
      # loading to CombineFn.setup after it is available in Beam.
      | 'LoadModel' >> beam.ParDo(
          _ModelLoadingIdentityFn(eval_shared_model=eval_shared_model))
      | 'CombinePerSlice' >> beam.CombinePerKey(
          _AggregateCombineFn(
              eval_shared_model=eval_shared_model,
              desired_batch_size=desired_batch_size,
              compute_with_sampling=compute_with_sampling,
              seed_for_testing=random_seed_for_testing))
      | 'InterpretOutput' >> beam.ParDo(
          _ExtractOutputDoFn(eval_shared_model=eval_shared_model)))
def _add_metric_variables(  # pylint: disable=invalid-name
    left: types.MetricVariablesType,
    right: types.MetricVariablesType) -> types.MetricVariablesType:
  """Returns the combination (elementwise sum) of two metric variable sets.

  Either argument may be None, in which case the other argument is returned
  unchanged (both None yields None).

  Raises:
    ValueError: If both arguments are given but have different lengths.
  """
  if left is None:
    return right
  if right is None:
    return left
  if len(left) != len(right):
    raise ValueError('metric variables lengths should match, but got '
                     '%d and %d' % (len(left), len(right)))
  return [x + y for x, y in zip(left, right)]
class _AggState(object):
  """Combine state for AggregateCombineFn.

  There are two parts to the state: the metric variables (the actual state),
  and a list of FeaturesPredictionsLabels or other inputs. See
  _AggregateCombineFn for why we need this.
  """

  __slots__ = ['metric_variables', 'inputs']

  def __init__(self):
    # Metric variables accumulated so far; None until the first flush.
    self.metric_variables = None  # type: Optional[types.MetricVariablesType]
    # Raw inputs buffered until the next batch flush.
    self.inputs = []  # type: List[Union[bytes, types.FeaturesPredictionsLabels]]

  def copy_from(  # pylint: disable=invalid-name
      self, other: '_AggState') -> None:
    """Overwrites this state with the contents of `other`."""
    if other.metric_variables:
      self.metric_variables = other.metric_variables
    self.inputs = other.inputs

  def __iadd__(self, other: '_AggState') -> '_AggState':
    combined = _add_metric_variables(self.metric_variables,
                                     other.metric_variables)
    self.metric_variables = combined
    self.inputs.extend(other.inputs)
    return self

  def add_input(self, new_input) -> None:
    """Buffers one raw input for later batch processing."""
    self.inputs.append(new_input)

  def add_metrics_variables(  # pylint: disable=invalid-name
      self, metric_variables: types.MetricVariablesType) -> None:
    """Folds freshly computed metric variables into the state."""
    self.metric_variables = _add_metric_variables(self.metric_variables,
                                                  metric_variables)
@beam.typehints.with_input_types(types.Extracts)
@beam.typehints.with_output_types(Optional[List[Any]])
class _AggregateCombineFn(model_util.CombineFnWithModels):
  """Aggregate combine function.

  This function really does three things:
    1. Batching of FeaturesPredictionsLabels.
    2. "Partial reduction" of these batches by sending this through the
       "intro metrics" step.
    3. The "normal" combining of MetricVariables.

  What we really want to do is conceptually the following:
    Predictions | GroupByKey() | KeyAwareBatchElements()
        | ParDo(IntroMetrics()) | CombineValues(CombineMetricVariables()).
  but there's no way to KeyAwareBatchElements in Beam, and no way to do partial
  reductions either. Hence, this CombineFn has to do the work of batching,
  partial reduction (intro metrics), and actual combining, all in one.

  We do this by accumulating FeaturesPredictionsLabels in the combine state
  until we accumulate a large enough batch, at which point we send them
  through the "intro metrics" step. When merging, we merge the metric variables
  and accumulate FeaturesPredictionsLabels accordingly. We do one final
  "intro metrics" and merge step before producing the final output value.

  See also:
  BEAM-3737: Key-aware batching function
  (https://issues.apache.org/jira/browse/BEAM-3737).
  """

  # This needs to be large enough to allow for efficient TF invocations during
  # batch flushing, but shouldn't be too large as it also acts as cap on the
  # maximum memory usage of the computation.
  _DEFAULT_DESIRED_BATCH_SIZE = 1000

  def __init__(self,
               eval_shared_model: types.EvalSharedModel,
               desired_batch_size: Optional[int] = None,
               compute_with_sampling: Optional[bool] = False,
               seed_for_testing: Optional[int] = None) -> None:
    super(_AggregateCombineFn,
          self).__init__({'': eval_shared_model.model_loader})
    self._seed_for_testing = seed_for_testing
    self._eval_metrics_graph = None  # type: eval_metrics_graph.EvalMetricsGraph
    # We really want the batch size to be adaptive like it is in
    # beam.BatchElements(), but there isn't an easy way to make it so.
    # TODO(b/73789023): Figure out how to make this batch size dynamic.
    if desired_batch_size and desired_batch_size > 0:
      self._desired_batch_size = desired_batch_size
    else:
      self._desired_batch_size = self._DEFAULT_DESIRED_BATCH_SIZE

    self._compute_with_sampling = compute_with_sampling
    self._random_state = np.random.RandomState(seed_for_testing)

    # Metrics.
    self._combine_batch_size = beam.metrics.Metrics.distribution(
        constants.METRICS_NAMESPACE, 'combine_batch_size')
    self._num_compacts = beam.metrics.Metrics.counter(
        constants.METRICS_NAMESPACE, 'num_compacts')

  def _poissonify(
      self, accumulator: _AggState
  ) -> List[Union[bytes, types.FeaturesPredictionsLabels]]:
    # pylint: disable=line-too-long
    """Creates a bootstrap resample of the data in an accumulator.

    Given a set of data, it will be represented in the resample set a number of
    times, that number of times is drawn from Poisson(1).
    See
    http://www.unofficialgoogledatascience.com/2015/08/an-introduction-to-poisson-bootstrap26.html
    for a detailed explanation of the technique. This will work technically with
    small or empty batches but as the technique is an approximation, the
    approximation gets better as the number of examples gets larger. If the
    results themselves are empty TFMA will reject the sample. For any samples of
    a reasonable size, the chances of this are exponentially tiny. See "The
    mathematical fine print" section of the blog post linked above.

    Args:
      accumulator: Accumulator containing FPLs from a sample

    Returns:
      A list of FPLs representing a bootstrap resample of the accumulator items.
    """
    result = []
    if accumulator.inputs:
      # One Poisson(1)-distributed replication count per buffered input.
      poisson_counts = self._random_state.poisson(1, len(accumulator.inputs))
      for i, input_item in enumerate(accumulator.inputs):
        result.extend([input_item] * poisson_counts[i])
    return result

  def _maybe_do_batch(self,
                      accumulator: _AggState,
                      force: bool = False) -> None:
    """Maybe intro metrics and update accumulator in place.

    Checks if accumulator has enough FPLs for a batch, and if so, does the
    intro metrics for the batch and updates accumulator in place.

    Args:
      accumulator: Accumulator. Will be updated in place.
      force: Force intro metrics even if accumulator has less FPLs than the
        batch size.
    """
    # Lazily resolve the shared model handle the first time a batch is
    # processed on this worker.
    if self._eval_metrics_graph is None:
      self._setup_if_needed()
      self._eval_metrics_graph = self._loaded_models['']
    batch_size = len(accumulator.inputs)
    if force or batch_size >= self._desired_batch_size:
      if accumulator.inputs:
        self._combine_batch_size.update(batch_size)
        inputs_for_metrics = accumulator.inputs
        if self._compute_with_sampling:
          # If we are computing with multiple bootstrap replicates, use fpls
          # generated by the Poisson bootstrapping technique.
          inputs_for_metrics = self._poissonify(accumulator)
        if inputs_for_metrics:
          accumulator.add_metrics_variables(
              self._eval_metrics_graph.metrics_reset_update_get_list(
                  inputs_for_metrics))
        else:
          # Call to metrics_reset_update_get_list does a reset prior to the
          # metrics update, but does not handle empty updates. Explicitly
          # calling just reset here, to make the flow clear.
          self._eval_metrics_graph.reset_metric_variables()
        del accumulator.inputs[:]

  def create_accumulator(self) -> _AggState:
    return _AggState()

  def add_input(self, accumulator: _AggState,
                elem: types.Extracts) -> _AggState:
    accumulator.add_input(elem[constants.INPUT_KEY])
    self._maybe_do_batch(accumulator)
    return accumulator

  def merge_accumulators(self, accumulators: Iterable[_AggState]) -> _AggState:
    result = self.create_accumulator()
    for acc in accumulators:
      result += acc
      # Compact within the loop to avoid accumulating too much data.
      #
      # During the "map" side of combining merging happens with memory limits
      # but on the "reduce" side it's across all bundles (for a given key).
      #
      # So we could potentially accumulate up to num_bundles * batch_size
      # elements if we don't process the batches within the loop, which
      # could cause OOM errors (b/77293756).
      self._maybe_do_batch(result)
    return result

  def compact(self, accumulator: _AggState) -> _AggState:
    self._maybe_do_batch(accumulator, force=True)  # Guaranteed compaction.
    self._num_compacts.inc(1)
    return accumulator

  def extract_output(
      self, accumulator: _AggState) -> Optional[types.MetricVariablesType]:
    # It's possible that the accumulator has not been fully flushed, if it was
    # not produced by a call to compact (which is not guaranteed across all Beam
    # Runners), so we defensively flush it here again, before we extract data
    # from it, to ensure correctness.
    self._maybe_do_batch(accumulator, force=True)
    return accumulator.metric_variables
@beam.typehints.with_input_types(Tuple[slicer.SliceKeyType,
                                       Optional[List[Any]]])
# TODO(b/123516222): Add output typehints. Similarly elsewhere that it applies.
class _ExtractOutputDoFn(model_util.DoFnWithModels):
  """A DoFn that extracts the metrics output."""

  def __init__(self, eval_shared_model: types.EvalSharedModel) -> None:
    super(_ExtractOutputDoFn,
          self).__init__({'': eval_shared_model.model_loader})

    # This keeps track of the number of times the poisson bootstrap encounters
    # an empty set of elements for a slice sample. Should be extremely rare in
    # practice, keeping this counter will help us understand if something is
    # misbehaving.
    self._num_bootstrap_empties = beam.metrics.Metrics.counter(
        constants.METRICS_NAMESPACE, 'num_bootstrap_empties')

  def process(
      self, element: Tuple[slicer.SliceKeyType, types.MetricVariablesType]
  ) -> Generator[Tuple[slicer.SliceKeyType, Dict[Text, Any]], None, None]:
    """Converts a slice's metric variables into named metric values."""
    (slice_key, metric_variables) = element
    if metric_variables:
      eval_saved_model = self._loaded_models['']
      result = eval_saved_model.metrics_set_variables_and_get_values(
          metric_variables)
      yield (slice_key, result)
    else:
      # Increase a counter for empty bootstrap samples. When sampling is not
      # enabled, this should never be executed. This should only occur when
      # the slice sizes are incredibly small, and seeing large values of this
      # counter is a sign that something has gone wrong.
      self._num_bootstrap_empties.inc(1)
@beam.typehints.with_input_types(Tuple[slicer.SliceKeyType, types.Extracts])
@beam.typehints.with_output_types(Tuple[slicer.SliceKeyType, types.Extracts])
class _ModelLoadingIdentityFn(model_util.DoFnWithModels):
  """Identity DoFn whose only purpose is to load the EvalSavedModel.

  The model is loaded via DoFnWithModels under a shared handle that can be
  used by subsequent (fused) steps; elements pass through unchanged.
  """

  def __init__(self, eval_shared_model: types.EvalSharedModel) -> None:
    super().__init__({'': eval_shared_model.model_loader})

  def process(
      self, element: Tuple[slicer.SliceKeyType, types.Extracts]
  ) -> List[Tuple[slicer.SliceKeyType, types.Extracts]]:
    # Pass the element through untouched; loading the model is the side
    # effect this DoFn exists for.
    return [element]
| 43.052786 | 98 | 0.727539 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Any, Dict, Generator, Iterable, List, Optional, Text, Tuple, Union
import apache_beam as beam
import numpy as np
from tensorflow_model_analysis import constants
from tensorflow_model_analysis import model_util
from tensorflow_model_analysis import types
from tensorflow_model_analysis.eval_metrics_graph import eval_metrics_graph
from tensorflow_model_analysis.slicer import slicer_lib as slicer
@beam.ptransform_fn
@beam.typehints.with_input_types(Tuple[slicer.SliceKeyType, types.Extracts])
@beam.typehints.with_output_types(Tuple[slicer.SliceKeyType, Dict[Text, Any]])
def ComputePerSliceMetrics(
    slice_result: beam.pvalue.PCollection,
    eval_shared_model: types.EvalSharedModel,
    desired_batch_size: Optional[int] = None,
    compute_with_sampling: Optional[bool] = False,
    random_seed_for_testing: Optional[int] = None) -> beam.pvalue.PCollection:
  """PTransform for computing, aggregating and combining metrics.

  Args:
    slice_result: Incoming PCollection of (slice key, extracts) pairs.
    eval_shared_model: Shared model parameters for EvalSavedModel.
    desired_batch_size: Optional batch size for batching in Aggregate.
    compute_with_sampling: True to compute with Poisson bootstrap sampling.
    random_seed_for_testing: Seed to use for unit testing.

  Returns:
    PCollection of (slice key, dict of metrics).
  """
  # Downstream steps emit heterogeneous values; relax the element type.
  slice_result.element_type = beam.typehints.Any
  return (
      slice_result
      # Load the model first so fused downstream steps can share it.
      | 'LoadModel' >> beam.ParDo(
          _ModelLoadingIdentityFn(eval_shared_model=eval_shared_model))
      | 'CombinePerSlice' >> beam.CombinePerKey(
          _AggregateCombineFn(
              eval_shared_model=eval_shared_model,
              desired_batch_size=desired_batch_size,
              compute_with_sampling=compute_with_sampling,
              seed_for_testing=random_seed_for_testing))
      | 'InterpretOutput' >> beam.ParDo(
          _ExtractOutputDoFn(eval_shared_model=eval_shared_model)))
def _add_metric_variables(
    left: types.MetricVariablesType,
    right: types.MetricVariablesType) -> types.MetricVariablesType:
  """Returns left and right metric variables combined (elementwise sum).

  Either argument may be None; combining with None returns the other
  argument unchanged.

  Raises:
    ValueError: If both arguments are given but differ in length.
  """
  if left is not None and right is not None:
    if len(left) != len(right):
      raise ValueError('metric variables lengths should match, but got '
                       '%d and %d' % (len(left), len(right)))
    # Elementwise sum of the two variable lists.
    return [x + y for x, y in zip(left, right)]
  elif left is not None:
    return left
  else:
    return right
class _AggState(object):
  """Combine state for the aggregation CombineFn.

  Tracks partially combined metric variables plus a buffer of raw inputs
  that have not yet been folded into the metric variables.
  """

  __slots__ = ['metric_variables', 'inputs']

  def __init__(self):
    # None until the first batch of inputs has been processed.
    self.metric_variables = None
    # Raw inputs buffered until the next batch flush.
    self.inputs = [
    ]

  def copy_from(
      self, other: '_AggState') -> None:
    """Replaces this state's contents with those of `other`."""
    if other.metric_variables:
      self.metric_variables = other.metric_variables
    self.inputs = other.inputs

  def __iadd__(self, other: '_AggState') -> '_AggState':
    # Merge another state into this one in place.
    self.metric_variables = _add_metric_variables(self.metric_variables,
                                                  other.metric_variables)
    self.inputs.extend(other.inputs)
    return self

  def add_input(self, new_input) -> None:
    """Buffers one raw input for later batch processing."""
    self.inputs.append(new_input)

  def add_metrics_variables(
      self, metric_variables: types.MetricVariablesType) -> None:
    """Folds freshly computed metric variables into the state."""
    self.metric_variables = _add_metric_variables(self.metric_variables,
                                                  metric_variables)
@beam.typehints.with_input_types(types.Extracts)
@beam.typehints.with_output_types(Optional[List[Any]])
class _AggregateCombineFn(model_util.CombineFnWithModels):
  """Combines extracts into metric variables by batching them through the
  metrics graph; batching bounds the maximum memory usage of the
  computation."""
  _DEFAULT_DESIRED_BATCH_SIZE = 1000
  def __init__(self,
               eval_shared_model: types.EvalSharedModel,
               desired_batch_size: Optional[int] = None,
               compute_with_sampling: Optional[bool] = False,
               seed_for_testing: Optional[int] = None) -> None:
    super(_AggregateCombineFn,
          self).__init__({'': eval_shared_model.model_loader})
    self._seed_for_testing = seed_for_testing
    self._eval_metrics_graph = None  # type: eval_metrics_graph.EvalMetricsGraph
    # We really want the batch size to be adaptive like it is in
    # beam.BatchElements(), but there isn't an easy way to make it so.
    if desired_batch_size and desired_batch_size > 0:
      self._desired_batch_size = desired_batch_size
    else:
      self._desired_batch_size = self._DEFAULT_DESIRED_BATCH_SIZE
    self._compute_with_sampling = compute_with_sampling
    self._random_state = np.random.RandomState(seed_for_testing)
    # Beam metrics for observability of batching behaviour.
    self._combine_batch_size = beam.metrics.Metrics.distribution(
        constants.METRICS_NAMESPACE, 'combine_batch_size')
    self._num_compacts = beam.metrics.Metrics.counter(
        constants.METRICS_NAMESPACE, 'num_compacts')
  def _poissonify(self, accumulator: _AggState) -> List[bytes]:
    """Replicates each buffered input k~Poisson(1) times (bootstrap)."""
    result = []
    if accumulator.inputs:
      poisson_counts = self._random_state.poisson(1, len(accumulator.inputs))
      for i, input_item in enumerate(accumulator.inputs):
        result.extend([input_item] * poisson_counts[i])
    return result
  def _maybe_do_batch(self,
                      accumulator: _AggState,
                      force: bool = False) -> None:
    """Feeds buffered inputs through the metrics graph when the buffer is
    full (or unconditionally when force=True), folding the resulting
    metric variables into the accumulator and clearing the buffer."""
    if self._eval_metrics_graph is None:
      self._setup_if_needed()
      self._eval_metrics_graph = self._loaded_models['']
    batch_size = len(accumulator.inputs)
    if force or batch_size >= self._desired_batch_size:
      if accumulator.inputs:
        self._combine_batch_size.update(batch_size)
        inputs_for_metrics = accumulator.inputs
        if self._compute_with_sampling:
          # Bootstrap: resample the batch before computing metrics.
          inputs_for_metrics = self._poissonify(accumulator)
        if inputs_for_metrics:
          accumulator.add_metrics_variables(
              self._eval_metrics_graph.metrics_reset_update_get_list(
                  inputs_for_metrics))
        else:
          # Sampling produced an empty batch; still reset graph state so
          # stale variables cannot leak into a later batch.
          self._eval_metrics_graph.reset_metric_variables()
        del accumulator.inputs[:]
  def create_accumulator(self) -> _AggState:
    return _AggState()
  def add_input(self, accumulator: _AggState,
                elem: types.Extracts) -> _AggState:
    accumulator.add_input(elem[constants.INPUT_KEY])
    self._maybe_do_batch(accumulator)
    return accumulator
  def merge_accumulators(self, accumulators: Iterable[_AggState]) -> _AggState:
    result = self.create_accumulator()
    for acc in accumulators:
      result += acc
      # Merging concatenates the unprocessed-input buffers.
      # So we could potentially accumulate get num_bundles * batch_size
      # elements if we don't process the batches within the loop, which
      # would blow up memory; flush opportunistically instead.
      self._maybe_do_batch(result)
    return result
  def compact(self, accumulator: _AggState) -> _AggState:
    self._maybe_do_batch(accumulator, force=True)
    self._num_compacts.inc(1)
    return accumulator
  def extract_output(
      self, accumulator: _AggState) -> Optional[types.MetricVariablesType]:
    # The accumulator may still hold buffered inputs if it was
    # not produced by a call to compact (which is not guaranteed across all Beam
    # Runners), so we defensively flush it here again, before we extract data
    # from it, to ensure correctness.
    self._maybe_do_batch(accumulator, force=True)
    return accumulator.metric_variables
@beam.typehints.with_input_types(Tuple[slicer.SliceKeyType,
                                       Optional[List[Any]]])
# TODO(b/123516222): Add output typehints. Similarly elsewhere that it applies.
class _ExtractOutputDoFn(model_util.DoFnWithModels):
  """Converts accumulated metric variables into named metric values."""
  def __init__(self, eval_shared_model: types.EvalSharedModel) -> None:
    super(_ExtractOutputDoFn,
          self).__init__({'': eval_shared_model.model_loader})
    # This keeps track of the number of times the poisson bootstrap encounters
    # an empty set of elements for a slice sample. Should be extremely rare in
    # practice, keeping this counter will help us understand if something is
    # misbehaving.
    self._num_bootstrap_empties = beam.metrics.Metrics.counter(
        constants.METRICS_NAMESPACE, 'num_bootstrap_empties')
  def process(
      self, element: Tuple[slicer.SliceKeyType, types.MetricVariablesType]
  ) -> Generator[Tuple[slicer.SliceKeyType, Dict[Text, Any]], None, None]:
    (slice_key, metric_variables) = element
    if metric_variables:
      # Load the variables into the model's graph and read out the values.
      eval_saved_model = self._loaded_models['']
      result = eval_saved_model.metrics_set_variables_and_get_values(
          metric_variables)
      yield (slice_key, result)
    else:
      # Increase a counter for empty bootstrap samples. When sampling is not
      # enabled, this should never be exected. This should only occur when the
      # slice sizes are incredibly small, and seeing large values of this
      # counter is a sign that something has gone wrong.
      self._num_bootstrap_empties.inc(1)
@beam.typehints.with_input_types(Tuple[slicer.SliceKeyType, types.Extracts])
@beam.typehints.with_output_types(Tuple[slicer.SliceKeyType, types.Extracts])
class _ModelLoadingIdentityFn(model_util.DoFnWithModels):
  """Identity DoFn that exists only to register the model loader so the
  model is loaded on workers before downstream stages run."""

  def __init__(self, eval_shared_model: types.EvalSharedModel) -> None:
    super(_ModelLoadingIdentityFn, self).__init__(
        {'': eval_shared_model.model_loader})

  def process(
      self, element: Tuple[slicer.SliceKeyType, types.Extracts]
  ) -> List[Tuple[slicer.SliceKeyType, types.Extracts]]:
    # Pass every element through unchanged.
    return [element]
| true | true |
f73d3e24aacb7656b04f22a6380b4de22def67fb | 8,167 | py | Python | audiophiler/__init__.py | nfatkhiyev/audiophiler | 527d975bdd06604da1a9b3212adcab749c116253 | [
"MIT"
] | null | null | null | audiophiler/__init__.py | nfatkhiyev/audiophiler | 527d975bdd06604da1a9b3212adcab749c116253 | [
"MIT"
] | null | null | null | audiophiler/__init__.py | nfatkhiyev/audiophiler | 527d975bdd06604da1a9b3212adcab749c116253 | [
"MIT"
] | 1 | 2020-02-11T17:52:48.000Z | 2020-02-11T17:52:48.000Z | # File: __init__.py
# Audiophiler main flask functions
import hashlib
import os
import random
import subprocess
import json
import requests
import flask_migrate
from flask import Flask, render_template, request, jsonify, redirect
from flask_pyoidc.provider_configuration import *
from flask_pyoidc.flask_pyoidc import OIDCAuthentication
from flask_sqlalchemy import SQLAlchemy
from werkzeug.utils import secure_filename
from csh_ldap import CSHLDAP
from audiophiler.s3 import *
from audiophiler.util import *
app = Flask(__name__)
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
# Get app config from absolute file path
if os.path.exists(os.path.join(os.getcwd(), "config.py")):
app.config.from_pyfile(os.path.join(os.getcwd(), "config.py"))
else:
app.config.from_pyfile(os.path.join(os.getcwd(), "config.env.py"))
git_cmd = ['git', 'rev-parse', '--short', 'HEAD']
app.config["GIT_REVISION"] = subprocess.check_output(git_cmd).decode('utf-8').rstrip()
_config = ProviderConfiguration(
app.config['OIDC_ISSUER'],
client_metadata = ClientMetadata(
app.config['OIDC_CLIENT_CONFIG']['client_id'],
app.config['OIDC_CLIENT_CONFIG']['client_secret']
)
)
auth = OIDCAuthentication({'default': _config}, app)
# Get s3 bucket for use in functions and templates
s3_bucket = get_bucket(app.config["S3_URL"], app.config["S3_KEY"],
app.config["S3_SECRET"], app.config["BUCKET_NAME"])
# Database setup
db = SQLAlchemy(app)
migrate = flask_migrate.Migrate(app, db)
# Import db models after instantiating db object
from audiophiler.models import File, Harold, Auth
# Create CSHLDAP connection
ldap = CSHLDAP(app.config["LDAP_BIND_DN"],
app.config["LDAP_BIND_PW"])
# Import ldap functions after creating ldap conn
from audiophiler.ldap import ldap_is_eboard, ldap_is_rtp
# Disable SSL certificate verification warning
requests.packages.urllib3.disable_warnings()
@app.route("/")
@auth.oidc_auth('default')
@audiophiler_auth
def home(auth_dict=None):
    """Render the main page listing every uploaded sound."""
    uid = auth_dict["uid"]
    return render_template(
        "main.html",
        db_files=File.query.all(),
        get_date_modified=get_date_modified,
        s3_bucket=s3_bucket,
        auth_dict=auth_dict,
        harolds=get_harold_list(uid),
        is_rtp=ldap_is_rtp(uid),
        is_eboard=ldap_is_eboard(uid),
    )
@app.route("/mine")
@auth.oidc_auth('default')
@audiophiler_auth
def mine(auth_dict=None):
    """Render the main page restricted to the current user's own uploads."""
    uid = auth_dict["uid"]
    return render_template(
        "main.html",
        db_files=File.query.filter_by(author=uid).all(),
        get_file_s3=get_file_s3,
        get_date_modified=get_date_modified,
        s3_bucket=s3_bucket,
        auth_dict=auth_dict,
        harolds=get_harold_list(uid),
        # Privilege flags are not needed when viewing one's own files.
        is_rtp=False,
        is_eboard=False,
    )
@app.route("/selected")
@auth.oidc_auth('default')
@audiophiler_auth
def selected(auth_dict=None):
    """Render the main page showing only the user's selected harolds."""
    uid = auth_dict["uid"]
    is_rtp = ldap_is_rtp(uid)
    is_eboard = ldap_is_eboard(uid)
    harolds = get_harold_list(uid)
    return render_template(
        "main.html",
        db_files=File.query.filter(File.file_hash.in_(harolds)).all(),
        get_date_modified=get_date_modified,
        s3_bucket=s3_bucket,
        auth_dict=auth_dict,
        harolds=harolds,
        is_rtp=is_rtp,
        is_eboard=is_eboard,
    )
@app.route("/upload", methods=["GET"])
@auth.oidc_auth('default')
@audiophiler_auth
def upload_page(auth_dict=None):
    """Serve the upload form."""
    return render_template("upload.html", auth_dict=auth_dict)
@app.route("/upload", methods=["POST"])
@auth.oidc_auth('default')
@audiophiler_auth
def upload(auth_dict=None):
    """Accept one or more uploaded sound files.

    Each file is content-hashed (MD5), deduplicated against the database,
    uploaded to S3 under its hash, and recorded in the DB.  Returns JSON
    with per-file "success" and "error" lists.
    """
    uploaded_files = [t[1] for t in request.files.items()]

    upload_status = {"error": [], "success": []}

    for f in uploaded_files:
        # Sanitize the client-supplied file name.
        filename = secure_filename(f.filename)

        # Hash the file contents (read file in ram).
        # File contents cannot be read in chunks (this is a flaw in boto
        # file objects), so the whole file is read at once.
        file_hash = hashlib.md5(f.read()).hexdigest()
        # Reset file pointer to avoid EOF on the later S3 upload.
        f.seek(0)

        # Skip duplicates but keep processing the remaining files.
        # (Bug fix: this previously used `break`, which silently dropped
        # every file after the first duplicate in a multi-file upload.)
        if File.query.filter_by(file_hash=file_hash).first():
            upload_status["error"].append(filename)
            continue

        # Add file info to db.
        file_model = File(filename, auth_dict["uid"], file_hash)
        if file_model is None:
            upload_status["error"].append(filename)
            continue

        # Upload file to s3, keyed by content hash.
        upload_file(s3_bucket, file_hash, f)

        # Add file_model to DB and flush.
        db.session.add(file_model)
        db.session.flush()
        db.session.commit()
        db.session.refresh(file_model)

        # Set success status info.
        upload_status["success"].append({
            "name": file_model.name,
            "file_hash": file_model.file_hash
        })

    return jsonify(upload_status)
@app.route("/delete/<string:file_hash>", methods=["POST"])
@auth.oidc_auth('default')
@audiophiler_auth
def delete_file(file_hash, auth_dict=None):
    """Delete a sound: its DB row, its harold entry, and the S3 object."""
    target = File.query.filter(File.file_hash == file_hash).first()
    if target is None:
        return "File Not Found", 404

    # Only the uploader may delete, unless the caller is e-board or an RTP.
    if auth_dict["uid"] != target.author:
        if not (ldap_is_eboard(auth_dict["uid"]) or ldap_is_rtp(auth_dict["uid"])):
            return "Permission Denied", 403

    # Remove the database record.
    db.session.delete(target)
    db.session.flush()
    db.session.commit()

    # Remove any harold entry pointing at this file, then the S3 object.
    remove_harold(file_hash)
    remove_file(s3_bucket, file_hash)

    return "OK go for it", 200
@app.route("/get_file_url/<string:file_hash>")
@auth.oidc_auth('default')
@audiophiler_auth
def get_s3_url(file_hash, auth_dict=None):
    """Redirect to a presigned S3 URL for the requested file hash."""
    return redirect(get_file_s3(s3_bucket, file_hash))
@app.route("/set_harold/<string:file_hash>", methods=["POST"])
@auth.oidc_auth('default')
@audiophiler_auth
def set_harold(file_hash, auth_dict=None):
    """Mark a sound as one of the current user's harolds."""
    harold = Harold(file_hash, auth_dict["uid"])
    db.session.add(harold)
    db.session.flush()
    db.session.commit()
    db.session.refresh(harold)
    return "OK", 200
@app.route("/delete_harold/<string:file_hash>", methods=["POST"])
@auth.oidc_auth('default')
@audiophiler_auth
def remove_harold(file_hash, auth_dict=None):
    """Remove a sound from the current user's harold selection."""
    harold = Harold.query.filter(Harold.file_hash == file_hash).first()
    if harold is None:
        return "File Not Found", 404
    db.session.delete(harold)
    db.session.flush()
    db.session.commit()
    return "OK go for it", 200
# This is a post route since auth_key is required
@app.route("/get_harold/<string:uid>", methods=["POST"])
def get_harold(uid, auth_dict=None):
    """Return a presigned URL for one of *uid*'s harolds.

    Requires a valid auth_key in the JSON body.  Falls back to a random
    harold from the whole table when the user has none selected.
    """
    data_dict = request.get_json()
    # Bug fix: .get avoids a KeyError (HTTP 500) when the client sends no
    # JSON body or omits auth_key; both now yield the 403 below instead.
    if data_dict and data_dict.get("auth_key"):
        auth_models = Auth.query.all()
        for auth_obj in auth_models:
            if auth_obj.auth_key == data_dict["auth_key"]:
                harolds_list = get_harold_list(uid)
                if harolds_list:
                    harold_file_hash = random.choice(harolds_list)
                else:
                    harold_file_hash = get_random_harold()
                return get_file_s3(s3_bucket, harold_file_hash)
    return "Permission denied", 403
@app.route("/logout")
@auth.oidc_logout
def logout():
    """End the OIDC session and send the user back to the landing page."""
    return redirect("/", 302)
def get_harold_list(uid):
    """Return the file hashes of every harold owned by *uid*."""
    return [h.file_hash for h in Harold.query.filter_by(owner=uid).all()]
def get_random_harold():
    """Pick a uniformly random harold file hash from the whole table."""
    query = Harold.query
    total = int(query.count())
    # NOTE(review): if the table is empty, .first() returns None and the
    # attribute access below raises -- confirm at least one harold always
    # exists before relying on this.
    entry = query.offset(int(total * random.random())).first()
    return entry.file_hash
| 33.064777 | 86 | 0.691196 |
# Audiophiler Flask application (comment-stripped duplicate of the module
# above); comments restored below, code unchanged.
import hashlib
import os
import random
import subprocess
import json
import requests
import flask_migrate
from flask import Flask, render_template, request, jsonify, redirect
from flask_pyoidc.provider_configuration import *
from flask_pyoidc.flask_pyoidc import OIDCAuthentication
from flask_sqlalchemy import SQLAlchemy
from werkzeug.utils import secure_filename
from csh_ldap import CSHLDAP
from audiophiler.s3 import *
from audiophiler.util import *
app = Flask(__name__)
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
# Load app config: config.py takes precedence, config.env.py is the fallback.
if os.path.exists(os.path.join(os.getcwd(), "config.py")):
    app.config.from_pyfile(os.path.join(os.getcwd(), "config.py"))
else:
    app.config.from_pyfile(os.path.join(os.getcwd(), "config.env.py"))
# Record the current git revision in the app config.
git_cmd = ['git', 'rev-parse', '--short', 'HEAD']
app.config["GIT_REVISION"] = subprocess.check_output(git_cmd).decode('utf-8').rstrip()
# OpenID Connect client configuration and authentication handler.
_config = ProviderConfiguration(
    app.config['OIDC_ISSUER'],
    client_metadata = ClientMetadata(
        app.config['OIDC_CLIENT_CONFIG']['client_id'],
        app.config['OIDC_CLIENT_CONFIG']['client_secret']
    )
)
auth = OIDCAuthentication({'default': _config}, app)
# S3 bucket handle used by the routes and templates below.
s3_bucket = get_bucket(app.config["S3_URL"], app.config["S3_KEY"],
        app.config["S3_SECRET"], app.config["BUCKET_NAME"])
# Database setup.
db = SQLAlchemy(app)
migrate = flask_migrate.Migrate(app, db)
# Import db models only after the db object exists.
from audiophiler.models import File, Harold, Auth
# LDAP connection for privilege checks.
ldap = CSHLDAP(app.config["LDAP_BIND_DN"],
        app.config["LDAP_BIND_PW"])
from audiophiler.ldap import ldap_is_eboard, ldap_is_rtp
# Disable SSL certificate verification warning.
requests.packages.urllib3.disable_warnings()
@app.route("/")
@auth.oidc_auth('default')
@audiophiler_auth
def home(auth_dict=None):
    """Render the main page listing every uploaded sound."""
    db_files = File.query.all()
    harolds = get_harold_list(auth_dict["uid"])
    is_rtp = ldap_is_rtp(auth_dict["uid"])
    is_eboard = ldap_is_eboard(auth_dict["uid"])
    return render_template("main.html", db_files=db_files,
            get_date_modified=get_date_modified, s3_bucket=s3_bucket,
            auth_dict=auth_dict, harolds=harolds, is_rtp=is_rtp,
            is_eboard=is_eboard)
@app.route("/mine")
@auth.oidc_auth('default')
@audiophiler_auth
def mine(auth_dict=None):
    """Render the main page restricted to the current user's own uploads."""
    db_files = File.query.filter_by(author=auth_dict["uid"]).all()
    harolds = get_harold_list(auth_dict["uid"])
    return render_template("main.html", db_files=db_files,
            get_file_s3=get_file_s3, get_date_modified=get_date_modified,
            s3_bucket=s3_bucket, auth_dict=auth_dict, harolds=harolds,
            is_rtp=False, is_eboard=False)
@app.route("/selected")
@auth.oidc_auth('default')
@audiophiler_auth
def selected(auth_dict=None):
    """Render the main page showing only the user's selected harolds."""
    is_rtp = ldap_is_rtp(auth_dict["uid"])
    is_eboard = ldap_is_eboard(auth_dict["uid"])
    harolds = get_harold_list(auth_dict["uid"])
    db_files = File.query.filter(File.file_hash.in_(harolds)).all()
    return render_template("main.html", db_files=db_files,
            get_date_modified=get_date_modified, s3_bucket=s3_bucket,
            auth_dict=auth_dict, harolds=harolds, is_rtp=is_rtp,
            is_eboard=is_eboard)
@app.route("/upload", methods=["GET"])
@auth.oidc_auth('default')
@audiophiler_auth
def upload_page(auth_dict=None):
    """Serve the upload form."""
    return render_template("upload.html", auth_dict=auth_dict)
@app.route("/upload", methods=["POST"])
@auth.oidc_auth('default')
@audiophiler_auth
def upload(auth_dict=None):
    """Accept uploaded sound files: hash, dedupe, store in S3 and the DB."""
    uploaded_files = [t[1] for t in request.files.items()]
    upload_status = {}
    upload_status["error"] = []
    upload_status["success"] = []
    for f in uploaded_files:
        # Sanitize the client-supplied file name.
        filename = secure_filename(f.filename)
        # Content hash; whole file must be read at once (boto limitation).
        file_hash = hashlib.md5(f.read()).hexdigest()
        # Rewind so the S3 upload below sees the data.
        f.seek(0)
        # NOTE(review): `break` aborts the whole batch on the first
        # duplicate; `continue` would process the remaining files.
        if File.query.filter_by(file_hash=file_hash).first():
            upload_status["error"].append(filename)
            break
        file_model = File(filename, auth_dict["uid"], file_hash)
        if file_model is None:
            upload_status["error"].append(filename)
            break
        upload_file(s3_bucket, file_hash, f)
        db.session.add(file_model)
        db.session.flush()
        db.session.commit()
        db.session.refresh(file_model)
        upload_status["success"].append({
            "name": file_model.name,
            "file_hash": file_model.file_hash
        })
    return jsonify(upload_status)
@app.route("/delete/<string:file_hash>", methods=["POST"])
@auth.oidc_auth('default')
@audiophiler_auth
def delete_file(file_hash, auth_dict=None):
    """Delete a sound: its DB row, its harold entry, and the S3 object."""
    file_model = File.query.filter(File.file_hash == file_hash).first()
    if file_model is None:
        return "File Not Found", 404
    # Only the uploader may delete, unless the caller is e-board or an RTP.
    if not auth_dict["uid"] == file_model.author:
        if not (ldap_is_eboard(auth_dict["uid"]) or ldap_is_rtp(auth_dict["uid"])):
            return "Permission Denied", 403
    db.session.delete(file_model)
    db.session.flush()
    db.session.commit()
    remove_harold(file_hash)
    remove_file(s3_bucket, file_hash)
    return "OK go for it", 200
@app.route("/get_file_url/<string:file_hash>")
@auth.oidc_auth('default')
@audiophiler_auth
def get_s3_url(file_hash, auth_dict=None):
    """Redirect to a presigned S3 URL for the requested file hash."""
    return redirect(get_file_s3(s3_bucket, file_hash))
@app.route("/set_harold/<string:file_hash>", methods=["POST"])
@auth.oidc_auth('default')
@audiophiler_auth
def set_harold(file_hash, auth_dict=None):
    """Mark a sound as one of the current user's harolds."""
    harold_model = Harold(file_hash, auth_dict["uid"])
    db.session.add(harold_model)
    db.session.flush()
    db.session.commit()
    db.session.refresh(harold_model)
    return "OK", 200
@app.route("/delete_harold/<string:file_hash>", methods=["POST"])
@auth.oidc_auth('default')
@audiophiler_auth
def remove_harold(file_hash, auth_dict=None):
    """Remove a sound from the current user's harold selection."""
    harold_model = Harold.query.filter(Harold.file_hash == file_hash).first()
    if harold_model is None:
        return "File Not Found", 404
    db.session.delete(harold_model)
    db.session.flush()
    db.session.commit()
    return "OK go for it", 200
# POST route: an auth_key is required in the JSON body.
@app.route("/get_harold/<string:uid>", methods=["POST"])
def get_harold(uid, auth_dict=None):
    """Return a presigned URL for one of *uid*'s harolds (random fallback)."""
    data_dict = request.get_json()
    if data_dict["auth_key"]:
        auth_models = Auth.query.all()
        for auth_obj in auth_models:
            if auth_obj.auth_key == data_dict["auth_key"]:
                harold_file_hash = None
                harolds_list = get_harold_list(uid)
                if len(harolds_list) == 0:
                    harold_file_hash = get_random_harold()
                else:
                    harold_file_hash = random.choice(harolds_list)
                return get_file_s3(s3_bucket, harold_file_hash)
    return "Permission denied", 403
@app.route("/logout")
@auth.oidc_logout
def logout():
    """End the OIDC session and send the user back to the landing page."""
    return redirect("/", 302)
def get_harold_list(uid):
    """Return the file hashes of every harold owned by *uid*."""
    harold_list = Harold.query.filter_by(owner=uid).all()
    harolds = [harold.file_hash for harold in harold_list]
    return harolds
def get_random_harold():
    """Pick a uniformly random harold file hash from the whole table."""
    query = Harold.query
    row_count = int(query.count())
    randomized_entry = query.offset(int(row_count*random.random())).first()
    return randomized_entry.file_hash
| true | true |
f73d3e282ad15291ac2386ed566f08d7a326f2dd | 6,522 | py | Python | rubik_solver/Solver/CFOP/F2LSolver.py | kazi92/rubikSolver | d5c7e273ca982ee3ccf5c9b43c522954e232afd4 | [
"MIT"
] | 46 | 2017-04-06T09:55:48.000Z | 2022-03-08T13:55:27.000Z | rubik_solver/Solver/CFOP/F2LSolver.py | kazi92/rubikSolver | d5c7e273ca982ee3ccf5c9b43c522954e232afd4 | [
"MIT"
] | 10 | 2018-07-14T03:37:41.000Z | 2022-03-11T14:05:31.000Z | rubik_solver/Solver/CFOP/F2LSolver.py | kazi92/rubikSolver | d5c7e273ca982ee3ccf5c9b43c522954e232afd4 | [
"MIT"
] | 26 | 2017-10-23T21:50:27.000Z | 2021-12-16T06:34:41.000Z | from rubik_solver.Move import Move
from .. import Solver
from ..Beginner.WhiteFaceSolver import WhiteFaceSolver
class F2LSolver(Solver):
STEPS = {
'FUR': {
'UB': ["R", "U", "R'"],
'FU': ["U'", "F'", "U", "F"],
'FR': ["U", "F'", "U", "F", "U", "F'", "U2", "F"],
'RF': ["U", "F'", "U'", "F", "Y", "U'", "F", "U", "F'", "Y'"],
'RU': ["R", "U'", "R'", "U", "Y'", "U", "R'", "U'", "R", "Y"],
'BU': ["U", "F'", "U2", "F", "U", "F'", "U2", "F"],
'LU': ["U", "F'", "U'", "F", "U", "F'", "U2", "F"],
'UR': ["U'", "R", "U'", "R'", "U", "R", "U", "R'"],
'UL': ["U'", "R", "U", "R'", "U", "R", "U", "R'"],
'UF': ["U", "F'", "U2", "F", "U'", "R", "U", "R'"],
},
'URF': {
'LU': ["F'", "U'", "F"],
'UR': ["U", "R", "U'", "R'"],
'FR': ["U'", "R", "U'", "R'", "U'", "R", "U2", "R'"],
'RF': ["U'", "R", "U", "R'", "Y'", "U", "R'", "U'", "R", "Y"],
'UF': ["F'", "U", "F", "U'", "Y", "U'", "F", "U", "F'", "Y'"],
'UL': ["U'", "R", "U2", "R'", "U'", "R", "U2", "R'"],
'UB': ["U'", "R", "U", "R'", "U'", "R", "U2", "R'"],
'FU': ["U", "F'", "U", "F", "U'", "F'", "U'", "F"],
'BU': ["U", "F'", "U'", "F", "U'", "F'", "U'", "F"],
'RU': ["U'", "R", "U2", "R'", "U", "F'", "U'", "F"],
},
'FRD': {
'FU': ["U", "R", "U'", "R'", "U'", "F'", "U", "F"],
'RU': ["U2", "R", "U'", "R'", "U'", "F'", "U", "F"],
'LU': ["R", "U'", "R'", "U'", "F'", "U", "F"],
'BU': ["U'", "R", "U'", "R'", "U'", "F'", "U", "F"],
'UR': ["U'", "F'", "U", "F", "U", "R", "U'", "R'"],
'UL': ["U", "F'", "U", "F", "U", "R", "U'", "R'"],
'UB': ["F'", "U", "F", "U", "R", "U'", "R'"],
'UF': ["U2", "F'", "U", "F", "U", "R", "U'", "R'"],
'RF': ["R", "U'", "R'", "Y'", "U", "R'", "U2", "R", "U", "R'", "U2", "R", "Y"],
'FR': [],
},
'DFR': {
'FU': ["F'", "U", "F", "U'", "F'", "U", "F"],
'UR': ["R", "U", "R'", "U'", "R", "U", "R'"],
'FR': ["R", "U'", "R'", "U", "R", "U2", "R'", "U", "R", "U'", "R'"],
'RF': ["R", "U", "R'", "U'", "R", "U'", "R'", "U", "Y'", "U", "R'", "U'", "R", "Y"],
},
'RDF': {
'FU': ["F'", "U'", "F", "U", "F'", "U'", "F"],
'UR': ["R", "U'", "R'", "U", "R", "U'", "R'"],
'FR': ["R", "U'", "R'", "U'", "R", "U", "R'", "U'", "R", "U2", "R'"],
'RF': ["R", "U'", "R'", "Y'", "U", "R'", "U'", "R", "U'", "R'", "U'", "R", "Y"]
},
'RFU':{
'FR': ["R", "U", "R'", "U'", "R", "U", "R'", "U'", "R", "U", "R'"],
'RF': ["R", "U'", "R'", "Y'", "U", "R'", "U", "R", "Y"],
'UF': ["R", "U", "R'", "U'", "U'", "R", "U", "R'", "U'", "R", "U", "R'"],
'UL': ["U2", "R", "U", "R'", "U", "R", "U'", "R'"],
'UB': ["U", "R", "U2", "R'", "U", "R", "U'", "R'"],
'UR': ["R", "U2", "R'", "U'", "R", "U", "R'"],
'LU': ["U'", "F'", "U2", "F", "U'", "F'", "U", "F"],
'BU': ["U2", "F'", "U'", "F", "U'", "F'", "U", "F"],
'RU': ["Y'", "R'", "U'", "R", "U2", "R'", "U'", "R", "U", "R'", "U'", "R", "Y"],
'FU': ["F'", "U2", "F", "U", "F'", "U'", "F"],
},
}
@staticmethod
def get_step(corner, edge):
'''
This method returns the step to place to 2 cubies in place,
the variables encodes the cubies position and orientation.
corner must be a string with 3 letters, each letter represents
the facing of the colors in the following way:
1st letter: where the front color (cubie F) is facing in the corner to move
2nd letter: where the right color (cubie R) is facing in the corner to move
3rd letter: where the bottom color (cubie B, usually white) is facing in the corner to move
The same applies with the edge variable
'''
return F2LSolver.STEPS[corner][edge]
def move(self, s, solution):
self.cube.move(Move(s))
solution.append(s)
def solution(self):
solution = []
for _ in range(4):
front_color = self.cube.cubies['F'].facings['F'].color
right_color = self.cube.cubies['R'].facings['R'].color
corner = self.cube.search_by_colors(front_color, right_color, 'W')
step_solution = WhiteFaceSolver.first_step(corner, self.cube.cubies[corner].color_facing('W'))
solution.extend(step_solution)
for s in step_solution:
self.cube.move(Move(s))
edge = self.cube.search_by_colors(front_color, right_color)
# If edge is in BL or BR, WAF!, this case is not expected in any manual
if edge == 'BL':
self.move("B'", solution)
self.move("U'", solution)
self.move("B", solution)
elif edge == 'BR':
self.move("B", solution)
self.move("U", solution)
self.move("B'", solution)
elif edge == 'FL':
self.move("L'", solution)
self.move("U'", solution)
self.move("L", solution)
corner = self.cube.search_by_colors(front_color, right_color, 'W')
#Place corner in FRU if needed
if 'U' in corner:
while corner != 'FRU':
self.move("U", solution)
corner = self.cube.search_by_colors(front_color, right_color, 'W')
edge = self.cube.search_by_colors(front_color, right_color)
corner_facings = ''.join([
self.cube.cubies[corner].color_facing(front_color),
self.cube.cubies[corner].color_facing(right_color),
self.cube.cubies[corner].color_facing('W')
])
edge_facings = ''.join([
self.cube.cubies[edge].color_facing(front_color),
self.cube.cubies[edge].color_facing(right_color)
])
step_solution = F2LSolver.get_step(corner_facings, edge_facings)
solution.extend(step_solution)
for s in step_solution:
self.cube.move(Move(s))
self.cube.move(Move("Y"))
solution.append("Y")
return solution
| 46.255319 | 106 | 0.364765 | from rubik_solver.Move import Move
from .. import Solver
from ..Beginner.WhiteFaceSolver import WhiteFaceSolver
class F2LSolver(Solver):
    """CFOP second stage: solves the First Two Layers (F2L).

    (Comment-stripped duplicate of the class above; documentation restored.)
    STEPS maps a (corner orientation, edge orientation) pair to the move
    sequence that inserts the white corner and its matching edge into the
    front-right slot.
    """
    STEPS = {
        'FUR': {
            'UB': ["R", "U", "R'"],
            'FU': ["U'", "F'", "U", "F"],
            'FR': ["U", "F'", "U", "F", "U", "F'", "U2", "F"],
            'RF': ["U", "F'", "U'", "F", "Y", "U'", "F", "U", "F'", "Y'"],
            'RU': ["R", "U'", "R'", "U", "Y'", "U", "R'", "U'", "R", "Y"],
            'BU': ["U", "F'", "U2", "F", "U", "F'", "U2", "F"],
            'LU': ["U", "F'", "U'", "F", "U", "F'", "U2", "F"],
            'UR': ["U'", "R", "U'", "R'", "U", "R", "U", "R'"],
            'UL': ["U'", "R", "U", "R'", "U", "R", "U", "R'"],
            'UF': ["U", "F'", "U2", "F", "U'", "R", "U", "R'"],
        },
        'URF': {
            'LU': ["F'", "U'", "F"],
            'UR': ["U", "R", "U'", "R'"],
            'FR': ["U'", "R", "U'", "R'", "U'", "R", "U2", "R'"],
            'RF': ["U'", "R", "U", "R'", "Y'", "U", "R'", "U'", "R", "Y"],
            'UF': ["F'", "U", "F", "U'", "Y", "U'", "F", "U", "F'", "Y'"],
            'UL': ["U'", "R", "U2", "R'", "U'", "R", "U2", "R'"],
            'UB': ["U'", "R", "U", "R'", "U'", "R", "U2", "R'"],
            'FU': ["U", "F'", "U", "F", "U'", "F'", "U'", "F"],
            'BU': ["U", "F'", "U'", "F", "U'", "F'", "U'", "F"],
            'RU': ["U'", "R", "U2", "R'", "U", "F'", "U'", "F"],
        },
        'FRD': {
            'FU': ["U", "R", "U'", "R'", "U'", "F'", "U", "F"],
            'RU': ["U2", "R", "U'", "R'", "U'", "F'", "U", "F"],
            'LU': ["R", "U'", "R'", "U'", "F'", "U", "F"],
            'BU': ["U'", "R", "U'", "R'", "U'", "F'", "U", "F"],
            'UR': ["U'", "F'", "U", "F", "U", "R", "U'", "R'"],
            'UL': ["U", "F'", "U", "F", "U", "R", "U'", "R'"],
            'UB': ["F'", "U", "F", "U", "R", "U'", "R'"],
            'UF': ["U2", "F'", "U", "F", "U", "R", "U'", "R'"],
            'RF': ["R", "U'", "R'", "Y'", "U", "R'", "U2", "R", "U", "R'", "U2", "R", "Y"],
            'FR': [],
        },
        'DFR': {
            'FU': ["F'", "U", "F", "U'", "F'", "U", "F"],
            'UR': ["R", "U", "R'", "U'", "R", "U", "R'"],
            'FR': ["R", "U'", "R'", "U", "R", "U2", "R'", "U", "R", "U'", "R'"],
            'RF': ["R", "U", "R'", "U'", "R", "U'", "R'", "U", "Y'", "U", "R'", "U'", "R", "Y"],
        },
        'RDF': {
            'FU': ["F'", "U'", "F", "U", "F'", "U'", "F"],
            'UR': ["R", "U'", "R'", "U", "R", "U'", "R'"],
            'FR': ["R", "U'", "R'", "U'", "R", "U", "R'", "U'", "R", "U2", "R'"],
            'RF': ["R", "U'", "R'", "Y'", "U", "R'", "U'", "R", "U'", "R'", "U'", "R", "Y"]
        },
        'RFU':{
            'FR': ["R", "U", "R'", "U'", "R", "U", "R'", "U'", "R", "U", "R'"],
            'RF': ["R", "U'", "R'", "Y'", "U", "R'", "U", "R", "Y"],
            'UF': ["R", "U", "R'", "U'", "U'", "R", "U", "R'", "U'", "R", "U", "R'"],
            'UL': ["U2", "R", "U", "R'", "U", "R", "U'", "R'"],
            'UB': ["U", "R", "U2", "R'", "U", "R", "U'", "R'"],
            'UR': ["R", "U2", "R'", "U'", "R", "U", "R'"],
            'LU': ["U'", "F'", "U2", "F", "U'", "F'", "U", "F"],
            'BU': ["U2", "F'", "U'", "F", "U'", "F'", "U", "F"],
            'RU': ["Y'", "R'", "U'", "R", "U2", "R'", "U'", "R", "U", "R'", "U'", "R", "Y"],
            'FU': ["F'", "U2", "F", "U", "F'", "U'", "F"],
        },
    }
    @staticmethod
    def get_step(corner, edge):
        """Return the move sequence for the given corner/edge orientation
        keys; each key letter encodes where a sticker of the cubie faces."""
        return F2LSolver.STEPS[corner][edge]
    def move(self, s, solution):
        # Apply one move to the cube and record it in the solution list.
        self.cube.move(Move(s))
        solution.append(s)
    def solution(self):
        """Solve all four F2L slots; returns the full move list."""
        solution = []
        for _ in range(4):
            front_color = self.cube.cubies['F'].facings['F'].color
            right_color = self.cube.cubies['R'].facings['R'].color
            corner = self.cube.search_by_colors(front_color, right_color, 'W')
            step_solution = WhiteFaceSolver.first_step(corner, self.cube.cubies[corner].color_facing('W'))
            solution.extend(step_solution)
            for s in step_solution:
                self.cube.move(Move(s))
            edge = self.cube.search_by_colors(front_color, right_color)
            # If edge is in BL or BR, WAF!, this case is not expected in any manual
            if edge == 'BL':
                self.move("B'", solution)
                self.move("U'", solution)
                self.move("B", solution)
            elif edge == 'BR':
                self.move("B", solution)
                self.move("U", solution)
                self.move("B'", solution)
            elif edge == 'FL':
                self.move("L'", solution)
                self.move("U'", solution)
                self.move("L", solution)
            corner = self.cube.search_by_colors(front_color, right_color, 'W')
            # Rotate the top layer until the corner sits at FRU.
            if 'U' in corner:
                while corner != 'FRU':
                    self.move("U", solution)
                    corner = self.cube.search_by_colors(front_color, right_color, 'W')
            edge = self.cube.search_by_colors(front_color, right_color)
            # Encode both cubies' orientations as STEPS lookup keys.
            corner_facings = ''.join([
                self.cube.cubies[corner].color_facing(front_color),
                self.cube.cubies[corner].color_facing(right_color),
                self.cube.cubies[corner].color_facing('W')
            ])
            edge_facings = ''.join([
                self.cube.cubies[edge].color_facing(front_color),
                self.cube.cubies[edge].color_facing(right_color)
            ])
            step_solution = F2LSolver.get_step(corner_facings, edge_facings)
            solution.extend(step_solution)
            for s in step_solution:
                self.cube.move(Move(s))
            # Rotate the whole cube so the next slot becomes front-right.
            self.cube.move(Move("Y"))
            solution.append("Y")
        return solution
| true | true |
f73d3e7a775663a6c877c5359c98a292147c2678 | 2,169 | py | Python | ctr/model.py | Ottovonxu/islide | 5ee9954e378f0b5a0722292351cb3cc74b95c1b3 | [
"Apache-2.0"
] | null | null | null | ctr/model.py | Ottovonxu/islide | 5ee9954e378f0b5a0722292351cb3cc74b95c1b3 | [
"Apache-2.0"
] | null | null | null | ctr/model.py | Ottovonxu/islide | 5ee9954e378f0b5a0722292351cb3cc74b95c1b3 | [
"Apache-2.0"
] | null | null | null | import torch
from torch import nn
class FCN(nn.Module):
def __init__(self, dimension, num_layers=3, num_class=2):
super(FCN, self).__init__()
self.first_layer = nn.Linear(dimension, 1000)
self.first_layer_relu = nn.ReLU()
mid_layers = []
for i in range(num_layers - 2):
mid_layers.append(nn.Linear(1000, 1000))
mid_layers.append(nn.ReLU())
self.mid_layers = nn.Sequential(*mid_layers)
self.last_layer = nn.Linear(1000, num_class)
self.softmax = nn.Softmax(dim=1)
def forward(self, x):
x = self.first_layer(x)
x = self.first_layer_relu(x)
for layer in self.mid_layers:
x = layer(x)
x = self.last_layer(x)
x = self.softmax(x)
return x
class LR(nn.Module):
    """Multinomial logistic regression: one linear layer plus softmax."""

    def __init__(self, dimension, num_class=2):
        super(LR, self).__init__()
        self.last_layer = nn.Linear(dimension, num_class)
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        """Return class probabilities of shape (batch, num_class)."""
        return self.softmax(self.last_layer(x))
class LRSG(nn.Module):
    """Logistic regression head: one linear layer followed by a sigmoid."""

    def __init__(self, dimension, num_class=1):
        super(LRSG, self).__init__()
        self.last_layer = nn.Linear(dimension, num_class)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        """Return probabilities in (0, 1), shape (batch, num_class)."""
        return self.sigmoid(self.last_layer(x))
class FCNSG(nn.Module):
    """Fully connected network with a sigmoid output head.

    Architecture: dimension -> 1000 -> ... -> 1000 -> num_class, with a
    ReLU after every hidden Linear; outputs lie in (0, 1).
    """

    def __init__(self, dimension, num_layers=3, num_class=1):
        super(FCNSG, self).__init__()
        # Input projection plus activation.
        self.first_layer = nn.Linear(dimension, 1000)
        self.first_layer_relu = nn.ReLU()
        # (num_layers - 2) hidden Linear+ReLU pairs at width 1000.
        hidden = []
        for _ in range(num_layers - 2):
            hidden.extend((nn.Linear(1000, 1000), nn.ReLU()))
        self.mid_layers = nn.Sequential(*hidden)
        self.last_layer = nn.Linear(1000, num_class)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        """Return probabilities in (0, 1), shape (batch, num_class)."""
        h = self.mid_layers(self.first_layer_relu(self.first_layer(x)))
        return self.sigmoid(self.last_layer(h))
| 28.539474 | 61 | 0.591978 | import torch
from torch import nn
class FCN(nn.Module):
def __init__(self, dimension, num_layers=3, num_class=2):
super(FCN, self).__init__()
self.first_layer = nn.Linear(dimension, 1000)
self.first_layer_relu = nn.ReLU()
mid_layers = []
for i in range(num_layers - 2):
mid_layers.append(nn.Linear(1000, 1000))
mid_layers.append(nn.ReLU())
self.mid_layers = nn.Sequential(*mid_layers)
self.last_layer = nn.Linear(1000, num_class)
self.softmax = nn.Softmax(dim=1)
def forward(self, x):
x = self.first_layer(x)
x = self.first_layer_relu(x)
for layer in self.mid_layers:
x = layer(x)
x = self.last_layer(x)
x = self.softmax(x)
return x
class LR(nn.Module):
def __init__(self, dimension, num_class=2):
super(LR, self).__init__()
self.last_layer = nn.Linear(dimension, num_class)
self.softmax = nn.Softmax(dim=1)
def forward(self, x):
x = self.last_layer(x)
x = self.softmax(x)
return x
class LRSG(nn.Module):
def __init__(self, dimension, num_class=1):
super(LRSG, self).__init__()
self.last_layer = nn.Linear(dimension, num_class)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
x = self.last_layer(x)
x = self.sigmoid(x)
return x
class FCNSG(nn.Module):
def __init__(self, dimension, num_layers=3, num_class=1):
super(FCNSG, self).__init__()
self.first_layer = nn.Linear(dimension, 1000)
self.first_layer_relu = nn.ReLU()
mid_layers = []
for i in range(num_layers - 2):
mid_layers.append(nn.Linear(1000, 1000))
mid_layers.append(nn.ReLU())
self.mid_layers = nn.Sequential(*mid_layers)
self.last_layer = nn.Linear(1000, num_class)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
x = self.first_layer(x)
x = self.first_layer_relu(x)
for layer in self.mid_layers:
x = layer(x)
x = self.last_layer(x)
x = self.sigmoid(x)
return x
| true | true |
f73d3f367af7a111510038a37e6323f994ad0856 | 4,279 | py | Python | plenum/test/view_change/test_re_order_pre_prepares_for_lagged.py | IDunion/indy-plenum | 406afdeca1630be688f803a3cba15115faa20e2b | [
"Apache-2.0"
] | 148 | 2017-07-11T19:05:25.000Z | 2022-03-16T21:31:20.000Z | plenum/test/view_change/test_re_order_pre_prepares_for_lagged.py | IDunion/indy-plenum | 406afdeca1630be688f803a3cba15115faa20e2b | [
"Apache-2.0"
] | 561 | 2017-06-29T17:59:56.000Z | 2022-03-09T15:47:14.000Z | plenum/test/view_change/test_re_order_pre_prepares_for_lagged.py | IDunion/indy-plenum | 406afdeca1630be688f803a3cba15115faa20e2b | [
"Apache-2.0"
] | 378 | 2017-06-29T17:45:27.000Z | 2022-03-26T07:27:59.000Z | import pytest
from plenum.common.constants import DOMAIN_LEDGER_ID, PREPREPARE, PREPARE, COMMIT
from plenum.common.messages.internal_messages import ViewChangeStarted, NewViewCheckpointsApplied
from plenum.common.messages.node_messages import NewView
from plenum.server.consensus.utils import preprepare_to_batch_id
from plenum.test.delayers import delay_3pc, msg_rep_delay
from plenum.test.helper import sdk_send_random_and_check, max_3pc_batch_limits
from plenum.test.node_catchup.helper import waitNodeDataEquality
from plenum.test.node_request.helper import sdk_ensure_pool_functional
from plenum.test.stasher import delay_rules_without_processing
@pytest.fixture(scope="module")
def tconf(tconf):
    # Override the pool config for this module: cap master 3PC batches at
    # one request each, so every request maps to exactly one batch and the
    # pp_seq_no arithmetic in the test below stays deterministic.
    with max_3pc_batch_limits(tconf, size=1) as tconf:
        yield tconf
def test_re_order_pre_prepares_no_pre_prepares(looper, txnPoolNodeSet,
sdk_wallet_client, sdk_pool_handle):
# 1. drop PrePrepars, Prepares and Commits on 4thNode
# Order a couple of requests on Nodes 1-3
lagging_node = txnPoolNodeSet[-1]
other_nodes = txnPoolNodeSet[:-1]
with delay_rules_without_processing(lagging_node.nodeIbStasher, delay_3pc()):
sdk_send_random_and_check(looper, txnPoolNodeSet,
sdk_pool_handle, sdk_wallet_client, 3)
assert all(n.master_last_ordered_3PC == (0, 3) for n in other_nodes)
with delay_rules_without_processing(lagging_node.nodeIbStasher,
msg_rep_delay(types_to_delay=[PREPREPARE, PREPARE, COMMIT])):
# 2. simulate view change start so that
# all PrePrepares/Prepares/Commits are cleared
# and uncommitted txns are reverted
for n in txnPoolNodeSet:
n.replicas.send_to_internal_bus(ViewChangeStarted(view_no=1))
master_ordering_service = n.master_replica._ordering_service
assert not master_ordering_service.prePrepares
assert not master_ordering_service.prepares
assert not master_ordering_service.commits
ledger = n.db_manager.ledgers[DOMAIN_LEDGER_ID]
state = n.db_manager.states[DOMAIN_LEDGER_ID]
assert len(ledger.uncommittedTxns) == 0
assert ledger.uncommitted_root_hash == ledger.tree.root_hash
assert state.committedHead == state.head
# check that all nodes but the lagging one have old_view_pps stored
for n in other_nodes:
assert n.master_replica._ordering_service.old_view_preprepares
assert not lagging_node.master_replica._ordering_service.old_view_preprepares
# 3. Simulate View Change finish to re-order the same PrePrepare
assert lagging_node.master_last_ordered_3PC == (0, 0)
new_master = txnPoolNodeSet[1]
batches = sorted([preprepare_to_batch_id(pp) for _, pp in
new_master.master_replica._ordering_service.old_view_preprepares.items()])
new_view_msg = NewView(viewNo=0,
viewChanges=[],
checkpoint=new_master.master_replica._consensus_data.initial_checkpoint,
batches=batches)
new_view_chk_applied_msg = NewViewCheckpointsApplied(view_no=0,
view_changes=[],
checkpoint=None,
batches=batches)
for n in txnPoolNodeSet:
n.master_replica._consensus_data.new_view_votes.add_new_view(new_view_msg, n.master_replica._consensus_data.primary_name)
n.master_replica._consensus_data.prev_view_prepare_cert = batches[-1].pp_seq_no
n.master_replica._ordering_service._bus.send(new_view_chk_applied_msg)
# 4. Make sure that the nodes 1-3 (that already ordered the requests) sent Prepares and Commits so that
# the request was eventually ordered on Node4 as well
waitNodeDataEquality(looper, lagging_node, *other_nodes, customTimeout=60)
assert lagging_node.master_last_ordered_3PC == (0, 4)
sdk_ensure_pool_functional(looper, txnPoolNodeSet, sdk_wallet_client, sdk_pool_handle) | 56.302632 | 133 | 0.691283 | import pytest
from plenum.common.constants import DOMAIN_LEDGER_ID, PREPREPARE, PREPARE, COMMIT
from plenum.common.messages.internal_messages import ViewChangeStarted, NewViewCheckpointsApplied
from plenum.common.messages.node_messages import NewView
from plenum.server.consensus.utils import preprepare_to_batch_id
from plenum.test.delayers import delay_3pc, msg_rep_delay
from plenum.test.helper import sdk_send_random_and_check, max_3pc_batch_limits
from plenum.test.node_catchup.helper import waitNodeDataEquality
from plenum.test.node_request.helper import sdk_ensure_pool_functional
from plenum.test.stasher import delay_rules_without_processing
@pytest.fixture(scope="module")
def tconf(tconf):
with max_3pc_batch_limits(tconf, size=1) as tconf:
yield tconf
def test_re_order_pre_prepares_no_pre_prepares(looper, txnPoolNodeSet,
sdk_wallet_client, sdk_pool_handle):
lagging_node = txnPoolNodeSet[-1]
other_nodes = txnPoolNodeSet[:-1]
with delay_rules_without_processing(lagging_node.nodeIbStasher, delay_3pc()):
sdk_send_random_and_check(looper, txnPoolNodeSet,
sdk_pool_handle, sdk_wallet_client, 3)
assert all(n.master_last_ordered_3PC == (0, 3) for n in other_nodes)
with delay_rules_without_processing(lagging_node.nodeIbStasher,
msg_rep_delay(types_to_delay=[PREPREPARE, PREPARE, COMMIT])):
for n in txnPoolNodeSet:
n.replicas.send_to_internal_bus(ViewChangeStarted(view_no=1))
master_ordering_service = n.master_replica._ordering_service
assert not master_ordering_service.prePrepares
assert not master_ordering_service.prepares
assert not master_ordering_service.commits
ledger = n.db_manager.ledgers[DOMAIN_LEDGER_ID]
state = n.db_manager.states[DOMAIN_LEDGER_ID]
assert len(ledger.uncommittedTxns) == 0
assert ledger.uncommitted_root_hash == ledger.tree.root_hash
assert state.committedHead == state.head
for n in other_nodes:
assert n.master_replica._ordering_service.old_view_preprepares
assert not lagging_node.master_replica._ordering_service.old_view_preprepares
assert lagging_node.master_last_ordered_3PC == (0, 0)
new_master = txnPoolNodeSet[1]
batches = sorted([preprepare_to_batch_id(pp) for _, pp in
new_master.master_replica._ordering_service.old_view_preprepares.items()])
new_view_msg = NewView(viewNo=0,
viewChanges=[],
checkpoint=new_master.master_replica._consensus_data.initial_checkpoint,
batches=batches)
new_view_chk_applied_msg = NewViewCheckpointsApplied(view_no=0,
view_changes=[],
checkpoint=None,
batches=batches)
for n in txnPoolNodeSet:
n.master_replica._consensus_data.new_view_votes.add_new_view(new_view_msg, n.master_replica._consensus_data.primary_name)
n.master_replica._consensus_data.prev_view_prepare_cert = batches[-1].pp_seq_no
n.master_replica._ordering_service._bus.send(new_view_chk_applied_msg)
waitNodeDataEquality(looper, lagging_node, *other_nodes, customTimeout=60)
assert lagging_node.master_last_ordered_3PC == (0, 4)
sdk_ensure_pool_functional(looper, txnPoolNodeSet, sdk_wallet_client, sdk_pool_handle) | true | true |
f73d3f47ee4db208101959585a2c6ec037551192 | 321 | py | Python | scripts/burn_token.py | lana-shanghai/hodlthevoid | 6eb02d5ce615de36d2c0c5d0140f8b95f96cbd41 | [
"Apache-2.0"
] | null | null | null | scripts/burn_token.py | lana-shanghai/hodlthevoid | 6eb02d5ce615de36d2c0c5d0140f8b95f96cbd41 | [
"Apache-2.0"
] | null | null | null | scripts/burn_token.py | lana-shanghai/hodlthevoid | 6eb02d5ce615de36d2c0c5d0140f8b95f96cbd41 | [
"Apache-2.0"
] | null | null | null | from brownie import Dogeviathan, accounts, config
def main():
dev = accounts.add(config["wallets"]["from_key"])
attacker = accounts.add(config["wallets"]["from_attacker_key"])
dogeviathan = Dogeviathan[len(Dogeviathan) - 1]
dogeviathan.burn(0, {"from": dev})
#dogeviathan.burn(1, {"from": attacker}) | 35.666667 | 67 | 0.688474 | from brownie import Dogeviathan, accounts, config
def main():
dev = accounts.add(config["wallets"]["from_key"])
attacker = accounts.add(config["wallets"]["from_attacker_key"])
dogeviathan = Dogeviathan[len(Dogeviathan) - 1]
dogeviathan.burn(0, {"from": dev})
| true | true |
f73d3f84737b19db6eef7863ab1ba5ec455b8d46 | 40,043 | py | Python | ma_option_vol/update_excel_workbooks.py | ytmimi/Thesis2017 | 2133ec067f1c2972736d4bbeb83f88a62a591332 | [
"MIT"
] | null | null | null | ma_option_vol/update_excel_workbooks.py | ytmimi/Thesis2017 | 2133ec067f1c2972736d4bbeb83f88a62a591332 | [
"MIT"
] | null | null | null | ma_option_vol/update_excel_workbooks.py | ytmimi/Thesis2017 | 2133ec067f1c2972736d4bbeb83f88a62a591332 | [
"MIT"
] | null | null | null | import warnings
import openpyxl
import os
import datetime as dt
import re
from statistics import mean, stdev
from math import ceil, floor
import add_bloomberg_excel_functions as abxl
from CONSTANTS import ( OPTION_DESCRIPTION_PATTERN_INT, OPTION_DESCRIPTION_PATTERN_FLOAT, OPTION_SHEET_PATTERN_INT, OPTION_SHEET_PATTERN_FLOAT,
STOCK_SHEET_PATTERN, OUTPUT_DIR)
def update_sheet_with_BDP_description(workbook_path, sheet_name, starting_col, starting_row):
    '''
    Add a Bloomberg BDP(<ticker cell>, "SECURITY_DES") formula beside each
    distinct ticker in the given sheet.

    Tickers are read from every other column starting at `starting_col`;
    scanning down a column stops at the first empty cell, and tickers seen
    earlier in the scan are skipped.  The workbook is saved in place and
    must then be opened with the Bloomberg add-in so the formulas populate.
    '''
    warnings.warn('''deprecated: Please instantiate an Option_Chain_Sheet class 
        from the data_workbooks module. then use the equivalent sheet_BDP_description method''', 
        DeprecationWarning)
    workbook = openpyxl.load_workbook(workbook_path)
    worksheet = workbook[sheet_name]
    # tickers already given a description formula, in scan order
    seen_tickers = []
    for col in range(starting_col, worksheet.max_column + 1, 2):
        for row in range(starting_row, worksheet.max_row + 1):
            ticker_cell = worksheet.cell(row=row, column=col)
            # an empty cell marks the end of this column's data
            if ticker_cell.value is None:
                break
            if ticker_cell.value in seen_tickers:
                continue
            seen_tickers.append(ticker_cell.value)
            # description formula goes one column to the right of the ticker
            worksheet.cell(row=row, column=col + 1).value = abxl.add_BDP_fuction(ticker_cell.coordinate, "SECURITY_DES")
    workbook.save(workbook_path)
def update_option_contract_sheets(workbook_path, sheet_name,starting_col,starting_row, sheet_start_date_cell, sheet_announce_date_cell, sheet_end_date_cell, data_header_row, data_table_index, data_table_header, BDH_optional_arg=None, BDH_optional_val=None):
    '''
    Create one new worksheet per qualifying option contract listed in `sheet_name`.

    workbook_path            full file path to the workbook
    sheet_name               sheet holding (ticker, description) column pairs
    starting_col/starting_row where the ticker columns/rows begin; tickers sit
                             in every other column with descriptions beside them
    sheet_start_date_cell    cell coordinate (e.g. 'B3') of the sample start date
    sheet_announce_date_cell cell coordinate of the merger announcement date
    sheet_end_date_cell      cell coordinate of the sample end date
    data_header_row          row on which each new sheet's data table header goes
    data_table_index         header labels placed before the Bloomberg fields
    data_table_header        Bloomberg fields requested via the BDH formula
    BDH_optional_arg/val     optional BDH argument name(s) / value(s)

    A contract gets a sheet only when its expiration is at least 8 days after
    the start date, at most 60 days after the announcement date, and its strike
    lies within 1.5 standard deviations of the historic or merger-period mean
    stock price.
    '''
    warnings.warn('''deprecated: Please instantiate an Option_Workbook class 
    from the data_workbooks module. then use the equivalent create_option_sheet method''', 
    DeprecationWarning)
    #combine data_table_index and data_table_header
    total_data_headers = data_table_index+data_table_header
    #data labels to be added to the new excel worksheet
    option_data_labels = ['Security Name', 'Description', 'Type', 'Expiration Date', 'Strike Price']
    #given the file path, an excel workbook is loaded.
    wb = openpyxl.load_workbook(workbook_path)
    #The sheet we want to get data from is set to the variable data_sheet
    data_sheet = wb[sheet_name]
    #The cell in the sheet that contains the start date, as passed in by the function.
    # NOTE(review): the type check below inspects the END date cell but parses
    # the START date cell -- looks like a copy/paste slip; confirm intent.
    if type(data_sheet[sheet_end_date_cell].value) == int:
        start_date = dt.datetime.strptime(str(data_sheet[sheet_start_date_cell].value),'%Y%m%d').date()
    else:
        start_date= data_sheet[sheet_start_date_cell].value.date()
    #The cell in the sheet that contains the announcement date, as passed in by the function.
    if type(data_sheet[sheet_announce_date_cell].value) == int:
        announcement_date = dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d').date()
    else:
        announcement_date= data_sheet[sheet_announce_date_cell].value.date()
    total_rows = data_sheet.max_row
    total_columns = data_sheet.max_column
    #counter to keep track of each sheet created
    sheet_count = 0
    #gets the average stock price and standard deviation of the stock price data for the historic and merger period:
    # NOTE(review): historic_stock_mean_and_std / merger_stock_mean_and_std /
    # is_in_range are not defined in this module -- presumably imported
    # elsewhere; verify before reuse.  Each *_mean_and_std is treated below
    # as a (mean, std) pair.
    historic = historic_stock_mean_and_std(reference_wb_path=workbook_path, price_column_header='PX_LAST', header_start_row=data_header_row, date_0=dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d'))
    merger = merger_stock_mean_and_std(reference_wb_path=workbook_path, price_column_header='PX_LAST', header_start_row=data_header_row, date_0=dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d'))
    #iterate through the columns of the data_sheet (tickers in every other column)
    for i in range(starting_col, total_columns+1, 2):
        #iterate through the rows of the data_sheet
        for j in range(starting_row, total_rows+1):
            ticker_cell = data_sheet.cell(row=j, column=i).value
            des_cell = data_sheet.cell(row=j, column=i+1).value
            #check if the ticker and description cell does not = None
            if ((ticker_cell != None) and (des_cell != None)):
                #checks to see if the description cell value follows the pattern of an option description
                if (re.match(OPTION_DESCRIPTION_PATTERN_INT, des_cell) or re.match(OPTION_DESCRIPTION_PATTERN_FLOAT, des_cell)) :
                    #format_option_description() returns the following list:
                    #[security_name, option_description, option_type, expiration_date, strike_price]
                    option_data = format_option_description(ticker_cell, des_cell)
                    #the number of days between the expiration and start date. 
                    expiration_from_start = (option_data[3] - start_date).days
                    #days between the expiration date and the announcement date
                    days_past_announcemt = (option_data[3]- announcement_date).days
                    #import pdb; pdb.set_trace()
                    #skip contracts expiring < 8 days after the start date or > 60 days past the announcement date
                    if (expiration_from_start < 8) or (days_past_announcemt > 60) :
                        pass
                    #otherwise, keep creating sheets
                    else:
                        #check to see if the stike is within 1.5 standard deviation of the historical and merger stock mean
                        if ((is_in_range(num=option_data[-1], high=historic[0]+1.5*historic[1], low=historic[0]-1.5*historic[1])) or (is_in_range(num=option_data[-1], high=merger[0]+1.5*merger[1], low=merger[0]-1.5*merger[1]))):
                            #creates a new sheet for the passed in workbook
                            new_sheet = wb.create_sheet()
                            #increment the sheet count by 1
                            sheet_count +=1
                            #'/' aren't allowed in excel sheet names, so we replace them with '-' if the name contains '/'
                            new_sheet.title = option_data[1].replace('/', '-')
                            #zip creates a tuple pair for each item of the passed in lists. this tuple can then be appended to the sheet
                            for data in zip(option_data_labels,option_data):
                                new_sheet.append(data)
                            #loop through every value of total_data_headers and add it to the worksheet at the specified data_header_row
                            for (index, value) in enumerate(total_data_headers, start= 1) :
                                new_sheet.cell(row = data_header_row,column = index ).value = value
                            #add the BDH formula to the sheet; 'B4' is the
                            #Expiration Date cell written by the append loop above
                            new_sheet.cell(row=data_header_row+1, column=2).value = abxl.add_option_BDH( security_name = option_data[0],
                                                                                                    fields = data_table_header,
                                                                                                    start_date = data_sheet[sheet_start_date_cell].value,
                                                                                                    end_date = 'B4',
                                                                                                    optional_arg = BDH_optional_arg,
                                                                                                    optional_val = BDH_optional_val)
                else:
                    print('Not a valid option description. Could not create new workbook sheets for {}'.format(des_cell))
                    continue
    #save the workbook
    wb.save(workbook_path)
    wb_name = workbook_path.split('/')[-1]
    data='Saving workbook with {} new tabs: {} \n'.format(sheet_count,wb_name)
    store_data_to_txt_file(file_name='option_sheets', data=data)
def format_option_description(security_name, option_description):
    '''
    Parse a Bloomberg option description into its components.

    security_name       e.g. 'BBG00673J6L5 Equity'
    option_description  e.g. 'PFE US 12/20/14 P18'

    Returns [security_name, option_description, option_type,
    expiration_date, strike_price] where option_type is 'Put' or 'Call',
    expiration_date is a datetime.date, and strike_price is an int for
    whole-number strikes, otherwise a float.

    Raises ValueError when the type flag is neither 'P' nor 'C' (the
    original fell through and raised a confusing NameError instead).
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. For simple parsing of an option description instantiate an Option_Chain_Sheet
    and use the parse_option_description mehtod. for a complete, formated description use the option_metadata, mehtod in 
    the Option_Workbook class.''', DeprecationWarning)
    # e.g. 'PFE US 12/20/14 P18' -> ['PFE', 'US', '12/20/14', 'P18']
    description_list = option_description.split(' ')
    # first character of the last token encodes Put/Call
    type_flag = description_list[-1][0]
    if type_flag == 'P':
        option_type = 'Put'
    elif type_flag == 'C':
        option_type = 'Call'
    else:
        raise ValueError('Unknown option type flag {!r} in {!r}'.format(type_flag, option_description))
    # third token is the expiration, e.g. '12/20/14'
    expiration_date = dt.datetime.strptime(description_list[2],'%m/%d/%y').date()
    strike_text = description_list[-1][1:]
    try:
        strike_price = int(strike_text)
    except ValueError:
        # fractional strikes such as '18.5' are not valid ints
        strike_price = float(strike_text)
    return [security_name, option_description, option_type, expiration_date, strike_price]
def update_workbook_data_index(workbook_path, data_start_row, index_column):
    '''
    Loop through all the sheets of the workbook and give each one a
    relative day index.

    The announcement date is read from cell B5 of the first sheet.  Stock
    sheets (names matching STOCK_SHEET_PATTERN) are indexed in place via
    update_sheet_index(); option-contract sheets then receive a copy of
    the stock sheet's index column via copy_data().

    NOTE(review): this assumes a stock sheet appears before any option
    sheet in the workbook -- otherwise `stock_sheet`/`total_rows` below are
    unbound when the option branch runs.  Confirm against callers.
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. Use the add_index_to_sheets 
    method in the Option_Workbook class''',DeprecationWarning)
    #loads an excel workbook given the file path to that workbook.
    wb = openpyxl.load_workbook(workbook_path)
    #gets a list of all the sheets in the workbook
    sheet_list = wb.sheetnames
    #in case index column was passed in as a character, convert it to an integer
    # NOTE(review): convert_to_numbers is not defined in this module --
    # presumably imported elsewhere; verify.
    index_column= convert_to_numbers(index_column)
    #iterates through every sheet
    for (index, sheet_name) in enumerate(sheet_list):
        #indexing starts at 0. 
        if index == 0:
            #get the announcement date from the first sheet
            sheet = wb[sheet_name]
            announcement_date = sheet['B5'].value
        #if the sheet_name matches the stock sheet pattern:
        if re.match(STOCK_SHEET_PATTERN, sheet_name):
            #load the stock sheet and save it to the stock_sheet variable
            stock_sheet = wb[sheet_name]
            total_rows = stock_sheet.max_row
            update_sheet_index(reference_sheet= stock_sheet, date=announcement_date, start_row=data_start_row)
        #elif the sheet_name matches an options contract sheet
        elif(re.match(OPTION_SHEET_PATTERN_INT, sheet_name) or re.match(OPTION_SHEET_PATTERN_FLOAT, sheet_name)):
            #load the option sheet and save it to the option_sheet variable
            option_sheet = wb[sheet_name]
            copy_data(reference_sheet=stock_sheet, main_sheet=option_sheet, index_start_row=data_start_row, 
                index_end_row=total_rows, reference_data_column=index_column, main_data_column=index_column)
    wb.save(workbook_path)
    print('Indexed each sheet. Saving workbook...')
def update_sheet_index(reference_sheet, date, start_row):
    '''
    Write a relative day index into column A of `reference_sheet`.

    The row whose column-B date equals `date` gets index 0; rows above it
    get negative offsets, rows below it positive ones.
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. 
    Use the add_index method in the Stock_Sheet class''',DeprecationWarning)
    last_row = reference_sheet.max_row
    # row whose column-B date matches `date`; everything is indexed
    # relative to this row
    zero_row = find_index_0(worksheet=reference_sheet, start=start_row,
                            end=last_row, date_col=2, date_0=date)
    for row in range(start_row, last_row + 1):
        reference_sheet.cell(row=row, column=1).value = row - zero_row
def update_read_data_only(file_path):
    '''
    Re-save a workbook as cell values only.

    Loading with data_only=True replaces formula strings with their last
    cached results; saving writes that snapshot back over `file_path`.
    Returns the loaded workbook.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    workbook = openpyxl.load_workbook(file_path, data_only=True)
    workbook.save(file_path)
    return workbook
def store_data_to_txt_file(file_name, data,file_path=OUTPUT_DIR):
    '''
    Append `data` to '<file_path>/<file_name>.txt'.

    The directory is created if missing; append mode creates the file on
    first use, so the original's two branches ('a' when the directory
    existed, 'w' when it was just created) collapse to one path with
    identical results.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    #full file path
    complete_path = '{}/{}.{}'.format(file_path,file_name,'txt')
    # exist_ok=True removes the check-then-create race the original had
    # (os.path.exists followed by makedirs(exist_ok=False))
    os.makedirs(file_path, exist_ok=True)
    # context manager guarantees the handle is closed even if write() fails
    with open(complete_path, 'a') as f:
        f.write(data)
def delet_workbook_option_sheets(workbook_path):
    '''
    Remove every option-contract sheet (names matching the option sheet
    patterns) from the workbook, log how many were removed, and save.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    workbook = openpyxl.load_workbook(workbook_path)
    initial_count = len(workbook.sheetnames)
    # make the first sheet the active one before deleting
    workbook.active = 0
    # iterate over a snapshot so deletion does not disturb the loop
    for name in list(workbook.sheetnames):
        is_option_sheet = (re.match(OPTION_SHEET_PATTERN_INT, name)
                           or re.match(OPTION_SHEET_PATTERN_FLOAT, name))
        if is_option_sheet:
            del workbook[name]
    removed = initial_count - len(workbook.sheetnames)
    wb_name = workbook_path.split('/')[-1]
    data ='Deleted {} sheets from {} \n'.format(removed, wb_name)
    store_data_to_txt_file(file_name= 'deleted_sheets', data= data)
    workbook.save(workbook_path)
def find_index_0(worksheet,start, end, date_col, date_0):
    '''
    Binary search for the worksheet row whose date equals date_0.

    worksheet   an openpyxl-like worksheet object
    start       first row index to consider (>= 1)
    end         last row index to consider (<= total rows)
    date_col    column holding sorted dates (1=A, 2=B, ...)
    date_0      the date to locate

    Returns the row index containing date_0.
    Raises ValueError when date_0 is absent -- the original looped forever
    in that case.

    Also drops the original's O(n) throwaway index list (only its first and
    last elements were used) and replaces floor(x/2) with integer division.
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. 
    Use the row_index_by_date method in the Data_WorkSheet class or any class that inherits from it''',DeprecationWarning)
    low, high = start, end
    while low <= high:
        mid = (low + high) // 2
        curr_date = worksheet.cell(row=mid, column=date_col).value
        if curr_date == date_0:
            return mid
        if date_0 > curr_date:
            low = mid + 1
        else:
            high = mid - 1
    raise ValueError('date {!r} not found in rows {}-{}'.format(date_0, start, end))
def copy_data(reference_sheet, main_sheet,index_start_row, index_end_row, reference_data_column, main_data_column):
    '''
    Copy one column of values from reference_sheet into main_sheet over the
    row range [index_start_row, index_end_row] (rows line up one-to-one).

    datetime.datetime values are converted to plain dates; None values are
    skipped, leaving the destination cell untouched.  Columns may be given
    as integer indexes (1=A, 2=B, ...).
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. 
    Use the copy_data method in the Data_WorkSheet class or any class that inherits from it''',DeprecationWarning)
    for row in range(index_start_row, index_end_row + 1):
        source_value = reference_sheet.cell(row=row, column=reference_data_column).value
        if source_value is None:
            # nothing to copy for this row
            continue
        # exact type check (not isinstance) kept from the original so
        # datetime subclasses pass through unchanged
        if type(source_value) == dt.datetime:
            source_value = source_value.date()
        main_sheet.cell(row=row, column=main_data_column).value = source_value
def update_stock_price_sheet(workbook_path, sheet_name, stock_sheet_index, sheet_start_date_cell,sheet_announce_date_cell, sheet_end_date_cell, data_header_row, data_table_index, data_table_header, BDH_optional_arg=None, BDH_optional_val=None ):
    '''
    Add a stock-price sheet to the workbook.

    The new sheet is inserted at position `stock_sheet_index` (0-based),
    titled '<ticker> <exchange>' read from cells B2/B3 of `sheet_name`,
    seeded with company/date metadata from B1 and the three date cells,
    then given a header row and a Bloomberg BDH formula under the 'Date'
    header that pulls `data_table_header` fields over the full date range.
    The workbook is saved in place and the addition is logged via
    store_data_to_txt_file().
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. Use the 
    create_stock_sheet method in the Option_Workbook class''',DeprecationWarning)
    #load the given workbook
    wb = openpyxl.load_workbook(workbook_path)
    #gets the reference sheet
    reference_sheet = wb[sheet_name]
    # assumes B2 holds the ticker and B3 the exchange/market -- TODO confirm
    ticker = '{} {}'.format(reference_sheet['B2'].value, reference_sheet['B3'].value)
    #create a new sheet, and makes it the second sheet in the workbook. sheet indexing starts at 0.
    new_sheet = wb.create_sheet(index=stock_sheet_index)
    #sets the title of the new worksheet
    new_sheet.title = ticker
    #basic data to be added to the sheet
    data = [('Company Name', reference_sheet['B1'].value),
            ('Company Ticker',ticker),
            ('Start Date', reference_sheet[sheet_start_date_cell].value),
            ('Announcement Date',reference_sheet[sheet_announce_date_cell].value),
            ('End Date',reference_sheet[sheet_end_date_cell].value)]
    #appends the data to the top of the spreadsheet
    for (index,data_lst) in enumerate(data):
        new_sheet.append(data_lst)
    #combines both passed lists:
    total_headers = data_table_index + data_table_header
    #set the index and column headers for the worksheet
    for (index, value) in enumerate(total_headers, start= 1):
        new_sheet.cell(row=data_header_row,column=index).value = value
        # the BDH array formula is anchored one row beneath the 'Date' header;
        # data[1][1] is the ticker assembled above
        if value.upper() == ('DATE'):
            #sets the BDH function into place
            new_sheet.cell(row= data_header_row+1, column= index).value = abxl.add_option_BDH(security_name = data[1][1],
                                                                                    fields = data_table_header,
                                                                                    start_date = reference_sheet[sheet_start_date_cell].value,
                                                                                    end_date = reference_sheet[sheet_end_date_cell].value,
                                                                                    optional_arg = BDH_optional_arg,
                                                                                    optional_val = BDH_optional_val)
    #saves the newly added sheet to the workbook.
    wb.save(workbook_path)
    wb_name = workbook_path.split('/')[-1]
    data = 'Added {} sheet to workbook: {}\n'.format(ticker, wb_name)
    store_data_to_txt_file(file_name= 'stock_sheets', data= data)
def update_workbook_average_column(reference_wb_path, column_header, header_row, data_start_row, ignore_sheet_list=None):
    '''
    Append a per-row average column to every sheet in the workbook that has
    one or more `column_header` columns, then save the workbook.

    reference_wb_path  path to the workbook (opened with data_only=True)
    column_header      header text identifying the columns to average
    header_row         row holding the column headers
    data_start_row     first row of data beneath the headers
    ignore_sheet_list  sheet names to skip (default None; a mutable []
                       default was replaced -- passing [] still works)

    Raises KeyError if an ignored sheet name has no matching columns,
    matching the original dict.pop() behavior.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    reference_wb = openpyxl.load_workbook(reference_wb_path, data_only=True)
    # {'sheet_name': [column indexes whose header matches column_header]}
    sheet_data_columns = find_column_index_by_header(reference_wb=reference_wb, column_header=column_header, header_row=header_row)
    # drop any sheets the caller asked to skip
    for ignore_sheet in (ignore_sheet_list or []):
        sheet_data_columns.pop(ignore_sheet)
    for sheet_name, data_columns in sheet_data_columns.items():
        update_sheet_average_column(reference_wb=reference_wb,
                                    sheet_name=sheet_name,
                                    data_columns=data_columns,
                                    data_start_row=data_start_row,
                                    column_header=column_header)
    reference_wb.save(reference_wb_path)
    print('Saving Workbook...')
def update_sheet_average_column(reference_wb,sheet_name,data_columns, data_start_row, column_header):
    '''
    Append a per-row average column to one sheet.

    For each row from data_start_row down to the sheet's last row, the
    non-zero values found in `data_columns` are averaged and written one
    column to the right of the sheet's current last column; rows with no
    non-zero values get 0.  A '<sheet> Average <column_header>' header is
    written one row above the data.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    # bracket indexing replaces openpyxl's deprecated get_sheet_by_name()
    sheet = reference_wb[sheet_name]
    max_row = sheet.max_row
    # capture the target column before any write enlarges max_column
    avg_col = sheet.max_column + 1
    #sets the header for the average column one row above the data
    sheet.cell(row=data_start_row-1, column=avg_col).value = '{} {} {}'.format(sheet_name, 'Average', column_header)
    for i in range(data_start_row, max_row+1):
        # non-zero values for this row across the selected columns
        cell_values = [sheet.cell(row=i, column=col).value
                       for col in data_columns
                       if sheet.cell(row=i, column=col).value != 0]
        # BUG FIX: the original called statistics.mean(), but this module
        # only does `from statistics import mean, stdev`, so that line
        # raised NameError at runtime; use the imported name directly.
        sheet.cell(row=i, column=avg_col).value = mean(cell_values) if cell_values else 0
def find_column_index_by_header(reference_wb, column_header, header_row):
    '''
    Scan every sheet's header row for columns whose header equals
    `column_header`.

    Returns {sheet_name: [matching 1-based column indexes]}, omitting
    sheets with no matching column.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    matches = {}
    for sheet_name in reference_wb.sheetnames:
        sheet = reference_wb[sheet_name]
        # the scan deliberately runs one column past max_column, mirroring
        # the original range(max_col + 1) with i + 1 indexing
        columns = [col for col in range(1, sheet.max_column + 2)
                   if sheet.cell(row=header_row, column=col).value == column_header]
        if columns:
            matches[sheet_name] = columns
    return matches
def stock_data_to_list(reference_wb,price_column_header, header_start_row, start_index, end_index):
    '''
    Collects every non-zero price value between rows start_index and
    end_index (inclusive) from each stock sheet of the workbook and returns
    them as one flat list.
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. Use the
    px_last_lst method in the Stock_Sheet class''',DeprecationWarning)
    # {'sheet_name': [column indexes holding the price header]}
    column_map = find_column_index_by_header(reference_wb = reference_wb, column_header= price_column_header, header_row= header_start_row)
    collected = []
    for sheet_name in column_map:
        # only stock sheets contribute values
        if not re.match(STOCK_SHEET_PATTERN, sheet_name):
            continue
        ws = reference_wb[sheet_name]
        price_col = column_map[sheet_name][0]
        for row in range(start_index, end_index + 1):
            value = ws.cell(row=row, column=price_col).value
            if value != 0:
                collected.append(value)
    return collected
def data_average(data_list):
    '''
    Returns the arithmetic mean of data_list, rounded down to the nearest
    whole number.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    average = mean(data_list)
    return floor(average)
def data_standard_dev(data_list):
    '''
    Returns the sample standard deviation of data_list, rounded up to the
    nearest whole number.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    deviation = stdev(data=data_list)
    return ceil(deviation)
def historic_stock_mean_and_std(reference_wb_path,price_column_header, header_start_row, date_0):
    '''
    Returns (mean, standard deviation) of the stock prices recorded from the
    first data row up to the announcement date date_0.
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. Use the
    historic_mean, and historic_std, method in the Stock_Sheet class''',DeprecationWarning)
    wb = openpyxl.load_workbook(reference_wb_path)
    # the stock sheet is the second sheet of the workbook
    stock_sheet = wb[wb.sheetnames[1]]
    last_row = stock_sheet.max_row
    # row index holding the announcement date
    announce_row = find_index_0(worksheet=stock_sheet, start=header_start_row+1, end=last_row, date_col=2, date_0=date_0)
    prices = stock_data_to_list(reference_wb=wb, price_column_header=price_column_header,
                                header_start_row=header_start_row, start_index=header_start_row+1, end_index=announce_row)
    return (data_average(prices), data_standard_dev(prices))
def merger_stock_mean_and_std(reference_wb_path, price_column_header, header_start_row, date_0):
    '''
    Returns (mean, standard deviation) of the stock prices recorded from the
    announcement date date_0 through the end of the M&A period.
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. Use the
    merger_mean, and merger_std, method in the Stock_Sheet class''',DeprecationWarning)
    wb = openpyxl.load_workbook(reference_wb_path)
    # the stock sheet is the second sheet of the workbook
    stock_sheet = wb[wb.sheetnames[1]]
    last_row = stock_sheet.max_row
    # row index holding the announcement date
    announce_row = find_index_0(worksheet=stock_sheet, start=header_start_row+1, end=last_row, date_col=2, date_0=date_0)
    prices = stock_data_to_list(reference_wb=wb, price_column_header=price_column_header,
                                header_start_row=header_start_row, start_index=announce_row, end_index=last_row)
    return (data_average(prices), data_standard_dev(prices))
def is_in_range(num, high, low):
    '''
    Returns True when num lies inside the inclusive interval [low, high].
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. Use the
    is_strike_in_range method in the Stock_Sheet class''',DeprecationWarning)
    return (num >= low) and (num <= high)
def fill_option_wb_empty_cells(reference_wb_path, column_start, row_start, fill_value):
    '''
    Opens the workbook at reference_wb_path, fills every empty cell of every
    option-contract sheet with fill_value, and saves the workbook back.
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. Use the
    fill_option_sheet method in the Option_Workbook class''',DeprecationWarning)
    wb = openpyxl.load_workbook(reference_wb_path)
    for name in wb.sheetnames:
        # only option-contract sheets are filled
        is_option_sheet = re.match(OPTION_SHEET_PATTERN_INT, name) or re.match(OPTION_SHEET_PATTERN_FLOAT, name)
        if is_option_sheet:
            fill_option_sheet_empty_cells(reference_sheet=wb[name], column_start=column_start, row_start=row_start, fill_value=fill_value)
    wb.save(reference_wb_path)
    print('Done filling empty cells with {}.'.format(fill_value))
def fill_option_sheet_empty_cells(reference_sheet, column_start, row_start, fill_value):
    '''
    Fills every empty (None-valued) cell of reference_sheet with fill_value,
    scanning from (row_start, column_start) through the sheet's last used row
    and column. The sheet is modified in place; nothing is returned.
    '''
    warnings.warn('''deprecated: moved to data_workbooks module. Use the
    fill_empty_cells method in the Option_Sheet class''',DeprecationWarning)
    total_rows = reference_sheet.max_row
    total_columns = reference_sheet.max_column
    for col in range(column_start, total_columns + 1):
        for row in range(row_start, total_rows + 1):
            cell = reference_sheet.cell(row=row, column=col)
            # idiom fix: test None by identity (`is None`), not `== None`,
            # which would invoke arbitrary __eq__ overloads
            if cell.value is None:
                cell.value = fill_value
## added to Data_Worksheet as letter_to_col_index
#########################Tested
def convert_to_numbers(lst):
    '''
    Accepts either a single Excel column letter or a list mixing letters and
    integers, and converts each letter to its 1-based column index
    (A=1, B=2, C=3, ...). Lists are modified in place and returned; a bare
    string returns the converted integer.
    '''
    warnings.warn('''deprecated: moved to data_workbooks module.
    Use the letter_to_col_index method in the Data_WorkSheet class or any class that inherits from it''',DeprecationWarning)
    # a bare column letter was passed instead of a list
    # (exact-type test kept to preserve the original's behavior)
    if type(lst) is str:
        return openpyxl.utils.column_index_from_string(lst)
    for position, item in enumerate(lst):
        if type(item) is str:
            lst[position] = openpyxl.utils.column_index_from_string(item)
    return lst
def add_extra_sheets(reference_wb_path, sheet_name, ticker_column, description_column,sheet_start_date_cell, sheet_announce_date_cell, sheet_end_date_cell, data_header_row, data_table_index, data_table_header, BDH_optional_arg=None, BDH_optional_val=None):
    '''
    Given a workbook containing option contract tickers and descriptions, new
    sheets are added to the workbook if they don't already exist.

    Rows of `sheet_name` are walked bottom-up; the loop stops at the first row
    whose (sanitized, '/'->'-') description already names an existing sheet.
    A contract only gets a sheet when its description matches the option
    description patterns AND its strike lies within 1.5 standard deviations of
    the historic or merger-period mean stock price. Each new sheet is seeded
    with the contract's metadata, the header row, and a Bloomberg BDH formula.

    Parameters:
        reference_wb_path: path of the .xlsx workbook to load and save.
        sheet_name: sheet holding the ticker/description rows to scan.
        ticker_column, description_column: 1-based column indexes of the
            security ticker and its description on `sheet_name`.
        sheet_start_date_cell, sheet_announce_date_cell, sheet_end_date_cell:
            cell references (e.g. 'B3') on `sheet_name` holding the study's
            start, announcement, and end dates.
        data_header_row: row on each new sheet where the data headers go.
        data_table_index, data_table_header: header labels; concatenated to
            form each new sheet's header row.
        BDH_optional_arg, BDH_optional_val: forwarded to abxl.add_option_BDH.

    NOTE(review): the announcement-date cell is parsed with
    strptime('%Y%m%d'), i.e. it is assumed to hold an int/str date -- confirm
    with callers. Also assumes the bottom rows of `sheet_name` are non-empty
    descriptions; an empty cell would raise AttributeError in the loop test.
    '''
    #combine data_table_index and data_table_header
    total_data_headers = data_table_index+data_table_header
    #data labels to be added to the new excel worksheet
    option_data_labels = ['Security Name', 'Description', 'Type', 'Expiration Date', 'Strike Price']
    #given the file path, an excel workbook is loaded.
    wb = openpyxl.load_workbook(reference_wb_path)
    #The sheet we want to get data from is set to the variable data_sheet
    data_sheet = wb.get_sheet_by_name(sheet_name)
    #gets the total rows of the worksheet (used as the bottom-up row cursor)
    total_rows = data_sheet.max_row
    #counter to keep track of each sheet created
    sheet_count = 0
    #gets the average stock price and standard deviation of the stock price data for the historic and merger period:
    historic = historic_stock_mean_and_std(reference_wb_path=reference_wb_path, price_column_header='PX_LAST', header_start_row=data_header_row, date_0=dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d'))
    merger = merger_stock_mean_and_std(reference_wb_path=reference_wb_path, price_column_header='PX_LAST', header_start_row=data_header_row, date_0=dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d'))
    #walk rows from the bottom until a description's sheet already exists
    while (data_sheet.cell(row=total_rows, column= description_column).value).replace('/','-') not in wb.get_sheet_names():
        #format_option_description() returns the following list:
        #[security_name, option_description, option_type, expiration_date, strike_price]
        option_data = format_option_description(data_sheet.cell(row=total_rows, column=ticker_column).value,
                                            data_sheet.cell(row=total_rows, column=description_column).value)
        if (re.match(OPTION_DESCRIPTION_PATTERN_INT, option_data[1]) or re.match(OPTION_DESCRIPTION_PATTERN_FLOAT,option_data[1])):
            #check to see if the strike is within 1.5 standard deviations of the historical or merger stock mean
            if ((is_in_range(num=option_data[-1], high=historic[0]+1.5*historic[1], low=historic[0]-1.5*historic[1])) or (is_in_range(num=option_data[-1], high=merger[0]+1.5*merger[1], low=merger[0]-1.5*merger[1]))):
                #creates a new sheet for the passed in workbook
                new_sheet = wb.create_sheet()
                #increment the sheet count by 1
                sheet_count +=1
                #'/' isn't allowed in excel sheet names, so replace it with '-'
                new_sheet.title = option_data[1].replace('/', '-')
                #zip creates a tuple pair for each item of the passed in lists. this tuple can then be appended to the sheet
                for data in zip(option_data_labels,option_data):
                    new_sheet.append(data)
                #loop through every value of total_data_headers and add it to the worksheet at the specified data_header_row
                for (index, value) in enumerate(total_data_headers, start= 1) :
                    new_sheet.cell(row = data_header_row,column = index ).value = value
                #add the BDH formula to the sheet (end date read from cell B4 of the new sheet)
                new_sheet['B{}'.format(data_header_row+1)] = abxl.add_option_BDH( security_name = option_data[0],
                                                                fields = data_table_header,
                                                                start_date = data_sheet[sheet_start_date_cell].value,
                                                                end_date = 'B4',
                                                                optional_arg = BDH_optional_arg,
                                                                optional_val = BDH_optional_val)
        #move the cursor one row up regardless of whether a sheet was created
        total_rows -= 1
    wb.save(reference_wb_path)
    print('Added {} new sheets to the workbook'.format(sheet_count))
def update_workbook_days_till_expiration(reference_wb_path, data_start_row, date_col, calculation_col):
    '''
    Adds a days-till-expiration column to every option-contract sheet of the
    workbook at reference_wb_path, then saves the workbook.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    wb = openpyxl.load_workbook(reference_wb_path)
    # accept column letters as well as integer indexes
    date_col = convert_to_numbers(date_col)
    calculation_col = convert_to_numbers(calculation_col)
    for name in wb.get_sheet_names():
        # skip anything that is not an option-contract sheet
        if not (re.match(OPTION_SHEET_PATTERN_INT, name) or re.match(OPTION_SHEET_PATTERN_FLOAT, name)):
            continue
        update_sheet_days_till_expiration(reference_sheet=wb.get_sheet_by_name(name), data_start_row=data_start_row,
                                          date_col=date_col, calculation_col=calculation_col)
    wb.save(reference_wb_path)
def update_sheet_days_till_expiration(reference_sheet, data_start_row, date_col, calculation_col):
    '''
    Writes a 'DTE' (days till expiration) column on reference_sheet: for each
    data row, the number of days between that row's date (in date_col) and the
    option's expiration date stored in cell B4. Stops at the first empty date
    cell. The sheet is modified in place; nothing is returned.
    '''
    warnings.warn('''Completely removed''',DeprecationWarning)
    total_rows = reference_sheet.max_row
    # the option's expiration date lives in cell B4 of the sheet
    exp_date = reference_sheet['B4'].value
    # column header, written one row above the data
    reference_sheet.cell(row=data_start_row-1, column=calculation_col).value = 'DTE'
    for i in range(data_start_row, total_rows+1):
        curr_date = reference_sheet.cell(row=i, column=date_col).value
        # idiom fix (`is None` instead of `== None`); an empty date cell
        # marks the end of the data
        if curr_date is None:
            break
        reference_sheet.cell(row=i, column=calculation_col).value = days_till_expiration(start_date=curr_date,
                                                                                        expiration_date=exp_date)
def days_till_expiration(start_date, expiration_date):
    '''
    Returns the number of whole days from start_date until expiration_date
    (negative when start_date is after expiration_date).
    '''
    warnings.warn('''Removed. Yet to be reimplimented''',DeprecationWarning)
    remaining = expiration_date - start_date
    return remaining.days
| 51.535393 | 258 | 0.676348 | import warnings
import openpyxl
import os
import datetime as dt
import re
from statistics import mean, stdev
from math import ceil, floor
import add_bloomberg_excel_functions as abxl
from CONSTANTS import ( OPTION_DESCRIPTION_PATTERN_INT, OPTION_DESCRIPTION_PATTERN_FLOAT, OPTION_SHEET_PATTERN_INT, OPTION_SHEET_PATTERN_FLOAT,
STOCK_SHEET_PATTERN, OUTPUT_DIR)
def update_sheet_with_BDP_description(workbook_path, sheet_name, starting_col, starting_row):
warnings.warn('''deprecated: Please instantiate an Option_Chain_Sheet class
from the data_workbooks module. then use the equivalent sheet_BDP_description method''',
DeprecationWarning)
wb = openpyxl.load_workbook(workbook_path)
sheet = wb[sheet_name]
total_rows = sheet.max_row
total_columns = sheet.max_column
unique_ticker = []
for i in range(starting_col, total_columns+1, 2):
for j in range(starting_row, total_rows+1):
if sheet.cell(row=j, column=i).value == None:
break
else:
if sheet.cell(row=j, column=i).value not in unique_ticker:
unique_ticker.append(sheet.cell(row=j, column=i).value)
sheet.cell(row=j, column= i+1).value = abxl.add_BDP_fuction(sheet.cell(row=j, column=i).coordinate, "SECURITY_DES")
ok_path)
def update_option_contract_sheets(workbook_path, sheet_name,starting_col,starting_row, sheet_start_date_cell, sheet_announce_date_cell, sheet_end_date_cell, data_header_row, data_table_index, data_table_header, BDH_optional_arg=None, BDH_optional_val=None):
warnings.warn('''deprecated: Please instantiate an Option_Workbook class
from the data_workbooks module. then use the equivalent create_option_sheet method''',
DeprecationWarning)
total_data_headers = data_table_index+data_table_header
option_data_labels = ['Security Name', 'Description', 'Type', 'Expiration Date', 'Strike Price']
wb = openpyxl.load_workbook(workbook_path)
data_sheet = wb[sheet_name]
if type(data_sheet[sheet_end_date_cell].value) == int:
start_date = dt.datetime.strptime(str(data_sheet[sheet_start_date_cell].value),'%Y%m%d').date()
else:
start_date= data_sheet[sheet_start_date_cell].value.date()
if type(data_sheet[sheet_announce_date_cell].value) == int:
announcement_date = dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d').date()
else:
announcement_date= data_sheet[sheet_announce_date_cell].value.date()
total_rows = data_sheet.max_row
total_columns = data_sheet.max_column
sheet_count = 0
historic = historic_stock_mean_and_std(reference_wb_path=workbook_path, price_column_header='PX_LAST', header_start_row=data_header_row, date_0=dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d'))
merger = merger_stock_mean_and_std(reference_wb_path=workbook_path, price_column_header='PX_LAST', header_start_row=data_header_row, date_0=dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d'))
for i in range(starting_col, total_columns+1, 2):
for j in range(starting_row, total_rows+1):
ticker_cell = data_sheet.cell(row=j, column=i).value
des_cell = data_sheet.cell(row=j, column=i+1).value
if ((ticker_cell != None) and (des_cell != None)):
if (re.match(OPTION_DESCRIPTION_PATTERN_INT, des_cell) or re.match(OPTION_DESCRIPTION_PATTERN_FLOAT, des_cell)) :
option_data = format_option_description(ticker_cell, des_cell)
expiration_from_start = (option_data[3] - start_date).days
days_past_announcemt = (option_data[3]- announcement_date).days
if (expiration_from_start < 8) or (days_past_announcemt > 60) :
pass
else:
if ((is_in_range(num=option_data[-1], high=historic[0]+1.5*historic[1], low=historic[0]-1.5*historic[1])) or (is_in_range(num=option_data[-1], high=merger[0]+1.5*merger[1], low=merger[0]-1.5*merger[1]))):
new_sheet = wb.create_sheet()
sheet_count +=1
new_sheet.title = option_data[1].replace('/', '-')
#zip creates a tuple pair for each item of the passed in lists. this tuple can then be appended to the sheet
for data in zip(option_data_labels,option_data):
new_sheet.append(data)
#loop through every value of total_data_headers and add it to the worksheet at the specified data_header_row
for (index, value) in enumerate(total_data_headers, start= 1) :
new_sheet.cell(row = data_header_row,column = index ).value = value
#add the BDH formula to the sheet
new_sheet.cell(row=data_header_row+1, column=2).value = abxl.add_option_BDH( security_name = option_data[0],
fields = data_table_header,
start_date = data_sheet[sheet_start_date_cell].value,
end_date = 'B4',
optional_arg = BDH_optional_arg,
optional_val = BDH_optional_val)
else:
print('Not a valid option description. Could not create new workbook sheets for {}'.format(des_cell))
continue
#save the workbook
wb.save(workbook_path)
wb_name = workbook_path.split('/')[-1]
data='Saving workbook with {} new tabs: {} \n'.format(sheet_count,wb_name)
store_data_to_txt_file(file_name='option_sheets', data=data)
def format_option_description(security_name, option_description):
warnings.warn('''deprecated: moved to data_workbooks module. For simple parsing of an option description instantiate an Option_Chain_Sheet
and use the parse_option_description mehtod. for a complete, formated description use the option_metadata, mehtod in
the Option_Workbook class.''', DeprecationWarning)
#will split the option_description by whitespace into a list that looks like: ['PFE', 'US', '12/20/14', 'P18']
description_list = option_description.split(' ')
#determins the option type based on description_list[-1][0] = 'P'
if description_list[-1][0] =='P':
option_type = 'Put'
elif description_list[-1][0] == 'C':
option_type = 'Call'
#description_list[2] = 12/20/14 and convertis it into a datetime object
expiration_date = dt.datetime.strptime(description_list[2],'%m/%d/%y').date()
#description_list[-1][1:] = '18', and converts the string to an int
try:
strike_price = int(description_list[-1][1:])
#if the string was a floating point number like 18.5, convert it to a float
except:
strike_price = float(description_list[-1][1:])
option_data_list = [security_name, option_description, option_type, expiration_date, strike_price]
return option_data_list
def update_workbook_data_index(workbook_path, data_start_row, index_column):
warnings.warn('''deprecated: moved to data_workbooks module. Use the add_index_to_sheets
method in the Option_Workbook class''',DeprecationWarning)
#loads an excel workbook given the file path to that workbook.
wb = openpyxl.load_workbook(workbook_path)
#gets a list of all the sheets in the workbook
sheet_list = wb.sheetnames
#in case index column was passed in as a character, convert it to an integer
index_column= convert_to_numbers(index_column)
#iterates through every sheet
for (index, sheet_name) in enumerate(sheet_list):
#indexing starts at 0.
if index == 0:
#get the announcement date from the first sheet
sheet = wb[sheet_name]
announcement_date = sheet['B5'].value
#if the sheet_name matches the stock sheet pattern:
if re.match(STOCK_SHEET_PATTERN, sheet_name):
#load the stock sheet and save it to the stock_sheet variable
stock_sheet = wb[sheet_name]
total_rows = stock_sheet.max_row
update_sheet_index(reference_sheet= stock_sheet, date=announcement_date, start_row=data_start_row)
#elif the sheet_name matches an options contract sheet
elif(re.match(OPTION_SHEET_PATTERN_INT, sheet_name) or re.match(OPTION_SHEET_PATTERN_FLOAT, sheet_name)):
#load the option sheet and save it to the option_sheet variable
option_sheet = wb[sheet_name]
copy_data(reference_sheet=stock_sheet, main_sheet=option_sheet, index_start_row=data_start_row,
index_end_row=total_rows, reference_data_column=index_column, main_data_column=index_column)
wb.save(workbook_path)
print('Indexed each sheet. Saving workbook...')
def update_sheet_index(reference_sheet, date, start_row):
warnings.warn('''deprecated: moved to data_workbooks module.
Use the add_index method in the Stock_Sheet class''',DeprecationWarning)
#gets the total number of rows in the worksheet
total_rows = reference_sheet.max_row
#returns the row index of the reference_sheet containg the date value
index_0 =find_index_0(worksheet=reference_sheet,start= start_row, end=total_rows, date_col=2, date_0= date)
#iterates over every column in the given date_column from the start to the end and add the index value to the cell
for index in range(start_row, total_rows+1):
reference_sheet.cell(row= index, column=1).value = index - index_0
def update_read_data_only(file_path):
warnings.warn('''Completely removed''',DeprecationWarning)
wb = openpyxl.load_workbook(file_path, data_only= True)
wb.save(file_path)
return wb
def store_data_to_txt_file(file_name, data,file_path=OUTPUT_DIR):
warnings.warn('''Completely removed''',DeprecationWarning)
#full file path
complete_path = '{}/{}.{}'.format(file_path,file_name,'txt')
#check if the file exists
if os.path.exists(file_path):
#if the file exisist open it to append
f = open(complete_path, 'a')
f.write(data)
f.close()
#else creat the file_path
else:
os.makedirs(file_path, exist_ok=False)
f = open(complete_path, 'w')
f.write(data)
f.close()
def delet_workbook_option_sheets(workbook_path):
warnings.warn('''Completely removed''',DeprecationWarning)
wb = openpyxl.load_workbook(workbook_path)
start_sheet_num = len(wb.sheetnames)
#set the active sheet in the workbook to the first sheet:
wb.active = 0
for (index,sheet) in enumerate(wb.sheetnames):
#if the sheet is an option sheet
if(re.match(OPTION_SHEET_PATTERN_INT, sheet)) or (re.match(OPTION_SHEET_PATTERN_FLOAT, sheet)):
del wb[sheet]
end_sheet_num = len(wb.sheetnames)
deleted_sheet_num = start_sheet_num - end_sheet_num
wb_name = workbook_path.split('/')[-1]
data ='Deleted {} sheets from {} \n'.format(deleted_sheet_num, wb_name)
store_data_to_txt_file(file_name= 'deleted_sheets', data= data)
wb.save(workbook_path)
def find_index_0(worksheet,start, end, date_col, date_0):
#list comprehesion for all the row indexes.
warnings.warn('''deprecated: moved to data_workbooks module.
Use the row_index_by_date method in the Data_WorkSheet class or any class that inherits from it''',DeprecationWarning)
index_list = [x for x in range(start,end+1)]
start_index = index_list[0]
end_index = index_list[-1]
average_index = floor((end_index + start_index)/2)
#variable for the while loop
found = False
while not found:
#print(start_index, found)
curr_date = worksheet.cell(row=average_index, column=date_col).value
if (date_0 == curr_date):
found = True
elif (date_0 > curr_date):
start_index = average_index +1
average_index = floor((end_index + start_index)/2)
elif (date_0 < curr_date):
end_index = average_index -1
average_index = floor((end_index + start_index)/2)
return average_index
def copy_data(reference_sheet, main_sheet,index_start_row, index_end_row, reference_data_column, main_data_column):
warnings.warn('''deprecated: moved to data_workbooks module.
Use the copy_data method in the Data_WorkSheet class or any class that inherits from it''',DeprecationWarning)
for i in range(index_start_row, index_end_row+1):
#if the value is a datetime.datetime object
if type(reference_sheet.cell(row= i, column= reference_data_column).value) == dt.datetime:
main_sheet.cell(row=i, column=main_data_column).value = reference_sheet.cell(row=i, column=reference_data_column).value.date()
elif reference_sheet.cell(row= i, column= reference_data_column).value == None:
continue
else:
main_sheet.cell(row=i, column=main_data_column).value = reference_sheet.cell(row=i, column=reference_data_column).value
def update_stock_price_sheet(workbook_path, sheet_name, stock_sheet_index, sheet_start_date_cell,sheet_announce_date_cell, sheet_end_date_cell, data_header_row, data_table_index, data_table_header, BDH_optional_arg=None, BDH_optional_val=None ):
warnings.warn('''deprecated: moved to data_workbooks module. Use the
create_stock_sheet method in the Option_Workbook class''',DeprecationWarning)
#load the given workbook
wb = openpyxl.load_workbook(workbook_path)
#gets the reference sheet
reference_sheet = wb[sheet_name]
ticker = '{} {}'.format(reference_sheet['B2'].value, reference_sheet['B3'].value)
#create a new sheet, and makes it the second sheet in the workbook. sheet indexing starts at 0.
new_sheet = wb.create_sheet(index=stock_sheet_index)
#sets the title of the new worksheet
new_sheet.title = ticker
#basic data to be added to the sheet
data = [('Company Name', reference_sheet['B1'].value),
('Company Ticker',ticker),
('Start Date', reference_sheet[sheet_start_date_cell].value),
('Announcement Date',reference_sheet[sheet_announce_date_cell].value),
('End Date',reference_sheet[sheet_end_date_cell].value)]
#appends the data to the top of the spreadsheet
for (index,data_lst) in enumerate(data):
new_sheet.append(data_lst)
#combines both passed lists:
total_headers = data_table_index + data_table_header
#set the index and column headers for the worksheet
for (index, value) in enumerate(total_headers, start= 1):
new_sheet.cell(row=data_header_row,column=index).value = value
if value.upper() == ('DATE'):
#sets the BDH function into place
new_sheet.cell(row= data_header_row+1, column= index).value = abxl.add_option_BDH(security_name = data[1][1],
fields = data_table_header,
start_date = reference_sheet[sheet_start_date_cell].value,
end_date = reference_sheet[sheet_end_date_cell].value,
optional_arg = BDH_optional_arg,
optional_val = BDH_optional_val)
#saves the newly added sheet to the workbook.
wb.save(workbook_path)
wb_name = workbook_path.split('/')[-1]
data = 'Added {} sheet to workbook: {}\n'.format(ticker, wb_name)
store_data_to_txt_file(file_name= 'stock_sheets', data= data)
def update_workbook_average_column(reference_wb_path, column_header, header_row, data_start_row, ignore_sheet_list=[]):
warnings.warn('''Completely removed''',DeprecationWarning)
#loads an excel workbook from the given file_path
reference_wb = openpyxl.load_workbook(reference_wb_path, data_only=True)
#returns a dictionary of 'sheet_names':[column data indexes] for each sheet of the given workbook
sheet_data_columns =find_column_index_by_header(reference_wb= reference_wb, column_header= column_header, header_row= header_row)
#removes any sheets that are ment to be ignored if provided
if ignore_sheet_list != []:
#iterates over every sheet name passed into ignore_sheet_list
for index, ignore_sheet in enumerate(ignore_sheet_list):
#removes the sheet name from the dictionary sheet_data_columns, so that it wont be iterated over next
sheet_data_columns.pop(ignore_sheet)
#iterate over each key(sheet_name) in sheet_data_columns:
for (index,key) in enumerate(sheet_data_columns):
#update the given sheet with the average column
update_sheet_average_column(reference_wb= reference_wb,
sheet_name= key,
data_columns= sheet_data_columns[key],
data_start_row= data_start_row,
column_header= column_header)
#saves the excel workbook
reference_wb.save(reference_wb_path)
print('Saving Workbook...')
def update_sheet_average_column(reference_wb,sheet_name,data_columns, data_start_row, column_header):
#loads the sheet of the reference_wb
warnings.warn('''Completely removed''',DeprecationWarning)
sheet = reference_wb.get_sheet_by_name(sheet_name)
#gets the max row of the sheet
max_row = sheet.max_row
#gets the max column of the sheet
max_col = sheet.max_column
#sets the header for the average column to the average_col_header and places it one row above the data
sheet.cell(row=data_start_row-1, column=max_col+1).value = '{} {} {}'.format(sheet_name, 'Average', column_header)
#iterate over each row of the workbook:
for i in range(data_start_row,max_row+1):
#an empty lest to store the values for the cells of the given row
cell_values = []
#iterate over each cell in the data column
for (index, column_ref) in enumerate(data_columns):
#if the value of the cell isn't 0, append it to the cell_values list
if sheet.cell(row=i, column=column_ref).value != 0:
cell_values.append(sheet.cell(row=i, column=column_ref).value)
if cell_values == []:
sheet.cell(row=i, column=max_col+1).value = 0
else:
sheet.cell(row=i, column=max_col+1).value = statistics.mean(cell_values)
def find_column_index_by_header(reference_wb, column_header, header_row):
warnings.warn('''Completely removed''',DeprecationWarning)
data_columns_by_sheet= {}
#iterate over all the sheetnames in the workbook
for (index,sheet_name) in enumerate(reference_wb.sheetnames):
#load the given worksheet.
sheet = reference_wb[sheet_name]
#get the max_column for the worksheet:
max_col =sheet.max_column
#add a key in the dictionary for the given sheet
data_columns_by_sheet.setdefault(sheet_name, [])
#loop through all the cells in the header_row
for i in range(max_col+1):
#If the value in the column header matches the header_value we're searching for, then append the column index to the key's list:
if column_header == sheet.cell(row=header_row, column=i+1).value:
data_columns_by_sheet[sheet_name].append(i+1)
#if no columns were found, remove that key from the dictionary
if data_columns_by_sheet[sheet_name] == []:
data_columns_by_sheet.pop(sheet_name)
#return the dictionary with the data for each sheet
return data_columns_by_sheet
def stock_data_to_list(reference_wb,price_column_header, header_start_row, start_index, end_index):
warnings.warn('''deprecated: moved to data_workbooks module. Use the
px_last_lst method in the Stock_Sheet class''',DeprecationWarning)
#returns a dictionary with {'sheet_name':[data_column]}
data_column = find_column_index_by_header(reference_wb = reference_wb, column_header= price_column_header, header_row= header_start_row)
#data list to store all the values:
data_list = []
#iterate over all the keys in the data_column:
for (index,key) in enumerate(data_column):
if re.match(STOCK_SHEET_PATTERN, key):
#load the worksheet
sheet=reference_wb[key]
for i in range(start_index, end_index+1):
if sheet.cell(row=i,column=data_column[key][0]).value !=0:
data_list.append(sheet.cell(row=i,column=data_column[key][0]).value)
#return the data_list
return data_list
def data_average(data_list):
warnings.warn('''Completely removed''',DeprecationWarning)
return floor(mean(data_list))
def data_standard_dev(data_list):
warnings.warn('''Completely removed''',DeprecationWarning)
return ceil(stdev(data=data_list))
def historic_stock_mean_and_std(reference_wb_path,price_column_header, header_start_row, date_0):
warnings.warn('''deprecated: moved to data_workbooks module. Use the
historic_mean, and historic_std, method in the Stock_Sheet class''',DeprecationWarning)
#loads the workbook and the specified sheet
wb = openpyxl.load_workbook(reference_wb_path)
#get the second sheet in the workbook
sheet = wb[wb.sheetnames[1]]
total_rows=sheet.max_row
index0=find_index_0(worksheet=sheet,start=header_start_row+1, end=total_rows, date_col=2, date_0=date_0)
data_list=stock_data_to_list(reference_wb=wb, price_column_header=price_column_header,
header_start_row=header_start_row, start_index=header_start_row+1, end_index=index0)
average = data_average(data_list)
st_dev = data_standard_dev(data_list)
return(average, st_dev)
def merger_stock_mean_and_std(reference_wb_path, price_column_header, header_start_row, date_0):
warnings.warn('''deprecated: moved to data_workbooks module. Use the
merger_mean, and merger_std, method in the Stock_Sheet class''',DeprecationWarning)
wb = openpyxl.load_workbook(reference_wb_path)
#get the second sheet in the workbook
sheet = wb[wb.sheetnames[1]]
total_rows=sheet.max_row
index0=find_index_0(worksheet=sheet,start=header_start_row+1, end=total_rows, date_col=2, date_0=date_0)
data_list=stock_data_to_list(reference_wb=wb, price_column_header=price_column_header,
header_start_row=header_start_row, start_index=index0, end_index=total_rows)
average = data_average(data_list)
st_dev = data_standard_dev(data_list)
return(average, st_dev)
def is_in_range(num, high, low):
    """Return True when *num* lies inside the closed interval [low, high]."""
    warnings.warn('''deprecated: moved to data_workbooks module. Use the
    is_strike_in_range method in the Stock_Sheet class''', DeprecationWarning)
    within_lower = low <= num
    within_upper = num <= high
    return within_lower and within_upper
def fill_option_wb_empty_cells(reference_wb_path, column_start, row_start, fill_value):
    """Fill blank cells on every option sheet of a workbook, then save it.

    Opens the workbook at *reference_wb_path*, applies
    ``fill_option_sheet_empty_cells`` to every sheet whose name matches an
    option-sheet pattern, and writes the workbook back to the same path.
    """
    warnings.warn('''deprecated: moved to data_workbooks module. Use the
    fill_option_sheet method in the Option_Workbook class''', DeprecationWarning)
    workbook = openpyxl.load_workbook(reference_wb_path)
    for name in workbook.sheetnames:
        # Only sheets named like option contracts are touched.
        is_option_sheet = (re.match(OPTION_SHEET_PATTERN_INT, name)
                           or re.match(OPTION_SHEET_PATTERN_FLOAT, name))
        if is_option_sheet:
            fill_option_sheet_empty_cells(reference_sheet=workbook[name],
                                          column_start=column_start,
                                          row_start=row_start,
                                          fill_value=fill_value)
    workbook.save(reference_wb_path)
    print('Done filling empty cells with {}.'.format(fill_value))
def fill_option_sheet_empty_cells(reference_sheet, column_start, row_start, fill_value):
    """Write *fill_value* into every empty (None-valued) cell of the sheet.

    Scans the rectangle from (*row_start*, *column_start*) to the sheet's
    last row/column, column by column, leaving non-empty cells untouched.
    """
    warnings.warn('''deprecated: moved to data_workbooks module. Use the
    fill_empty_cells method in the Option_Sheet class''', DeprecationWarning)
    last_row = reference_sheet.max_row
    last_column = reference_sheet.max_column
    for col in range(column_start, last_column + 1):
        for row in range(row_start, last_row + 1):
            cell = reference_sheet.cell(row=row, column=col)
            if cell.value is None:
                cell.value = fill_value
## added to Data_Worksheet as letter_to_col_index
#########################Tested
def convert_to_numbers(lst):
    """Translate Excel column letters into 1-based column indices.

    Accepts either a single letter string (returned as its index) or a list
    that may mix letters and ints; letter entries are converted in place and
    the same list object is returned.
    """
    warnings.warn('''deprecated: moved to data_workbooks module.
    Use the letter_to_col_index method in the Data_WorkSheet class or any class that inherits from it''', DeprecationWarning)
    if type(lst) == str:
        # A bare column letter such as "B" was passed instead of a list.
        return openpyxl.utils.column_index_from_string(lst)
    for position, entry in enumerate(lst):
        if type(entry) == str:
            lst[position] = openpyxl.utils.column_index_from_string(entry)
    return lst
def add_extra_sheets(reference_wb_path, sheet_name, ticker_column, description_column,sheet_start_date_cell, sheet_announce_date_cell, sheet_end_date_cell, data_header_row, data_table_index, data_table_header, BDH_optional_arg=None, BDH_optional_val=None):
    """Create one worksheet per qualifying option listed on the data sheet.

    Walks the data sheet bottom-up.  For each row whose option description
    matches the expected description pattern AND whose strike price lies
    within 1.5 standard deviations of either the historic or the
    merger-period stock mean, a new sheet is created, seeded with the
    option's identifying labels and a Bloomberg BDH formula, and titled
    after the description ('/' replaced by '-').  The workbook is saved
    back to *reference_wb_path* and the number of sheets added is printed.

    NOTE(review): the while-loop stops only when it reaches a row whose
    sanitized description is already a sheet name in the workbook -- if no
    such row exists, `total_rows` decrements past row 1.  Confirm the input
    workbook always satisfies this precondition.
    NOTE(review): `sheet_end_date_cell` is accepted but never used here.
    """
    #combine data_table_index and data_table_header into one header row
    total_data_headers = data_table_index+data_table_header
    #data labels to be added to the new excel worksheet
    option_data_labels = ['Security Name', 'Description', 'Type', 'Expiration Date', 'Strike Price']
    #given the file path, an excel workbook is loaded.
    wb = openpyxl.load_workbook(reference_wb_path)
    #The sheet we want to get data from is set to the variable data_sheet
    data_sheet = wb.get_sheet_by_name(sheet_name)
    #gets the total rows of the worksheet
    total_rows = data_sheet.max_row
    #counter to keep track of each sheet created
    sheet_count = 0
    #gets the average stock price and standard deviation of the stock price data for the historic and merger period
    #(announce-date cell is stored as YYYYMMDD, hence the strptime round-trip):
    historic = historic_stock_mean_and_std(reference_wb_path=reference_wb_path, price_column_header='PX_LAST', header_start_row=data_header_row, date_0=dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d'))
    merger = merger_stock_mean_and_std(reference_wb_path=reference_wb_path, price_column_header='PX_LAST', header_start_row=data_header_row, date_0=dt.datetime.strptime(str(data_sheet[sheet_announce_date_cell].value),'%Y%m%d'))
    #walk rows from the bottom until a row's sanitized description is already a sheet name
    while (data_sheet.cell(row=total_rows, column= description_column).value).replace('/','-') not in wb.get_sheet_names():
        #format_option_description() returns the following list:
        #[security_name, option_description, option_type, expiration_date, strike_price]
        option_data = format_option_description(data_sheet.cell(row=total_rows, column=ticker_column).value,
                                        data_sheet.cell(row=total_rows, column=description_column).value)
        if (re.match(OPTION_DESCRIPTION_PATTERN_INT, option_data[1]) or re.match(OPTION_DESCRIPTION_PATTERN_FLOAT,option_data[1])):
            #check to see if the strike is within 1.5 standard deviations of the historical and merger stock mean
            if ((is_in_range(num=option_data[-1], high=historic[0]+1.5*historic[1], low=historic[0]-1.5*historic[1])) or (is_in_range(num=option_data[-1], high=merger[0]+1.5*merger[1], low=merger[0]-1.5*merger[1]))):
                #creates a new sheet for the passed in workbook
                new_sheet = wb.create_sheet()
                #increment the sheet count by 1
                sheet_count +=1
                #'/' isn't allowed in excel sheet names, so it is replaced with '-'
                new_sheet.title = option_data[1].replace('/', '-')
                #zip creates a tuple pair for each item of the passed in lists. this tuple can then be appended to the sheet
                for data in zip(option_data_labels,option_data):
                    new_sheet.append(data)
                #loop through every value of total_data_headers and add it to the worksheet at the specified data_header_row
                for (index, value) in enumerate(total_data_headers, start= 1) :
                    new_sheet.cell(row = data_header_row,column = index ).value = value
                #add the BDH formula to the sheet ('B4' refers to the expiration date written above)
                new_sheet['B{}'.format(data_header_row+1)] = abxl.add_option_BDH( security_name = option_data[0],
                                                                        fields = data_table_header,
                                                                        start_date = data_sheet[sheet_start_date_cell].value,
                                                                        end_date = 'B4',
                                                                        optional_arg = BDH_optional_arg,
                                                                        optional_val = BDH_optional_val)
        total_rows -= 1
    wb.save(reference_wb_path)
    print('Added {} new sheets to the workbook'.format(sheet_count))
def update_workbook_days_till_expiration(reference_wb_path, data_start_row, date_col, calculation_col):
    """Add a days-till-expiration column to every option sheet, then save.

    Column arguments may be given as Excel letters or integers; letters are
    normalised via ``convert_to_numbers`` before the sheets are updated.
    """
    warnings.warn('''Completely removed''', DeprecationWarning)
    workbook = openpyxl.load_workbook(reference_wb_path)
    # Accept either column letters or numeric indices.
    date_col = convert_to_numbers(date_col)
    calculation_col = convert_to_numbers(calculation_col)
    for name in workbook.get_sheet_names():
        # Only sheets named like option contracts are updated.
        if re.match(OPTION_SHEET_PATTERN_INT, name) or re.match(OPTION_SHEET_PATTERN_FLOAT, name):
            update_sheet_days_till_expiration(reference_sheet=workbook.get_sheet_by_name(name),
                                              data_start_row=data_start_row,
                                              date_col=date_col,
                                              calculation_col=calculation_col)
    workbook.save(reference_wb_path)
def update_sheet_days_till_expiration(reference_sheet, data_start_row, date_col, calculation_col):
    """Write a 'DTE' (days-till-expiration) column onto one option sheet.

    Labels the header cell one row above *data_start_row*, then fills
    *calculation_col* for every row holding a date in *date_col*, stopping
    at the first empty date cell.  Cell B4 supplies the expiration date.
    """
    warnings.warn('''Completely removed''', DeprecationWarning)
    last_row = reference_sheet.max_row
    # B4 holds the option's expiration date (see the sheet layout written
    # by add_extra_sheets).
    expiration = reference_sheet['B4'].value
    # Column header sits just above the first data row.
    reference_sheet.cell(row=data_start_row-1, column=calculation_col).value = 'DTE'
    for row in range(data_start_row, last_row + 1):
        current = reference_sheet.cell(row=row, column=date_col).value
        if current is None:
            # First blank date marks the end of the data.
            break
        reference_sheet.cell(row=row, column=calculation_col).value = days_till_expiration(
            start_date=current, expiration_date=expiration)
def days_till_expiration(start_date, expiration_date):
    """Return the number of whole days from *start_date* to *expiration_date*."""
    warnings.warn('''Removed. Yet to be reimplimented''', DeprecationWarning)
    remaining = expiration_date - start_date
    return remaining.days
| true | true |
f73d4018da427d7cf2e97adebdc875ce2b8e8f52 | 3,320 | py | Python | passbook/sources/ldap/migrations/0001_initial.py | fossabot/passbook | cba17f6659404445ac3025f11657d89368cc8b4f | [
"MIT"
] | null | null | null | passbook/sources/ldap/migrations/0001_initial.py | fossabot/passbook | cba17f6659404445ac3025f11657d89368cc8b4f | [
"MIT"
] | null | null | null | passbook/sources/ldap/migrations/0001_initial.py | fossabot/passbook | cba17f6659404445ac3025f11657d89368cc8b4f | [
"MIT"
] | null | null | null | # Generated by Django 2.2.6 on 2019-10-08 20:43
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the passbook LDAP source app.

    Creates two concrete models as multi-table-inheritance children of
    core models (each carries a ``*_ptr`` OneToOneField primary key):

    * ``LDAPPropertyMapping`` -- maps an LDAP attribute (``ldap_property``)
      onto an object field, extending ``passbook_core.PropertyMapping``.
    * ``LDAPSource`` -- connection, bind and synchronisation settings for
      an LDAP directory, extending ``passbook_core.Source``.
    """

    # First migration of this app.
    initial = True

    dependencies = [
        # Requires the core models that the two models below inherit from.
        ("passbook_core", "0001_initial"),
    ]

    operations = [
        migrations.CreateModel(
            name="LDAPPropertyMapping",
            fields=[
                (
                    "propertymapping_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="passbook_core.PropertyMapping",
                    ),
                ),
                ("ldap_property", models.TextField()),
                ("object_field", models.TextField()),
            ],
            options={"abstract": False,},
            bases=("passbook_core.propertymapping",),
        ),
        migrations.CreateModel(
            name="LDAPSource",
            fields=[
                (
                    "source_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="passbook_core.Source",
                    ),
                ),
                (
                    # Only ldap:// and ldaps:// URLs are accepted.
                    "server_uri",
                    models.URLField(
                        validators=[
                            django.core.validators.URLValidator(
                                schemes=["ldap", "ldaps"]
                            )
                        ]
                    ),
                ),
                ("bind_cn", models.TextField()),
                ("bind_password", models.TextField()),
                ("start_tls", models.BooleanField(default=False)),
                ("base_dn", models.TextField()),
                (
                    "additional_user_dn",
                    models.TextField(
                        help_text="Prepended to Base DN for User-queries."
                    ),
                ),
                (
                    "additional_group_dn",
                    models.TextField(
                        help_text="Prepended to Base DN for Group-queries."
                    ),
                ),
                ("user_object_filter", models.TextField()),
                ("group_object_filter", models.TextField()),
                ("sync_groups", models.BooleanField(default=True)),
                (
                    # NOTE(review): blank=True with default=None but no
                    # null=True on this FK -- confirm the column is nullable
                    # as intended before SET_DEFAULT can work.
                    "sync_parent_group",
                    models.ForeignKey(
                        blank=True,
                        default=None,
                        on_delete=django.db.models.deletion.SET_DEFAULT,
                        to="passbook_core.Group",
                    ),
                ),
            ],
            options={
                "verbose_name": "LDAP Source",
                "verbose_name_plural": "LDAP Sources",
            },
            bases=("passbook_core.source",),
        ),
    ]
| 34.226804 | 75 | 0.413253 |
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates ``LDAPPropertyMapping`` and ``LDAPSource``,
    both multi-table-inheritance children of ``passbook_core`` models.
    """

    # First migration of this app.
    initial = True

    dependencies = [
        ("passbook_core", "0001_initial"),
    ]

    operations = [
        migrations.CreateModel(
            name="LDAPPropertyMapping",
            fields=[
                (
                    "propertymapping_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="passbook_core.PropertyMapping",
                    ),
                ),
                ("ldap_property", models.TextField()),
                ("object_field", models.TextField()),
            ],
            options={"abstract": False,},
            bases=("passbook_core.propertymapping",),
        ),
        migrations.CreateModel(
            name="LDAPSource",
            fields=[
                (
                    "source_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="passbook_core.Source",
                    ),
                ),
                (
                    # Only ldap:// and ldaps:// URLs are accepted.
                    "server_uri",
                    models.URLField(
                        validators=[
                            django.core.validators.URLValidator(
                                schemes=["ldap", "ldaps"]
                            )
                        ]
                    ),
                ),
                ("bind_cn", models.TextField()),
                ("bind_password", models.TextField()),
                ("start_tls", models.BooleanField(default=False)),
                ("base_dn", models.TextField()),
                (
                    "additional_user_dn",
                    models.TextField(
                        help_text="Prepended to Base DN for User-queries."
                    ),
                ),
                (
                    "additional_group_dn",
                    models.TextField(
                        help_text="Prepended to Base DN for Group-queries."
                    ),
                ),
                ("user_object_filter", models.TextField()),
                ("group_object_filter", models.TextField()),
                ("sync_groups", models.BooleanField(default=True)),
                (
                    "sync_parent_group",
                    models.ForeignKey(
                        blank=True,
                        default=None,
                        on_delete=django.db.models.deletion.SET_DEFAULT,
                        to="passbook_core.Group",
                    ),
                ),
            ],
            options={
                "verbose_name": "LDAP Source",
                "verbose_name_plural": "LDAP Sources",
            },
            bases=("passbook_core.source",),
        ),
    ]
| true | true |
f73d404d8e57e68853eb007001c653562b62b002 | 102,721 | py | Python | packages/python/plotly/plotly/graph_objs/_funnel.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/graph_objs/_funnel.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/graph_objs/_funnel.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | from plotly.basedatatypes import BaseTraceType as _BaseTraceType
import copy as _copy
class Funnel(_BaseTraceType):
# class properties
# --------------------
_parent_path_str = ""
_path_str = "funnel"
_valid_props = {
"alignmentgroup",
"cliponaxis",
"connector",
"constraintext",
"customdata",
"customdatasrc",
"dx",
"dy",
"hoverinfo",
"hoverinfosrc",
"hoverlabel",
"hovertemplate",
"hovertemplatesrc",
"hovertext",
"hovertextsrc",
"ids",
"idssrc",
"insidetextanchor",
"insidetextfont",
"legendgroup",
"legendgrouptitle",
"legendrank",
"marker",
"meta",
"metasrc",
"name",
"offset",
"offsetgroup",
"opacity",
"orientation",
"outsidetextfont",
"selectedpoints",
"showlegend",
"stream",
"text",
"textangle",
"textfont",
"textinfo",
"textposition",
"textpositionsrc",
"textsrc",
"texttemplate",
"texttemplatesrc",
"type",
"uid",
"uirevision",
"visible",
"width",
"x",
"x0",
"xaxis",
"xhoverformat",
"xperiod",
"xperiod0",
"xperiodalignment",
"xsrc",
"y",
"y0",
"yaxis",
"yhoverformat",
"yperiod",
"yperiod0",
"yperiodalignment",
"ysrc",
}
# alignmentgroup
# --------------
@property
def alignmentgroup(self):
"""
Set several traces linked to the same position axis or matching
axes to the same alignmentgroup. This controls whether bars
compute their positional range dependently or independently.
The 'alignmentgroup' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["alignmentgroup"]
@alignmentgroup.setter
def alignmentgroup(self, val):
self["alignmentgroup"] = val
# cliponaxis
# ----------
@property
def cliponaxis(self):
"""
Determines whether the text nodes are clipped about the subplot
axes. To show the text nodes above axis lines and tick labels,
make sure to set `xaxis.layer` and `yaxis.layer` to *below
traces*.
The 'cliponaxis' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["cliponaxis"]
@cliponaxis.setter
def cliponaxis(self, val):
self["cliponaxis"] = val
# connector
# ---------
@property
def connector(self):
"""
The 'connector' property is an instance of Connector
that may be specified as:
- An instance of :class:`plotly.graph_objs.funnel.Connector`
- A dict of string/value properties that will be passed
to the Connector constructor
Supported dict properties:
fillcolor
Sets the fill color.
line
:class:`plotly.graph_objects.funnel.connector.L
ine` instance or dict with compatible
properties
visible
Determines if connector regions and lines are
drawn.
Returns
-------
plotly.graph_objs.funnel.Connector
"""
return self["connector"]
@connector.setter
def connector(self, val):
self["connector"] = val
# constraintext
# -------------
@property
def constraintext(self):
"""
Constrain the size of text inside or outside a bar to be no
larger than the bar itself.
The 'constraintext' property is an enumeration that may be specified as:
- One of the following enumeration values:
['inside', 'outside', 'both', 'none']
Returns
-------
Any
"""
return self["constraintext"]
@constraintext.setter
def constraintext(self, val):
self["constraintext"] = val
# customdata
# ----------
@property
def customdata(self):
"""
Assigns extra data each datum. This may be useful when
listening to hover, click and selection events. Note that,
"scatter" traces also appends customdata items in the markers
DOM elements
The 'customdata' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["customdata"]
@customdata.setter
def customdata(self, val):
self["customdata"] = val
# customdatasrc
# -------------
@property
def customdatasrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`customdata`.
The 'customdatasrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["customdatasrc"]
@customdatasrc.setter
def customdatasrc(self, val):
self["customdatasrc"] = val
# dx
# --
@property
def dx(self):
"""
Sets the x coordinate step. See `x0` for more info.
The 'dx' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["dx"]
@dx.setter
def dx(self, val):
self["dx"] = val
# dy
# --
@property
def dy(self):
"""
Sets the y coordinate step. See `y0` for more info.
The 'dy' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["dy"]
@dy.setter
def dy(self, val):
self["dy"] = val
# hoverinfo
# ---------
@property
def hoverinfo(self):
"""
Determines which trace information appear on hover. If `none`
or `skip` are set, no information is displayed upon hovering.
But, if `none` is set, click and hover events are still fired.
The 'hoverinfo' property is a flaglist and may be specified
as a string containing:
- Any combination of ['name', 'x', 'y', 'text', 'percent initial', 'percent previous', 'percent total'] joined with '+' characters
(e.g. 'name+x')
OR exactly one of ['all', 'none', 'skip'] (e.g. 'skip')
- A list or array of the above
Returns
-------
Any|numpy.ndarray
"""
return self["hoverinfo"]
@hoverinfo.setter
def hoverinfo(self, val):
self["hoverinfo"] = val
# hoverinfosrc
# ------------
@property
def hoverinfosrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`hoverinfo`.
The 'hoverinfosrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["hoverinfosrc"]
@hoverinfosrc.setter
def hoverinfosrc(self, val):
self["hoverinfosrc"] = val
# hoverlabel
# ----------
@property
def hoverlabel(self):
"""
The 'hoverlabel' property is an instance of Hoverlabel
that may be specified as:
- An instance of :class:`plotly.graph_objs.funnel.Hoverlabel`
- A dict of string/value properties that will be passed
to the Hoverlabel constructor
Supported dict properties:
align
Sets the horizontal alignment of the text
content within hover label box. Has an effect
only if the hover label text spans more two or
more lines
alignsrc
Sets the source reference on Chart Studio Cloud
for `align`.
bgcolor
Sets the background color of the hover labels
for this trace
bgcolorsrc
Sets the source reference on Chart Studio Cloud
for `bgcolor`.
bordercolor
Sets the border color of the hover labels for
this trace.
bordercolorsrc
Sets the source reference on Chart Studio Cloud
for `bordercolor`.
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of
characters) of the trace name in the hover
labels for all traces. -1 shows the whole name
regardless of length. 0-3 shows the first 0-3
characters, and an integer >3 will show the
whole name if it is less than that many
characters, but if it is longer, will truncate
to `namelength - 3` characters and add an
ellipsis.
namelengthsrc
Sets the source reference on Chart Studio Cloud
for `namelength`.
Returns
-------
plotly.graph_objs.funnel.Hoverlabel
"""
return self["hoverlabel"]
@hoverlabel.setter
def hoverlabel(self, val):
self["hoverlabel"] = val
# hovertemplate
# -------------
@property
def hovertemplate(self):
"""
Template string used for rendering the information that appear
on hover box. Note that this will override `hoverinfo`.
Variables are inserted using %{variable}, for example "y: %{y}"
as well as %{xother}, {%_xother}, {%_xother_}, {%xother_}. When
showing info for several points, "xother" will be added to
those with different x positions from the first point. An
underscore before or after "(x|y)other" will add a space on
that side, only when this field is shown. Numbers are formatted
using d3-format's syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format for
details on the formatting syntax. Dates are formatted using
d3-time-format's syntax %{variable|d3-time-format}, for example
"Day: %{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the date
formatting syntax. The variables available in `hovertemplate`
are the ones emitted as event data described at this link
https://plotly.com/javascript/plotlyjs-events/#event-data.
Additionally, every attributes that can be specified per-point
(the ones that are `arrayOk: true`) are available. variables
`percentInitial`, `percentPrevious` and `percentTotal`.
Anything contained in tag `<extra>` is displayed in the
secondary box, for example "<extra>{fullData.name}</extra>". To
hide the secondary box completely, use an empty tag
`<extra></extra>`.
The 'hovertemplate' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["hovertemplate"]
@hovertemplate.setter
def hovertemplate(self, val):
self["hovertemplate"] = val
# hovertemplatesrc
# ----------------
@property
def hovertemplatesrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`hovertemplate`.
The 'hovertemplatesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["hovertemplatesrc"]
@hovertemplatesrc.setter
def hovertemplatesrc(self, val):
self["hovertemplatesrc"] = val
# hovertext
# ---------
@property
def hovertext(self):
"""
Sets hover text elements associated with each (x,y) pair. If a
single string, the same string appears over all the data
points. If an array of string, the items are mapped in order to
the this trace's (x,y) coordinates. To be seen, trace
`hoverinfo` must contain a "text" flag.
The 'hovertext' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["hovertext"]
@hovertext.setter
def hovertext(self, val):
self["hovertext"] = val
# hovertextsrc
# ------------
@property
def hovertextsrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`hovertext`.
The 'hovertextsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["hovertextsrc"]
@hovertextsrc.setter
def hovertextsrc(self, val):
self["hovertextsrc"] = val
# ids
# ---
@property
def ids(self):
"""
Assigns id labels to each datum. These ids for object constancy
of data points during animation. Should be an array of strings,
not numbers or any other type.
The 'ids' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["ids"]
@ids.setter
def ids(self, val):
self["ids"] = val
# idssrc
# ------
@property
def idssrc(self):
"""
Sets the source reference on Chart Studio Cloud for `ids`.
The 'idssrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["idssrc"]
@idssrc.setter
def idssrc(self, val):
self["idssrc"] = val
# insidetextanchor
# ----------------
@property
def insidetextanchor(self):
"""
Determines if texts are kept at center or start/end points in
`textposition` "inside" mode.
The 'insidetextanchor' property is an enumeration that may be specified as:
- One of the following enumeration values:
['end', 'middle', 'start']
Returns
-------
Any
"""
return self["insidetextanchor"]
@insidetextanchor.setter
def insidetextanchor(self, val):
self["insidetextanchor"] = val
# insidetextfont
# --------------
@property
def insidetextfont(self):
"""
Sets the font used for `text` lying inside the bar.
The 'insidetextfont' property is an instance of Insidetextfont
that may be specified as:
- An instance of :class:`plotly.graph_objs.funnel.Insidetextfont`
- A dict of string/value properties that will be passed
to the Insidetextfont constructor
Supported dict properties:
color
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for `family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud
for `size`.
Returns
-------
plotly.graph_objs.funnel.Insidetextfont
"""
return self["insidetextfont"]
@insidetextfont.setter
def insidetextfont(self, val):
self["insidetextfont"] = val
# legendgroup
# -----------
@property
def legendgroup(self):
"""
Sets the legend group for this trace. Traces part of the same
legend group hide/show at the same time when toggling legend
items.
The 'legendgroup' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["legendgroup"]
@legendgroup.setter
def legendgroup(self, val):
self["legendgroup"] = val
# legendgrouptitle
# ----------------
@property
def legendgrouptitle(self):
"""
The 'legendgrouptitle' property is an instance of Legendgrouptitle
that may be specified as:
- An instance of :class:`plotly.graph_objs.funnel.Legendgrouptitle`
- A dict of string/value properties that will be passed
to the Legendgrouptitle constructor
Supported dict properties:
font
Sets this legend group's title font.
text
Sets the title of the legend group.
Returns
-------
plotly.graph_objs.funnel.Legendgrouptitle
"""
return self["legendgrouptitle"]
@legendgrouptitle.setter
def legendgrouptitle(self, val):
self["legendgrouptitle"] = val
# legendrank
# ----------
@property
def legendrank(self):
"""
Sets the legend rank for this trace. Items and groups with
smaller ranks are presented on top/left side while with
`*reversed* `legend.traceorder` they are on bottom/right side.
The default legendrank is 1000, so that you can use ranks less
than 1000 to place certain items before all unranked items, and
ranks greater than 1000 to go after all unranked items.
The 'legendrank' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["legendrank"]
@legendrank.setter
def legendrank(self, val):
self["legendrank"] = val
# marker
# ------
@property
def marker(self):
"""
The 'marker' property is an instance of Marker
that may be specified as:
- An instance of :class:`plotly.graph_objs.funnel.Marker`
- A dict of string/value properties that will be passed
to the Marker constructor
Supported dict properties:
autocolorscale
Determines whether the colorscale is a default
palette (`autocolorscale: true`) or the palette
determined by `marker.colorscale`. Has an
effect only if in `marker.color`is set to a
numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the
default palette will be chosen according to
whether numbers in the `color` array are all
positive, all negative or mixed.
cauto
Determines whether or not the color domain is
computed with respect to the input data (here
in `marker.color`) or the bounds set in
`marker.cmin` and `marker.cmax` Has an effect
only if in `marker.color`is set to a numerical
array. Defaults to `false` when `marker.cmin`
and `marker.cmax` are set by the user.
cmax
Sets the upper bound of the color domain. Has
an effect only if in `marker.color`is set to a
numerical array. Value should have the same
units as in `marker.color` and if set,
`marker.cmin` must be set as well.
cmid
Sets the mid-point of the color domain by
scaling `marker.cmin` and/or `marker.cmax` to
be equidistant to this point. Has an effect
only if in `marker.color`is set to a numerical
array. Value should have the same units as in
`marker.color`. Has no effect when
`marker.cauto` is `false`.
cmin
Sets the lower bound of the color domain. Has
an effect only if in `marker.color`is set to a
numerical array. Value should have the same
units as in `marker.color` and if set,
`marker.cmax` must be set as well.
color
Sets themarkercolor. It accepts either a
specific color or an array of numbers that are
mapped to the colorscale relative to the max
and min values of the array or relative to
`marker.cmin` and `marker.cmax` if set.
coloraxis
Sets a reference to a shared color axis.
References to these shared color axes are
"coloraxis", "coloraxis2", "coloraxis3", etc.
Settings for these shared color axes are set in
the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple
color scales can be linked to the same color
axis.
colorbar
:class:`plotly.graph_objects.funnel.marker.Colo
rBar` instance or dict with compatible
properties
colorscale
Sets the colorscale. Has an effect only if in
`marker.color`is set to a numerical array. The
colorscale must be an array containing arrays
mapping a normalized value to an rgb, rgba,
hex, hsl, hsv, or named color string. At
minimum, a mapping for the lowest (0) and
highest (1) values are required. For example,
`[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`.
To control the bounds of the colorscale in
color space, use`marker.cmin` and
`marker.cmax`. Alternatively, `colorscale` may
be a palette name string of the following list:
Blackbody,Bluered,Blues,Cividis,Earth,Electric,
Greens,Greys,Hot,Jet,Picnic,Portland,Rainbow,Rd
Bu,Reds,Viridis,YlGnBu,YlOrRd.
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
line
:class:`plotly.graph_objects.funnel.marker.Line
` instance or dict with compatible properties
opacity
Sets the opacity of the bars.
opacitysrc
Sets the source reference on Chart Studio Cloud
for `opacity`.
reversescale
Reverses the color mapping if true. Has an
effect only if in `marker.color`is set to a
numerical array. If true, `marker.cmin` will
correspond to the last color in the array and
`marker.cmax` will correspond to the first
color.
showscale
Determines whether or not a colorbar is
displayed for this trace. Has an effect only if
in `marker.color`is set to a numerical array.
Returns
-------
plotly.graph_objs.funnel.Marker
"""
return self["marker"]
@marker.setter
def marker(self, val):
self["marker"] = val
# meta
# ----
@property
def meta(self):
"""
Assigns extra meta information associated with this trace that
can be used in various text attributes. Attributes such as
trace `name`, graph, axis and colorbar `title.text`, annotation
`text` `rangeselector`, `updatemenues` and `sliders` `label`
text all support `meta`. To access the trace `meta` values in
an attribute in the same trace, simply use `%{meta[i]}` where
`i` is the index or key of the `meta` item in question. To
access trace `meta` in layout attributes, use
`%{data[n[.meta[i]}` where `i` is the index or key of the
`meta` and `n` is the trace index.
The 'meta' property accepts values of any type
Returns
-------
Any|numpy.ndarray
"""
return self["meta"]
@meta.setter
def meta(self, val):
self["meta"] = val
# metasrc
# -------
@property
def metasrc(self):
"""
Sets the source reference on Chart Studio Cloud for `meta`.
The 'metasrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["metasrc"]
@metasrc.setter
def metasrc(self, val):
self["metasrc"] = val
# name
# ----
@property
def name(self):
"""
Sets the trace name. The trace name appear as the legend item
and on hover.
The 'name' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["name"]
@name.setter
def name(self, val):
self["name"] = val
# offset
# ------
@property
def offset(self):
"""
Shifts the position where the bar is drawn (in position axis
units). In "group" barmode, traces that set "offset" will be
excluded and drawn in "overlay" mode instead.
The 'offset' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["offset"]
@offset.setter
def offset(self, val):
self["offset"] = val
# offsetgroup
# -----------
@property
def offsetgroup(self):
"""
Set several traces linked to the same position axis or matching
axes to the same offsetgroup where bars of the same position
coordinate will line up.
The 'offsetgroup' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["offsetgroup"]
@offsetgroup.setter
def offsetgroup(self, val):
self["offsetgroup"] = val
# opacity
# -------
@property
def opacity(self):
"""
Sets the opacity of the trace.
The 'opacity' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self["opacity"]
@opacity.setter
def opacity(self, val):
self["opacity"] = val
# orientation
# -----------
@property
def orientation(self):
"""
Sets the orientation of the funnels. With "v" ("h"), the value
of the each bar spans along the vertical (horizontal). By
default funnels are tend to be oriented horizontally; unless
only "y" array is presented or orientation is set to "v". Also
regarding graphs including only 'horizontal' funnels,
"autorange" on the "y-axis" are set to "reversed".
The 'orientation' property is an enumeration that may be specified as:
- One of the following enumeration values:
['v', 'h']
Returns
-------
Any
"""
return self["orientation"]
@orientation.setter
def orientation(self, val):
self["orientation"] = val
# outsidetextfont
# ---------------
@property
def outsidetextfont(self):
"""
Sets the font used for `text` lying outside the bar.
The 'outsidetextfont' property is an instance of Outsidetextfont
that may be specified as:
- An instance of :class:`plotly.graph_objs.funnel.Outsidetextfont`
- A dict of string/value properties that will be passed
to the Outsidetextfont constructor
Supported dict properties:
color
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for `family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud
for `size`.
Returns
-------
plotly.graph_objs.funnel.Outsidetextfont
"""
return self["outsidetextfont"]
@outsidetextfont.setter
def outsidetextfont(self, val):
self["outsidetextfont"] = val
# selectedpoints
# --------------
@property
def selectedpoints(self):
"""
Array containing integer indices of selected points. Has an
effect only for traces that support selections. Note that an
empty array means an empty selection where the `unselected` are
turned on for all points, whereas, any other non-array values
means no selection all where the `selected` and `unselected`
styles have no effect.
The 'selectedpoints' property accepts values of any type
Returns
-------
Any
"""
return self["selectedpoints"]
@selectedpoints.setter
def selectedpoints(self, val):
self["selectedpoints"] = val
# showlegend
# ----------
@property
def showlegend(self):
"""
Determines whether or not an item corresponding to this trace
is shown in the legend.
The 'showlegend' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showlegend"]
@showlegend.setter
def showlegend(self, val):
self["showlegend"] = val
# stream
# ------
@property
def stream(self):
"""
The 'stream' property is an instance of Stream
that may be specified as:
- An instance of :class:`plotly.graph_objs.funnel.Stream`
- A dict of string/value properties that will be passed
to the Stream constructor
Supported dict properties:
maxpoints
Sets the maximum number of points to keep on
the plots from an incoming stream. If
`maxpoints` is set to 50, only the newest 50
points will be displayed on the plot.
token
The stream id number links a data trace on a
plot with a stream. See https://chart-
studio.plotly.com/settings for more details.
Returns
-------
plotly.graph_objs.funnel.Stream
"""
return self["stream"]
@stream.setter
def stream(self, val):
self["stream"] = val
# text
# ----
@property
def text(self):
"""
Sets text elements associated with each (x,y) pair. If a single
string, the same string appears over all the data points. If an
array of string, the items are mapped in order to the this
trace's (x,y) coordinates. If trace `hoverinfo` contains a
"text" flag and "hovertext" is not set, these elements will be
seen in the hover labels.
The 'text' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["text"]
@text.setter
def text(self, val):
self["text"] = val
# textangle
# ---------
@property
def textangle(self):
"""
Sets the angle of the tick labels with respect to the bar. For
example, a `tickangle` of -90 draws the tick labels vertically.
With "auto" the texts may automatically be rotated to fit with
the maximum size in bars.
The 'textangle' property is a angle (in degrees) that may be
specified as a number between -180 and 180. Numeric values outside this
range are converted to the equivalent value
(e.g. 270 is converted to -90).
Returns
-------
int|float
"""
return self["textangle"]
@textangle.setter
def textangle(self, val):
self["textangle"] = val
# textfont
# --------
@property
def textfont(self):
"""
Sets the font used for `text`.
The 'textfont' property is an instance of Textfont
that may be specified as:
- An instance of :class:`plotly.graph_objs.funnel.Textfont`
- A dict of string/value properties that will be passed
to the Textfont constructor
Supported dict properties:
color
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for `family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud
for `size`.
Returns
-------
plotly.graph_objs.funnel.Textfont
"""
return self["textfont"]
@textfont.setter
def textfont(self, val):
self["textfont"] = val
# textinfo
# --------
@property
def textinfo(self):
"""
Determines which trace information appear on the graph. In the
case of having multiple funnels, percentages & totals are
computed separately (per trace).
The 'textinfo' property is a flaglist and may be specified
as a string containing:
- Any combination of ['label', 'text', 'percent initial', 'percent previous', 'percent total', 'value'] joined with '+' characters
(e.g. 'label+text')
OR exactly one of ['none'] (e.g. 'none')
Returns
-------
Any
"""
return self["textinfo"]
@textinfo.setter
def textinfo(self, val):
self["textinfo"] = val
# textposition
# ------------
@property
def textposition(self):
"""
Specifies the location of the `text`. "inside" positions `text`
inside, next to the bar end (rotated and scaled if needed).
"outside" positions `text` outside, next to the bar end (scaled
if needed), unless there is another bar stacked on this one,
then the text gets pushed inside. "auto" tries to position
`text` inside the bar, but if the bar is too small and no bar
is stacked on this one the text is moved outside. If "none", no
text appears.
The 'textposition' property is an enumeration that may be specified as:
- One of the following enumeration values:
['inside', 'outside', 'auto', 'none']
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
Any|numpy.ndarray
"""
return self["textposition"]
@textposition.setter
def textposition(self, val):
self["textposition"] = val
# textpositionsrc
# ---------------
@property
def textpositionsrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`textposition`.
The 'textpositionsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["textpositionsrc"]
@textpositionsrc.setter
def textpositionsrc(self, val):
self["textpositionsrc"] = val
# textsrc
# -------
@property
def textsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `text`.
The 'textsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["textsrc"]
@textsrc.setter
def textsrc(self, val):
self["textsrc"] = val
# texttemplate
# ------------
@property
def texttemplate(self):
"""
Template string used for rendering the information text that
appear on points. Note that this will override `textinfo`.
Variables are inserted using %{variable}, for example "y:
%{y}". Numbers are formatted using d3-format's syntax
%{variable:d3-format}, for example "Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format for
details on the formatting syntax. Dates are formatted using
d3-time-format's syntax %{variable|d3-time-format}, for example
"Day: %{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the date
formatting syntax. Every attributes that can be specified per-
point (the ones that are `arrayOk: true`) are available.
variables `percentInitial`, `percentPrevious`, `percentTotal`,
`label` and `value`.
The 'texttemplate' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["texttemplate"]
@texttemplate.setter
def texttemplate(self, val):
self["texttemplate"] = val
# texttemplatesrc
# ---------------
@property
def texttemplatesrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`texttemplate`.
The 'texttemplatesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["texttemplatesrc"]
@texttemplatesrc.setter
def texttemplatesrc(self, val):
self["texttemplatesrc"] = val
# uid
# ---
@property
def uid(self):
"""
Assign an id to this trace, Use this to provide object
constancy between traces during animations and transitions.
The 'uid' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["uid"]
@uid.setter
def uid(self, val):
self["uid"] = val
# uirevision
# ----------
@property
def uirevision(self):
"""
Controls persistence of some user-driven changes to the trace:
`constraintrange` in `parcoords` traces, as well as some
`editable: true` modifications such as `name` and
`colorbar.title`. Defaults to `layout.uirevision`. Note that
other user-driven trace attribute changes are controlled by
`layout` attributes: `trace.visible` is controlled by
`layout.legend.uirevision`, `selectedpoints` is controlled by
`layout.selectionrevision`, and `colorbar.(x|y)` (accessible
with `config: {editable: true}`) is controlled by
`layout.editrevision`. Trace changes are tracked by `uid`,
which only falls back on trace index if no `uid` is provided.
So if your app can add/remove traces before the end of the
`data` array, such that the same trace has a different index,
you can still preserve user-driven changes if you give each
trace a `uid` that stays with it as it moves.
The 'uirevision' property accepts values of any type
Returns
-------
Any
"""
return self["uirevision"]
@uirevision.setter
def uirevision(self, val):
self["uirevision"] = val
# visible
# -------
@property
def visible(self):
"""
Determines whether or not this trace is visible. If
"legendonly", the trace is not drawn, but can appear as a
legend item (provided that the legend itself is visible).
The 'visible' property is an enumeration that may be specified as:
- One of the following enumeration values:
[True, False, 'legendonly']
Returns
-------
Any
"""
return self["visible"]
@visible.setter
def visible(self, val):
self["visible"] = val
# width
# -----
@property
def width(self):
"""
Sets the bar width (in position axis units).
The 'width' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["width"]
@width.setter
def width(self, val):
self["width"] = val
# x
# -
@property
def x(self):
"""
Sets the x coordinates.
The 'x' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["x"]
@x.setter
def x(self, val):
self["x"] = val
# x0
# --
@property
def x0(self):
"""
Alternate to `x`. Builds a linear space of x coordinates. Use
with `dx` where `x0` is the starting coordinate and `dx` the
step.
The 'x0' property accepts values of any type
Returns
-------
Any
"""
return self["x0"]
@x0.setter
def x0(self, val):
self["x0"] = val
# xaxis
# -----
@property
def xaxis(self):
"""
Sets a reference between this trace's x coordinates and a 2D
cartesian x axis. If "x" (the default value), the x coordinates
refer to `layout.xaxis`. If "x2", the x coordinates refer to
`layout.xaxis2`, and so on.
The 'xaxis' property is an identifier of a particular
subplot, of type 'x', that may be specified as the string 'x'
optionally followed by an integer >= 1
(e.g. 'x', 'x1', 'x2', 'x3', etc.)
Returns
-------
str
"""
return self["xaxis"]
@xaxis.setter
def xaxis(self, val):
self["xaxis"] = val
# xhoverformat
# ------------
@property
def xhoverformat(self):
"""
Sets the hover text formatting rulefor `x` using d3 formatting
mini-languages which are very similar to those in Python. For
numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for
dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to d3's date
formatter: "%h" for half of the year as a decimal number as
well as "%{n}f" for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with tickformat
"%H~%M~%S.%2f" would display *09~15~23.46*By default the values
are formatted using `xaxis.hoverformat`.
The 'xhoverformat' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["xhoverformat"]
@xhoverformat.setter
def xhoverformat(self, val):
self["xhoverformat"] = val
# xperiod
# -------
@property
def xperiod(self):
"""
Only relevant when the axis `type` is "date". Sets the period
positioning in milliseconds or "M<n>" on the x axis. Special
values in the form of "M<n>" could be used to declare the
number of months. In this case `n` must be a positive integer.
The 'xperiod' property accepts values of any type
Returns
-------
Any
"""
return self["xperiod"]
@xperiod.setter
def xperiod(self, val):
self["xperiod"] = val
# xperiod0
# --------
@property
def xperiod0(self):
"""
Only relevant when the axis `type` is "date". Sets the base for
period positioning in milliseconds or date string on the x0
axis. When `x0period` is round number of weeks, the `x0period0`
by default would be on a Sunday i.e. 2000-01-02, otherwise it
would be at 2000-01-01.
The 'xperiod0' property accepts values of any type
Returns
-------
Any
"""
return self["xperiod0"]
@xperiod0.setter
def xperiod0(self, val):
self["xperiod0"] = val
# xperiodalignment
# ----------------
@property
def xperiodalignment(self):
"""
Only relevant when the axis `type` is "date". Sets the
alignment of data points on the x axis.
The 'xperiodalignment' property is an enumeration that may be specified as:
- One of the following enumeration values:
['start', 'middle', 'end']
Returns
-------
Any
"""
return self["xperiodalignment"]
@xperiodalignment.setter
def xperiodalignment(self, val):
self["xperiodalignment"] = val
# xsrc
# ----
@property
def xsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `x`.
The 'xsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["xsrc"]
@xsrc.setter
def xsrc(self, val):
self["xsrc"] = val
# y
# -
@property
def y(self):
"""
Sets the y coordinates.
The 'y' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["y"]
@y.setter
def y(self, val):
self["y"] = val
# y0
# --
@property
def y0(self):
"""
Alternate to `y`. Builds a linear space of y coordinates. Use
with `dy` where `y0` is the starting coordinate and `dy` the
step.
The 'y0' property accepts values of any type
Returns
-------
Any
"""
return self["y0"]
@y0.setter
def y0(self, val):
self["y0"] = val
# yaxis
# -----
@property
def yaxis(self):
"""
Sets a reference between this trace's y coordinates and a 2D
cartesian y axis. If "y" (the default value), the y coordinates
refer to `layout.yaxis`. If "y2", the y coordinates refer to
`layout.yaxis2`, and so on.
The 'yaxis' property is an identifier of a particular
subplot, of type 'y', that may be specified as the string 'y'
optionally followed by an integer >= 1
(e.g. 'y', 'y1', 'y2', 'y3', etc.)
Returns
-------
str
"""
return self["yaxis"]
@yaxis.setter
def yaxis(self, val):
self["yaxis"] = val
# yhoverformat
# ------------
@property
def yhoverformat(self):
"""
Sets the hover text formatting rulefor `y` using d3 formatting
mini-languages which are very similar to those in Python. For
numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for
dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to d3's date
formatter: "%h" for half of the year as a decimal number as
well as "%{n}f" for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with tickformat
"%H~%M~%S.%2f" would display *09~15~23.46*By default the values
are formatted using `yaxis.hoverformat`.
The 'yhoverformat' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["yhoverformat"]
@yhoverformat.setter
def yhoverformat(self, val):
self["yhoverformat"] = val
# yperiod
# -------
@property
def yperiod(self):
"""
Only relevant when the axis `type` is "date". Sets the period
positioning in milliseconds or "M<n>" on the y axis. Special
values in the form of "M<n>" could be used to declare the
number of months. In this case `n` must be a positive integer.
The 'yperiod' property accepts values of any type
Returns
-------
Any
"""
return self["yperiod"]
@yperiod.setter
def yperiod(self, val):
self["yperiod"] = val
# yperiod0
# --------
@property
def yperiod0(self):
"""
Only relevant when the axis `type` is "date". Sets the base for
period positioning in milliseconds or date string on the y0
axis. When `y0period` is round number of weeks, the `y0period0`
by default would be on a Sunday i.e. 2000-01-02, otherwise it
would be at 2000-01-01.
The 'yperiod0' property accepts values of any type
Returns
-------
Any
"""
return self["yperiod0"]
@yperiod0.setter
def yperiod0(self, val):
self["yperiod0"] = val
# yperiodalignment
# ----------------
@property
def yperiodalignment(self):
"""
Only relevant when the axis `type` is "date". Sets the
alignment of data points on the y axis.
The 'yperiodalignment' property is an enumeration that may be specified as:
- One of the following enumeration values:
['start', 'middle', 'end']
Returns
-------
Any
"""
return self["yperiodalignment"]
@yperiodalignment.setter
def yperiodalignment(self, val):
self["yperiodalignment"] = val
# ysrc
# ----
@property
def ysrc(self):
"""
Sets the source reference on Chart Studio Cloud for `y`.
The 'ysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["ysrc"]
@ysrc.setter
def ysrc(self, val):
self["ysrc"] = val
    # type
    # ----
    @property
    def type(self):
        # Read-only: no setter is defined for the trace type. The value
        # is read straight from the internal `_props` store rather than
        # through item access.
        return self._props["type"]
    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        # Plain-text catalogue of every property of this trace type; the
        # body is a single string literal returned verbatim (it mirrors
        # the Parameters section of the constructor docstring), so edit
        # it only via the schema/code generator — NOTE(review): this file
        # appears machine-generated.
        return """\
        alignmentgroup
            Set several traces linked to the same position axis or
            matching axes to the same alignmentgroup. This controls
            whether bars compute their positional range dependently
            or independently.
        cliponaxis
            Determines whether the text nodes are clipped about the
            subplot axes. To show the text nodes above axis lines
            and tick labels, make sure to set `xaxis.layer` and
            `yaxis.layer` to *below traces*.
        connector
            :class:`plotly.graph_objects.funnel.Connector` instance
            or dict with compatible properties
        constraintext
            Constrain the size of text inside or outside a bar to
            be no larger than the bar itself.
        customdata
            Assigns extra data each datum. This may be useful when
            listening to hover, click and selection events. Note
            that, "scatter" traces also appends customdata items in
            the markers DOM elements
        customdatasrc
            Sets the source reference on Chart Studio Cloud for
            `customdata`.
        dx
            Sets the x coordinate step. See `x0` for more info.
        dy
            Sets the y coordinate step. See `y0` for more info.
        hoverinfo
            Determines which trace information appear on hover. If
            `none` or `skip` are set, no information is displayed
            upon hovering. But, if `none` is set, click and hover
            events are still fired.
        hoverinfosrc
            Sets the source reference on Chart Studio Cloud for
            `hoverinfo`.
        hoverlabel
            :class:`plotly.graph_objects.funnel.Hoverlabel`
            instance or dict with compatible properties
        hovertemplate
            Template string used for rendering the information that
            appear on hover box. Note that this will override
            `hoverinfo`. Variables are inserted using %{variable},
            for example "y: %{y}" as well as %{xother}, {%_xother},
            {%_xother_}, {%xother_}. When showing info for several
            points, "xother" will be added to those with different
            x positions from the first point. An underscore before
            or after "(x|y)other" will add a space on that side,
            only when this field is shown. Numbers are formatted
            using d3-format's syntax %{variable:d3-format}, for
            example "Price: %{y:$.2f}".
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format
            for details on the formatting syntax. Dates are
            formatted using d3-time-format's syntax
            %{variable|d3-time-format}, for example "Day:
            %{2019-01-01|%A}". https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format for details on the
            date formatting syntax. The variables available in
            `hovertemplate` are the ones emitted as event data
            described at this link
            https://plotly.com/javascript/plotlyjs-events/#event-
            data. Additionally, every attributes that can be
            specified per-point (the ones that are `arrayOk: true`)
            are available. variables `percentInitial`,
            `percentPrevious` and `percentTotal`. Anything
            contained in tag `<extra>` is displayed in the
            secondary box, for example
            "<extra>{fullData.name}</extra>". To hide the secondary
            box completely, use an empty tag `<extra></extra>`.
        hovertemplatesrc
            Sets the source reference on Chart Studio Cloud for
            `hovertemplate`.
        hovertext
            Sets hover text elements associated with each (x,y)
            pair. If a single string, the same string appears over
            all the data points. If an array of string, the items
            are mapped in order to the this trace's (x,y)
            coordinates. To be seen, trace `hoverinfo` must contain
            a "text" flag.
        hovertextsrc
            Sets the source reference on Chart Studio Cloud for
            `hovertext`.
        ids
            Assigns id labels to each datum. These ids for object
            constancy of data points during animation. Should be an
            array of strings, not numbers or any other type.
        idssrc
            Sets the source reference on Chart Studio Cloud for
            `ids`.
        insidetextanchor
            Determines if texts are kept at center or start/end
            points in `textposition` "inside" mode.
        insidetextfont
            Sets the font used for `text` lying inside the bar.
        legendgroup
            Sets the legend group for this trace. Traces part of
            the same legend group hide/show at the same time when
            toggling legend items.
        legendgrouptitle
            :class:`plotly.graph_objects.funnel.Legendgrouptitle`
            instance or dict with compatible properties
        legendrank
            Sets the legend rank for this trace. Items and groups
            with smaller ranks are presented on top/left side while
            with `*reversed* `legend.traceorder` they are on
            bottom/right side. The default legendrank is 1000, so
            that you can use ranks less than 1000 to place certain
            items before all unranked items, and ranks greater than
            1000 to go after all unranked items.
        marker
            :class:`plotly.graph_objects.funnel.Marker` instance or
            dict with compatible properties
        meta
            Assigns extra meta information associated with this
            trace that can be used in various text attributes.
            Attributes such as trace `name`, graph, axis and
            colorbar `title.text`, annotation `text`
            `rangeselector`, `updatemenues` and `sliders` `label`
            text all support `meta`. To access the trace `meta`
            values in an attribute in the same trace, simply use
            `%{meta[i]}` where `i` is the index or key of the
            `meta` item in question. To access trace `meta` in
            layout attributes, use `%{data[n[.meta[i]}` where `i`
            is the index or key of the `meta` and `n` is the trace
            index.
        metasrc
            Sets the source reference on Chart Studio Cloud for
            `meta`.
        name
            Sets the trace name. The trace name appear as the
            legend item and on hover.
        offset
            Shifts the position where the bar is drawn (in position
            axis units). In "group" barmode, traces that set
            "offset" will be excluded and drawn in "overlay" mode
            instead.
        offsetgroup
            Set several traces linked to the same position axis or
            matching axes to the same offsetgroup where bars of the
            same position coordinate will line up.
        opacity
            Sets the opacity of the trace.
        orientation
            Sets the orientation of the funnels. With "v" ("h"),
            the value of the each bar spans along the vertical
            (horizontal). By default funnels are tend to be
            oriented horizontally; unless only "y" array is
            presented or orientation is set to "v". Also regarding
            graphs including only 'horizontal' funnels, "autorange"
            on the "y-axis" are set to "reversed".
        outsidetextfont
            Sets the font used for `text` lying outside the bar.
        selectedpoints
            Array containing integer indices of selected points.
            Has an effect only for traces that support selections.
            Note that an empty array means an empty selection where
            the `unselected` are turned on for all points, whereas,
            any other non-array values means no selection all where
            the `selected` and `unselected` styles have no effect.
        showlegend
            Determines whether or not an item corresponding to this
            trace is shown in the legend.
        stream
            :class:`plotly.graph_objects.funnel.Stream` instance or
            dict with compatible properties
        text
            Sets text elements associated with each (x,y) pair. If
            a single string, the same string appears over all the
            data points. If an array of string, the items are
            mapped in order to the this trace's (x,y) coordinates.
            If trace `hoverinfo` contains a "text" flag and
            "hovertext" is not set, these elements will be seen in
            the hover labels.
        textangle
            Sets the angle of the tick labels with respect to the
            bar. For example, a `tickangle` of -90 draws the tick
            labels vertically. With "auto" the texts may
            automatically be rotated to fit with the maximum size
            in bars.
        textfont
            Sets the font used for `text`.
        textinfo
            Determines which trace information appear on the graph.
            In the case of having multiple funnels, percentages &
            totals are computed separately (per trace).
        textposition
            Specifies the location of the `text`. "inside"
            positions `text` inside, next to the bar end (rotated
            and scaled if needed). "outside" positions `text`
            outside, next to the bar end (scaled if needed), unless
            there is another bar stacked on this one, then the text
            gets pushed inside. "auto" tries to position `text`
            inside the bar, but if the bar is too small and no bar
            is stacked on this one the text is moved outside. If
            "none", no text appears.
        textpositionsrc
            Sets the source reference on Chart Studio Cloud for
            `textposition`.
        textsrc
            Sets the source reference on Chart Studio Cloud for
            `text`.
        texttemplate
            Template string used for rendering the information text
            that appear on points. Note that this will override
            `textinfo`. Variables are inserted using %{variable},
            for example "y: %{y}". Numbers are formatted using
            d3-format's syntax %{variable:d3-format}, for example
            "Price: %{y:$.2f}".
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format
            for details on the formatting syntax. Dates are
            formatted using d3-time-format's syntax
            %{variable|d3-time-format}, for example "Day:
            %{2019-01-01|%A}". https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format for details on the
            date formatting syntax. Every attributes that can be
            specified per-point (the ones that are `arrayOk: true`)
            are available. variables `percentInitial`,
            `percentPrevious`, `percentTotal`, `label` and `value`.
        texttemplatesrc
            Sets the source reference on Chart Studio Cloud for
            `texttemplate`.
        uid
            Assign an id to this trace, Use this to provide object
            constancy between traces during animations and
            transitions.
        uirevision
            Controls persistence of some user-driven changes to the
            trace: `constraintrange` in `parcoords` traces, as well
            as some `editable: true` modifications such as `name`
            and `colorbar.title`. Defaults to `layout.uirevision`.
            Note that other user-driven trace attribute changes are
            controlled by `layout` attributes: `trace.visible` is
            controlled by `layout.legend.uirevision`,
            `selectedpoints` is controlled by
            `layout.selectionrevision`, and `colorbar.(x|y)`
            (accessible with `config: {editable: true}`) is
            controlled by `layout.editrevision`. Trace changes are
            tracked by `uid`, which only falls back on trace index
            if no `uid` is provided. So if your app can add/remove
            traces before the end of the `data` array, such that
            the same trace has a different index, you can still
            preserve user-driven changes if you give each trace a
            `uid` that stays with it as it moves.
        visible
            Determines whether or not this trace is visible. If
            "legendonly", the trace is not drawn, but can appear as
            a legend item (provided that the legend itself is
            visible).
        width
            Sets the bar width (in position axis units).
        x
            Sets the x coordinates.
        x0
            Alternate to `x`. Builds a linear space of x
            coordinates. Use with `dx` where `x0` is the starting
            coordinate and `dx` the step.
        xaxis
            Sets a reference between this trace's x coordinates and
            a 2D cartesian x axis. If "x" (the default value), the
            x coordinates refer to `layout.xaxis`. If "x2", the x
            coordinates refer to `layout.xaxis2`, and so on.
        xhoverformat
            Sets the hover text formatting rulefor `x` using d3
            formatting mini-languages which are very similar to
            those in Python. For numbers, see:
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
            And for dates see: https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format. We add two items to
            d3's date formatter: "%h" for half of the year as a
            decimal number as well as "%{n}f" for fractional
            seconds with n digits. For example, *2016-10-13
            09:15:23.456* with tickformat "%H~%M~%S.%2f" would
            display *09~15~23.46*By default the values are
            formatted using `xaxis.hoverformat`.
        xperiod
            Only relevant when the axis `type` is "date". Sets the
            period positioning in milliseconds or "M<n>" on the x
            axis. Special values in the form of "M<n>" could be
            used to declare the number of months. In this case `n`
            must be a positive integer.
        xperiod0
            Only relevant when the axis `type` is "date". Sets the
            base for period positioning in milliseconds or date
            string on the x0 axis. When `x0period` is round number
            of weeks, the `x0period0` by default would be on a
            Sunday i.e. 2000-01-02, otherwise it would be at
            2000-01-01.
        xperiodalignment
            Only relevant when the axis `type` is "date". Sets the
            alignment of data points on the x axis.
        xsrc
            Sets the source reference on Chart Studio Cloud for
            `x`.
        y
            Sets the y coordinates.
        y0
            Alternate to `y`. Builds a linear space of y
            coordinates. Use with `dy` where `y0` is the starting
            coordinate and `dy` the step.
        yaxis
            Sets a reference between this trace's y coordinates and
            a 2D cartesian y axis. If "y" (the default value), the
            y coordinates refer to `layout.yaxis`. If "y2", the y
            coordinates refer to `layout.yaxis2`, and so on.
        yhoverformat
            Sets the hover text formatting rulefor `y` using d3
            formatting mini-languages which are very similar to
            those in Python. For numbers, see:
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
            And for dates see: https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format. We add two items to
            d3's date formatter: "%h" for half of the year as a
            decimal number as well as "%{n}f" for fractional
            seconds with n digits. For example, *2016-10-13
            09:15:23.456* with tickformat "%H~%M~%S.%2f" would
            display *09~15~23.46*By default the values are
            formatted using `yaxis.hoverformat`.
        yperiod
            Only relevant when the axis `type` is "date". Sets the
            period positioning in milliseconds or "M<n>" on the y
            axis. Special values in the form of "M<n>" could be
            used to declare the number of months. In this case `n`
            must be a positive integer.
        yperiod0
            Only relevant when the axis `type` is "date". Sets the
            base for period positioning in milliseconds or date
            string on the y0 axis. When `y0period` is round number
            of weeks, the `y0period0` by default would be on a
            Sunday i.e. 2000-01-02, otherwise it would be at
            2000-01-01.
        yperiodalignment
            Only relevant when the axis `type` is "date". Sets the
            alignment of data points on the y axis.
        ysrc
            Sets the source reference on Chart Studio Cloud for
            `y`.
        """
def __init__(
self,
arg=None,
alignmentgroup=None,
cliponaxis=None,
connector=None,
constraintext=None,
customdata=None,
customdatasrc=None,
dx=None,
dy=None,
hoverinfo=None,
hoverinfosrc=None,
hoverlabel=None,
hovertemplate=None,
hovertemplatesrc=None,
hovertext=None,
hovertextsrc=None,
ids=None,
idssrc=None,
insidetextanchor=None,
insidetextfont=None,
legendgroup=None,
legendgrouptitle=None,
legendrank=None,
marker=None,
meta=None,
metasrc=None,
name=None,
offset=None,
offsetgroup=None,
opacity=None,
orientation=None,
outsidetextfont=None,
selectedpoints=None,
showlegend=None,
stream=None,
text=None,
textangle=None,
textfont=None,
textinfo=None,
textposition=None,
textpositionsrc=None,
textsrc=None,
texttemplate=None,
texttemplatesrc=None,
uid=None,
uirevision=None,
visible=None,
width=None,
x=None,
x0=None,
xaxis=None,
xhoverformat=None,
xperiod=None,
xperiod0=None,
xperiodalignment=None,
xsrc=None,
y=None,
y0=None,
yaxis=None,
yhoverformat=None,
yperiod=None,
yperiod0=None,
yperiodalignment=None,
ysrc=None,
**kwargs,
):
"""
Construct a new Funnel object
Visualize stages in a process using length-encoded bars. This
trace can be used to show data in either a part-to-whole
representation wherein each item appears in a single stage, or
in a "drop-off" representation wherein each item appears in
each stage it traversed. See also the "funnelarea" trace type
for a different approach to visualizing funnel data.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.Funnel`
alignmentgroup
Set several traces linked to the same position axis or
matching axes to the same alignmentgroup. This controls
whether bars compute their positional range dependently
or independently.
cliponaxis
Determines whether the text nodes are clipped about the
subplot axes. To show the text nodes above axis lines
and tick labels, make sure to set `xaxis.layer` and
`yaxis.layer` to *below traces*.
connector
:class:`plotly.graph_objects.funnel.Connector` instance
or dict with compatible properties
constraintext
Constrain the size of text inside or outside a bar to
be no larger than the bar itself.
customdata
Assigns extra data each datum. This may be useful when
listening to hover, click and selection events. Note
that, "scatter" traces also appends customdata items in
the markers DOM elements
customdatasrc
Sets the source reference on Chart Studio Cloud for
`customdata`.
dx
Sets the x coordinate step. See `x0` for more info.
dy
Sets the y coordinate step. See `y0` for more info.
hoverinfo
Determines which trace information appear on hover. If
`none` or `skip` are set, no information is displayed
upon hovering. But, if `none` is set, click and hover
events are still fired.
hoverinfosrc
Sets the source reference on Chart Studio Cloud for
`hoverinfo`.
hoverlabel
:class:`plotly.graph_objects.funnel.Hoverlabel`
instance or dict with compatible properties
hovertemplate
Template string used for rendering the information that
appear on hover box. Note that this will override
`hoverinfo`. Variables are inserted using %{variable},
for example "y: %{y}" as well as %{xother}, {%_xother},
{%_xother_}, {%xother_}. When showing info for several
points, "xother" will be added to those with different
x positions from the first point. An underscore before
or after "(x|y)other" will add a space on that side,
only when this field is shown. Numbers are formatted
using d3-format's syntax %{variable:d3-format}, for
example "Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format
for details on the formatting syntax. Dates are
formatted using d3-time-format's syntax
%{variable|d3-time-format}, for example "Day:
%{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the
date formatting syntax. The variables available in
`hovertemplate` are the ones emitted as event data
described at this link
https://plotly.com/javascript/plotlyjs-events/#event-
data. Additionally, every attributes that can be
specified per-point (the ones that are `arrayOk: true`)
are available. variables `percentInitial`,
`percentPrevious` and `percentTotal`. Anything
contained in tag `<extra>` is displayed in the
secondary box, for example
"<extra>{fullData.name}</extra>". To hide the secondary
box completely, use an empty tag `<extra></extra>`.
hovertemplatesrc
Sets the source reference on Chart Studio Cloud for
`hovertemplate`.
hovertext
Sets hover text elements associated with each (x,y)
pair. If a single string, the same string appears over
all the data points. If an array of string, the items
are mapped in order to the this trace's (x,y)
coordinates. To be seen, trace `hoverinfo` must contain
a "text" flag.
hovertextsrc
Sets the source reference on Chart Studio Cloud for
`hovertext`.
ids
Assigns id labels to each datum. These ids for object
constancy of data points during animation. Should be an
array of strings, not numbers or any other type.
idssrc
Sets the source reference on Chart Studio Cloud for
`ids`.
insidetextanchor
Determines if texts are kept at center or start/end
points in `textposition` "inside" mode.
insidetextfont
Sets the font used for `text` lying inside the bar.
legendgroup
Sets the legend group for this trace. Traces part of
the same legend group hide/show at the same time when
toggling legend items.
legendgrouptitle
:class:`plotly.graph_objects.funnel.Legendgrouptitle`
instance or dict with compatible properties
legendrank
Sets the legend rank for this trace. Items and groups
with smaller ranks are presented on top/left side while
with `*reversed* `legend.traceorder` they are on
bottom/right side. The default legendrank is 1000, so
that you can use ranks less than 1000 to place certain
items before all unranked items, and ranks greater than
1000 to go after all unranked items.
marker
:class:`plotly.graph_objects.funnel.Marker` instance or
dict with compatible properties
meta
Assigns extra meta information associated with this
trace that can be used in various text attributes.
Attributes such as trace `name`, graph, axis and
colorbar `title.text`, annotation `text`
`rangeselector`, `updatemenues` and `sliders` `label`
text all support `meta`. To access the trace `meta`
values in an attribute in the same trace, simply use
`%{meta[i]}` where `i` is the index or key of the
`meta` item in question. To access trace `meta` in
layout attributes, use `%{data[n[.meta[i]}` where `i`
is the index or key of the `meta` and `n` is the trace
index.
metasrc
Sets the source reference on Chart Studio Cloud for
`meta`.
name
Sets the trace name. The trace name appear as the
legend item and on hover.
offset
Shifts the position where the bar is drawn (in position
axis units). In "group" barmode, traces that set
"offset" will be excluded and drawn in "overlay" mode
instead.
offsetgroup
Set several traces linked to the same position axis or
matching axes to the same offsetgroup where bars of the
same position coordinate will line up.
opacity
Sets the opacity of the trace.
orientation
Sets the orientation of the funnels. With "v" ("h"),
the value of the each bar spans along the vertical
(horizontal). By default funnels are tend to be
oriented horizontally; unless only "y" array is
presented or orientation is set to "v". Also regarding
graphs including only 'horizontal' funnels, "autorange"
on the "y-axis" are set to "reversed".
outsidetextfont
Sets the font used for `text` lying outside the bar.
selectedpoints
Array containing integer indices of selected points.
Has an effect only for traces that support selections.
Note that an empty array means an empty selection where
the `unselected` are turned on for all points, whereas,
any other non-array values means no selection all where
the `selected` and `unselected` styles have no effect.
showlegend
Determines whether or not an item corresponding to this
trace is shown in the legend.
stream
:class:`plotly.graph_objects.funnel.Stream` instance or
dict with compatible properties
text
Sets text elements associated with each (x,y) pair. If
a single string, the same string appears over all the
data points. If an array of string, the items are
mapped in order to the this trace's (x,y) coordinates.
If trace `hoverinfo` contains a "text" flag and
"hovertext" is not set, these elements will be seen in
the hover labels.
textangle
Sets the angle of the tick labels with respect to the
bar. For example, a `tickangle` of -90 draws the tick
labels vertically. With "auto" the texts may
automatically be rotated to fit with the maximum size
in bars.
textfont
Sets the font used for `text`.
textinfo
Determines which trace information appear on the graph.
In the case of having multiple funnels, percentages &
totals are computed separately (per trace).
textposition
Specifies the location of the `text`. "inside"
positions `text` inside, next to the bar end (rotated
and scaled if needed). "outside" positions `text`
outside, next to the bar end (scaled if needed), unless
there is another bar stacked on this one, then the text
gets pushed inside. "auto" tries to position `text`
inside the bar, but if the bar is too small and no bar
is stacked on this one the text is moved outside. If
"none", no text appears.
textpositionsrc
Sets the source reference on Chart Studio Cloud for
`textposition`.
textsrc
Sets the source reference on Chart Studio Cloud for
`text`.
texttemplate
Template string used for rendering the information text
that appear on points. Note that this will override
`textinfo`. Variables are inserted using %{variable},
for example "y: %{y}". Numbers are formatted using
d3-format's syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format
for details on the formatting syntax. Dates are
formatted using d3-time-format's syntax
%{variable|d3-time-format}, for example "Day:
%{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the
date formatting syntax. Every attributes that can be
specified per-point (the ones that are `arrayOk: true`)
are available. variables `percentInitial`,
`percentPrevious`, `percentTotal`, `label` and `value`.
texttemplatesrc
Sets the source reference on Chart Studio Cloud for
`texttemplate`.
uid
Assign an id to this trace, Use this to provide object
constancy between traces during animations and
transitions.
uirevision
Controls persistence of some user-driven changes to the
trace: `constraintrange` in `parcoords` traces, as well
as some `editable: true` modifications such as `name`
and `colorbar.title`. Defaults to `layout.uirevision`.
Note that other user-driven trace attribute changes are
controlled by `layout` attributes: `trace.visible` is
controlled by `layout.legend.uirevision`,
`selectedpoints` is controlled by
`layout.selectionrevision`, and `colorbar.(x|y)`
(accessible with `config: {editable: true}`) is
controlled by `layout.editrevision`. Trace changes are
tracked by `uid`, which only falls back on trace index
if no `uid` is provided. So if your app can add/remove
traces before the end of the `data` array, such that
the same trace has a different index, you can still
preserve user-driven changes if you give each trace a
`uid` that stays with it as it moves.
visible
Determines whether or not this trace is visible. If
"legendonly", the trace is not drawn, but can appear as
a legend item (provided that the legend itself is
visible).
width
Sets the bar width (in position axis units).
x
Sets the x coordinates.
x0
Alternate to `x`. Builds a linear space of x
coordinates. Use with `dx` where `x0` is the starting
coordinate and `dx` the step.
xaxis
Sets a reference between this trace's x coordinates and
a 2D cartesian x axis. If "x" (the default value), the
x coordinates refer to `layout.xaxis`. If "x2", the x
coordinates refer to `layout.xaxis2`, and so on.
xhoverformat
Sets the hover text formatting rulefor `x` using d3
formatting mini-languages which are very similar to
those in Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to
d3's date formatter: "%h" for half of the year as a
decimal number as well as "%{n}f" for fractional
seconds with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f" would
display *09~15~23.46*By default the values are
formatted using `xaxis.hoverformat`.
xperiod
Only relevant when the axis `type` is "date". Sets the
period positioning in milliseconds or "M<n>" on the x
axis. Special values in the form of "M<n>" could be
used to declare the number of months. In this case `n`
must be a positive integer.
xperiod0
Only relevant when the axis `type` is "date". Sets the
base for period positioning in milliseconds or date
string on the x0 axis. When `x0period` is round number
of weeks, the `x0period0` by default would be on a
Sunday i.e. 2000-01-02, otherwise it would be at
2000-01-01.
xperiodalignment
Only relevant when the axis `type` is "date". Sets the
alignment of data points on the x axis.
xsrc
Sets the source reference on Chart Studio Cloud for
`x`.
y
Sets the y coordinates.
y0
Alternate to `y`. Builds a linear space of y
coordinates. Use with `dy` where `y0` is the starting
coordinate and `dy` the step.
yaxis
Sets a reference between this trace's y coordinates and
a 2D cartesian y axis. If "y" (the default value), the
y coordinates refer to `layout.yaxis`. If "y2", the y
coordinates refer to `layout.yaxis2`, and so on.
yhoverformat
Sets the hover text formatting rulefor `y` using d3
formatting mini-languages which are very similar to
those in Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to
d3's date formatter: "%h" for half of the year as a
decimal number as well as "%{n}f" for fractional
seconds with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f" would
display *09~15~23.46*By default the values are
formatted using `yaxis.hoverformat`.
yperiod
Only relevant when the axis `type` is "date". Sets the
period positioning in milliseconds or "M<n>" on the y
axis. Special values in the form of "M<n>" could be
used to declare the number of months. In this case `n`
must be a positive integer.
yperiod0
Only relevant when the axis `type` is "date". Sets the
base for period positioning in milliseconds or date
string on the y0 axis. When `y0period` is round number
of weeks, the `y0period0` by default would be on a
Sunday i.e. 2000-01-02, otherwise it would be at
2000-01-01.
yperiodalignment
Only relevant when the axis `type` is "date". Sets the
alignment of data points on the y axis.
ysrc
Sets the source reference on Chart Studio Cloud for
`y`.
Returns
-------
Funnel
"""
super(Funnel, self).__init__("funnel")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.Funnel
constructor must be a dict or
an instance of :class:`plotly.graph_objs.Funnel`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("alignmentgroup", None)
_v = alignmentgroup if alignmentgroup is not None else _v
if _v is not None:
self["alignmentgroup"] = _v
_v = arg.pop("cliponaxis", None)
_v = cliponaxis if cliponaxis is not None else _v
if _v is not None:
self["cliponaxis"] = _v
_v = arg.pop("connector", None)
_v = connector if connector is not None else _v
if _v is not None:
self["connector"] = _v
_v = arg.pop("constraintext", None)
_v = constraintext if constraintext is not None else _v
if _v is not None:
self["constraintext"] = _v
_v = arg.pop("customdata", None)
_v = customdata if customdata is not None else _v
if _v is not None:
self["customdata"] = _v
_v = arg.pop("customdatasrc", None)
_v = customdatasrc if customdatasrc is not None else _v
if _v is not None:
self["customdatasrc"] = _v
_v = arg.pop("dx", None)
_v = dx if dx is not None else _v
if _v is not None:
self["dx"] = _v
_v = arg.pop("dy", None)
_v = dy if dy is not None else _v
if _v is not None:
self["dy"] = _v
_v = arg.pop("hoverinfo", None)
_v = hoverinfo if hoverinfo is not None else _v
if _v is not None:
self["hoverinfo"] = _v
_v = arg.pop("hoverinfosrc", None)
_v = hoverinfosrc if hoverinfosrc is not None else _v
if _v is not None:
self["hoverinfosrc"] = _v
_v = arg.pop("hoverlabel", None)
_v = hoverlabel if hoverlabel is not None else _v
if _v is not None:
self["hoverlabel"] = _v
_v = arg.pop("hovertemplate", None)
_v = hovertemplate if hovertemplate is not None else _v
if _v is not None:
self["hovertemplate"] = _v
_v = arg.pop("hovertemplatesrc", None)
_v = hovertemplatesrc if hovertemplatesrc is not None else _v
if _v is not None:
self["hovertemplatesrc"] = _v
_v = arg.pop("hovertext", None)
_v = hovertext if hovertext is not None else _v
if _v is not None:
self["hovertext"] = _v
_v = arg.pop("hovertextsrc", None)
_v = hovertextsrc if hovertextsrc is not None else _v
if _v is not None:
self["hovertextsrc"] = _v
_v = arg.pop("ids", None)
_v = ids if ids is not None else _v
if _v is not None:
self["ids"] = _v
_v = arg.pop("idssrc", None)
_v = idssrc if idssrc is not None else _v
if _v is not None:
self["idssrc"] = _v
_v = arg.pop("insidetextanchor", None)
_v = insidetextanchor if insidetextanchor is not None else _v
if _v is not None:
self["insidetextanchor"] = _v
_v = arg.pop("insidetextfont", None)
_v = insidetextfont if insidetextfont is not None else _v
if _v is not None:
self["insidetextfont"] = _v
_v = arg.pop("legendgroup", None)
_v = legendgroup if legendgroup is not None else _v
if _v is not None:
self["legendgroup"] = _v
_v = arg.pop("legendgrouptitle", None)
_v = legendgrouptitle if legendgrouptitle is not None else _v
if _v is not None:
self["legendgrouptitle"] = _v
_v = arg.pop("legendrank", None)
_v = legendrank if legendrank is not None else _v
if _v is not None:
self["legendrank"] = _v
_v = arg.pop("marker", None)
_v = marker if marker is not None else _v
if _v is not None:
self["marker"] = _v
_v = arg.pop("meta", None)
_v = meta if meta is not None else _v
if _v is not None:
self["meta"] = _v
_v = arg.pop("metasrc", None)
_v = metasrc if metasrc is not None else _v
if _v is not None:
self["metasrc"] = _v
_v = arg.pop("name", None)
_v = name if name is not None else _v
if _v is not None:
self["name"] = _v
_v = arg.pop("offset", None)
_v = offset if offset is not None else _v
if _v is not None:
self["offset"] = _v
_v = arg.pop("offsetgroup", None)
_v = offsetgroup if offsetgroup is not None else _v
if _v is not None:
self["offsetgroup"] = _v
_v = arg.pop("opacity", None)
_v = opacity if opacity is not None else _v
if _v is not None:
self["opacity"] = _v
_v = arg.pop("orientation", None)
_v = orientation if orientation is not None else _v
if _v is not None:
self["orientation"] = _v
_v = arg.pop("outsidetextfont", None)
_v = outsidetextfont if outsidetextfont is not None else _v
if _v is not None:
self["outsidetextfont"] = _v
_v = arg.pop("selectedpoints", None)
_v = selectedpoints if selectedpoints is not None else _v
if _v is not None:
self["selectedpoints"] = _v
_v = arg.pop("showlegend", None)
_v = showlegend if showlegend is not None else _v
if _v is not None:
self["showlegend"] = _v
_v = arg.pop("stream", None)
_v = stream if stream is not None else _v
if _v is not None:
self["stream"] = _v
_v = arg.pop("text", None)
_v = text if text is not None else _v
if _v is not None:
self["text"] = _v
_v = arg.pop("textangle", None)
_v = textangle if textangle is not None else _v
if _v is not None:
self["textangle"] = _v
_v = arg.pop("textfont", None)
_v = textfont if textfont is not None else _v
if _v is not None:
self["textfont"] = _v
_v = arg.pop("textinfo", None)
_v = textinfo if textinfo is not None else _v
if _v is not None:
self["textinfo"] = _v
_v = arg.pop("textposition", None)
_v = textposition if textposition is not None else _v
if _v is not None:
self["textposition"] = _v
_v = arg.pop("textpositionsrc", None)
_v = textpositionsrc if textpositionsrc is not None else _v
if _v is not None:
self["textpositionsrc"] = _v
_v = arg.pop("textsrc", None)
_v = textsrc if textsrc is not None else _v
if _v is not None:
self["textsrc"] = _v
_v = arg.pop("texttemplate", None)
_v = texttemplate if texttemplate is not None else _v
if _v is not None:
self["texttemplate"] = _v
_v = arg.pop("texttemplatesrc", None)
_v = texttemplatesrc if texttemplatesrc is not None else _v
if _v is not None:
self["texttemplatesrc"] = _v
_v = arg.pop("uid", None)
_v = uid if uid is not None else _v
if _v is not None:
self["uid"] = _v
_v = arg.pop("uirevision", None)
_v = uirevision if uirevision is not None else _v
if _v is not None:
self["uirevision"] = _v
_v = arg.pop("visible", None)
_v = visible if visible is not None else _v
if _v is not None:
self["visible"] = _v
_v = arg.pop("width", None)
_v = width if width is not None else _v
if _v is not None:
self["width"] = _v
_v = arg.pop("x", None)
_v = x if x is not None else _v
if _v is not None:
self["x"] = _v
_v = arg.pop("x0", None)
_v = x0 if x0 is not None else _v
if _v is not None:
self["x0"] = _v
_v = arg.pop("xaxis", None)
_v = xaxis if xaxis is not None else _v
if _v is not None:
self["xaxis"] = _v
_v = arg.pop("xhoverformat", None)
_v = xhoverformat if xhoverformat is not None else _v
if _v is not None:
self["xhoverformat"] = _v
_v = arg.pop("xperiod", None)
_v = xperiod if xperiod is not None else _v
if _v is not None:
self["xperiod"] = _v
_v = arg.pop("xperiod0", None)
_v = xperiod0 if xperiod0 is not None else _v
if _v is not None:
self["xperiod0"] = _v
_v = arg.pop("xperiodalignment", None)
_v = xperiodalignment if xperiodalignment is not None else _v
if _v is not None:
self["xperiodalignment"] = _v
_v = arg.pop("xsrc", None)
_v = xsrc if xsrc is not None else _v
if _v is not None:
self["xsrc"] = _v
_v = arg.pop("y", None)
_v = y if y is not None else _v
if _v is not None:
self["y"] = _v
_v = arg.pop("y0", None)
_v = y0 if y0 is not None else _v
if _v is not None:
self["y0"] = _v
_v = arg.pop("yaxis", None)
_v = yaxis if yaxis is not None else _v
if _v is not None:
self["yaxis"] = _v
_v = arg.pop("yhoverformat", None)
_v = yhoverformat if yhoverformat is not None else _v
if _v is not None:
self["yhoverformat"] = _v
_v = arg.pop("yperiod", None)
_v = yperiod if yperiod is not None else _v
if _v is not None:
self["yperiod"] = _v
_v = arg.pop("yperiod0", None)
_v = yperiod0 if yperiod0 is not None else _v
if _v is not None:
self["yperiod0"] = _v
_v = arg.pop("yperiodalignment", None)
_v = yperiodalignment if yperiodalignment is not None else _v
if _v is not None:
self["yperiodalignment"] = _v
_v = arg.pop("ysrc", None)
_v = ysrc if ysrc is not None else _v
if _v is not None:
self["ysrc"] = _v
# Read-only literals
# ------------------
self._props["type"] = "funnel"
arg.pop("type", None)
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceType as _BaseTraceType
import copy as _copy
class Funnel(_BaseTraceType):
    """Funnel trace type.

    Visualize stages in a process using length-encoded bars, in either a
    part-to-whole or a "drop-off" representation.  Property access and
    validation are handled by the plotly base trace machinery.
    """

    # Dotted path of the parent object within the figure schema;
    # empty string means this is a top-level trace.
    _parent_path_str = ""
    # This object's own path / trace-type name within the plotly schema.
    _path_str = "funnel"
    # The complete set of property names this trace accepts; used by the
    # base class to validate attribute/item access and constructor keywords.
    _valid_props = {
        "alignmentgroup",
        "cliponaxis",
        "connector",
        "constraintext",
        "customdata",
        "customdatasrc",
        "dx",
        "dy",
        "hoverinfo",
        "hoverinfosrc",
        "hoverlabel",
        "hovertemplate",
        "hovertemplatesrc",
        "hovertext",
        "hovertextsrc",
        "ids",
        "idssrc",
        "insidetextanchor",
        "insidetextfont",
        "legendgroup",
        "legendgrouptitle",
        "legendrank",
        "marker",
        "meta",
        "metasrc",
        "name",
        "offset",
        "offsetgroup",
        "opacity",
        "orientation",
        "outsidetextfont",
        "selectedpoints",
        "showlegend",
        "stream",
        "text",
        "textangle",
        "textfont",
        "textinfo",
        "textposition",
        "textpositionsrc",
        "textsrc",
        "texttemplate",
        "texttemplatesrc",
        "type",
        "uid",
        "uirevision",
        "visible",
        "width",
        "x",
        "x0",
        "xaxis",
        "xhoverformat",
        "xperiod",
        "xperiod0",
        "xperiodalignment",
        "xsrc",
        "y",
        "y0",
        "yaxis",
        "yhoverformat",
        "yperiod",
        "yperiod0",
        "yperiodalignment",
        "ysrc",
    }
@property
def alignmentgroup(self):
return self["alignmentgroup"]
@alignmentgroup.setter
def alignmentgroup(self, val):
self["alignmentgroup"] = val
@property
def cliponaxis(self):
return self["cliponaxis"]
@cliponaxis.setter
def cliponaxis(self, val):
self["cliponaxis"] = val
@property
def connector(self):
return self["connector"]
@connector.setter
def connector(self, val):
self["connector"] = val
@property
def constraintext(self):
return self["constraintext"]
@constraintext.setter
def constraintext(self, val):
self["constraintext"] = val
@property
def customdata(self):
return self["customdata"]
@customdata.setter
def customdata(self, val):
self["customdata"] = val
@property
def customdatasrc(self):
return self["customdatasrc"]
@customdatasrc.setter
def customdatasrc(self, val):
self["customdatasrc"] = val
@property
def dx(self):
return self["dx"]
@dx.setter
def dx(self, val):
self["dx"] = val
@property
def dy(self):
return self["dy"]
@dy.setter
def dy(self, val):
self["dy"] = val
@property
def hoverinfo(self):
return self["hoverinfo"]
@hoverinfo.setter
def hoverinfo(self, val):
self["hoverinfo"] = val
@property
def hoverinfosrc(self):
return self["hoverinfosrc"]
@hoverinfosrc.setter
def hoverinfosrc(self, val):
self["hoverinfosrc"] = val
@property
def hoverlabel(self):
return self["hoverlabel"]
@hoverlabel.setter
def hoverlabel(self, val):
self["hoverlabel"] = val
@property
def hovertemplate(self):
return self["hovertemplate"]
@hovertemplate.setter
def hovertemplate(self, val):
self["hovertemplate"] = val
@property
def hovertemplatesrc(self):
return self["hovertemplatesrc"]
@hovertemplatesrc.setter
def hovertemplatesrc(self, val):
self["hovertemplatesrc"] = val
@property
def hovertext(self):
return self["hovertext"]
@hovertext.setter
def hovertext(self, val):
self["hovertext"] = val
@property
def hovertextsrc(self):
return self["hovertextsrc"]
@hovertextsrc.setter
def hovertextsrc(self, val):
self["hovertextsrc"] = val
@property
def ids(self):
return self["ids"]
@ids.setter
def ids(self, val):
self["ids"] = val
@property
def idssrc(self):
return self["idssrc"]
@idssrc.setter
def idssrc(self, val):
self["idssrc"] = val
@property
def insidetextanchor(self):
return self["insidetextanchor"]
@insidetextanchor.setter
def insidetextanchor(self, val):
self["insidetextanchor"] = val
@property
def insidetextfont(self):
return self["insidetextfont"]
@insidetextfont.setter
def insidetextfont(self, val):
self["insidetextfont"] = val
@property
def legendgroup(self):
return self["legendgroup"]
@legendgroup.setter
def legendgroup(self, val):
self["legendgroup"] = val
@property
def legendgrouptitle(self):
return self["legendgrouptitle"]
@legendgrouptitle.setter
def legendgrouptitle(self, val):
self["legendgrouptitle"] = val
@property
def legendrank(self):
return self["legendrank"]
@legendrank.setter
def legendrank(self, val):
self["legendrank"] = val
@property
def marker(self):
return self["marker"]
@marker.setter
def marker(self, val):
self["marker"] = val
@property
def meta(self):
return self["meta"]
@meta.setter
def meta(self, val):
self["meta"] = val
@property
def metasrc(self):
return self["metasrc"]
@metasrc.setter
def metasrc(self, val):
self["metasrc"] = val
@property
def name(self):
return self["name"]
@name.setter
def name(self, val):
self["name"] = val
@property
def offset(self):
return self["offset"]
@offset.setter
def offset(self, val):
self["offset"] = val
@property
def offsetgroup(self):
return self["offsetgroup"]
@offsetgroup.setter
def offsetgroup(self, val):
self["offsetgroup"] = val
@property
def opacity(self):
return self["opacity"]
@opacity.setter
def opacity(self, val):
self["opacity"] = val
@property
def orientation(self):
return self["orientation"]
@orientation.setter
def orientation(self, val):
self["orientation"] = val
@property
def outsidetextfont(self):
return self["outsidetextfont"]
@outsidetextfont.setter
def outsidetextfont(self, val):
self["outsidetextfont"] = val
@property
def selectedpoints(self):
return self["selectedpoints"]
@selectedpoints.setter
def selectedpoints(self, val):
self["selectedpoints"] = val
@property
def showlegend(self):
return self["showlegend"]
@showlegend.setter
def showlegend(self, val):
self["showlegend"] = val
@property
def stream(self):
return self["stream"]
@stream.setter
def stream(self, val):
self["stream"] = val
@property
def text(self):
return self["text"]
@text.setter
def text(self, val):
self["text"] = val
@property
def textangle(self):
return self["textangle"]
@textangle.setter
def textangle(self, val):
self["textangle"] = val
@property
def textfont(self):
return self["textfont"]
@textfont.setter
def textfont(self, val):
self["textfont"] = val
@property
def textinfo(self):
return self["textinfo"]
@textinfo.setter
def textinfo(self, val):
self["textinfo"] = val
@property
def textposition(self):
return self["textposition"]
@textposition.setter
def textposition(self, val):
self["textposition"] = val
@property
def textpositionsrc(self):
return self["textpositionsrc"]
@textpositionsrc.setter
def textpositionsrc(self, val):
self["textpositionsrc"] = val
@property
def textsrc(self):
return self["textsrc"]
@textsrc.setter
def textsrc(self, val):
self["textsrc"] = val
@property
def texttemplate(self):
return self["texttemplate"]
@texttemplate.setter
def texttemplate(self, val):
self["texttemplate"] = val
@property
def texttemplatesrc(self):
return self["texttemplatesrc"]
@texttemplatesrc.setter
def texttemplatesrc(self, val):
self["texttemplatesrc"] = val
    # --- Simple property accessors --------------------------------------
    # Each property below delegates to BaseTraceType item access
    # (``self[...]``), which validates and coerces the value on
    # assignment.  Full, human-readable descriptions of every property
    # are available in ``_prop_descriptions``.
    @property
    def uid(self):
        return self["uid"]
    @uid.setter
    def uid(self, val):
        self["uid"] = val
    @property
    def uirevision(self):
        return self["uirevision"]
    @uirevision.setter
    def uirevision(self, val):
        self["uirevision"] = val
    @property
    def visible(self):
        return self["visible"]
    @visible.setter
    def visible(self, val):
        self["visible"] = val
    @property
    def width(self):
        return self["width"]
    @width.setter
    def width(self, val):
        self["width"] = val
    @property
    def x(self):
        return self["x"]
    @x.setter
    def x(self, val):
        self["x"] = val
    @property
    def x0(self):
        return self["x0"]
    @x0.setter
    def x0(self, val):
        self["x0"] = val
    @property
    def xaxis(self):
        return self["xaxis"]
    @xaxis.setter
    def xaxis(self, val):
        self["xaxis"] = val
    @property
    def xhoverformat(self):
        return self["xhoverformat"]
    @xhoverformat.setter
    def xhoverformat(self, val):
        self["xhoverformat"] = val
    @property
    def xperiod(self):
        return self["xperiod"]
    @xperiod.setter
    def xperiod(self, val):
        self["xperiod"] = val
    @property
    def xperiod0(self):
        return self["xperiod0"]
    @xperiod0.setter
    def xperiod0(self, val):
        self["xperiod0"] = val
    @property
    def xperiodalignment(self):
        return self["xperiodalignment"]
    @xperiodalignment.setter
    def xperiodalignment(self, val):
        self["xperiodalignment"] = val
    @property
    def xsrc(self):
        return self["xsrc"]
    @xsrc.setter
    def xsrc(self, val):
        self["xsrc"] = val
    @property
    def y(self):
        return self["y"]
    @y.setter
    def y(self, val):
        self["y"] = val
    @property
    def y0(self):
        return self["y0"]
    @y0.setter
    def y0(self, val):
        self["y0"] = val
    @property
    def yaxis(self):
        return self["yaxis"]
    @yaxis.setter
    def yaxis(self, val):
        self["yaxis"] = val
    @property
    def yhoverformat(self):
        return self["yhoverformat"]
    @yhoverformat.setter
    def yhoverformat(self, val):
        self["yhoverformat"] = val
    @property
    def yperiod(self):
        return self["yperiod"]
    @yperiod.setter
    def yperiod(self, val):
        self["yperiod"] = val
    @property
    def yperiod0(self):
        return self["yperiod0"]
    @yperiod0.setter
    def yperiod0(self, val):
        self["yperiod0"] = val
    @property
    def yperiodalignment(self):
        return self["yperiodalignment"]
    @yperiodalignment.setter
    def yperiodalignment(self, val):
        self["yperiodalignment"] = val
    @property
    def ysrc(self):
        return self["ysrc"]
    @ysrc.setter
    def ysrc(self, val):
        self["ysrc"] = val
    # "type" is read-only: it identifies the trace type and is fixed to
    # "funnel" by __init__; there is no setter.
    @property
    def type(self):
        return self._props["type"]
    @property
    def _prop_descriptions(self):
        # Human-readable description of every settable property, consumed
        # when building user-facing docstrings/help.  The text below is
        # auto-generated from the plotly.js schema -- do not hand-edit it
        # (including its typos), or it will drift from upstream.
        return """\
        alignmentgroup
            Set several traces linked to the same position axis or
            matching axes to the same alignmentgroup. This controls
            whether bars compute their positional range dependently
            or independently.
        cliponaxis
            Determines whether the text nodes are clipped about the
            subplot axes. To show the text nodes above axis lines
            and tick labels, make sure to set `xaxis.layer` and
            `yaxis.layer` to *below traces*.
        connector
            :class:`plotly.graph_objects.funnel.Connector` instance
            or dict with compatible properties
        constraintext
            Constrain the size of text inside or outside a bar to
            be no larger than the bar itself.
        customdata
            Assigns extra data each datum. This may be useful when
            listening to hover, click and selection events. Note
            that, "scatter" traces also appends customdata items in
            the markers DOM elements
        customdatasrc
            Sets the source reference on Chart Studio Cloud for
            `customdata`.
        dx
            Sets the x coordinate step. See `x0` for more info.
        dy
            Sets the y coordinate step. See `y0` for more info.
        hoverinfo
            Determines which trace information appear on hover. If
            `none` or `skip` are set, no information is displayed
            upon hovering. But, if `none` is set, click and hover
            events are still fired.
        hoverinfosrc
            Sets the source reference on Chart Studio Cloud for
            `hoverinfo`.
        hoverlabel
            :class:`plotly.graph_objects.funnel.Hoverlabel`
            instance or dict with compatible properties
        hovertemplate
            Template string used for rendering the information that
            appear on hover box. Note that this will override
            `hoverinfo`. Variables are inserted using %{variable},
            for example "y: %{y}" as well as %{xother}, {%_xother},
            {%_xother_}, {%xother_}. When showing info for several
            points, "xother" will be added to those with different
            x positions from the first point. An underscore before
            or after "(x|y)other" will add a space on that side,
            only when this field is shown. Numbers are formatted
            using d3-format's syntax %{variable:d3-format}, for
            example "Price: %{y:$.2f}".
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format
            for details on the formatting syntax. Dates are
            formatted using d3-time-format's syntax
            %{variable|d3-time-format}, for example "Day:
            %{2019-01-01|%A}". https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format for details on the
            date formatting syntax. The variables available in
            `hovertemplate` are the ones emitted as event data
            described at this link
            https://plotly.com/javascript/plotlyjs-events/#event-
            data. Additionally, every attributes that can be
            specified per-point (the ones that are `arrayOk: true`)
            are available. variables `percentInitial`,
            `percentPrevious` and `percentTotal`. Anything
            contained in tag `<extra>` is displayed in the
            secondary box, for example
            "<extra>{fullData.name}</extra>". To hide the secondary
            box completely, use an empty tag `<extra></extra>`.
        hovertemplatesrc
            Sets the source reference on Chart Studio Cloud for
            `hovertemplate`.
        hovertext
            Sets hover text elements associated with each (x,y)
            pair. If a single string, the same string appears over
            all the data points. If an array of string, the items
            are mapped in order to the this trace's (x,y)
            coordinates. To be seen, trace `hoverinfo` must contain
            a "text" flag.
        hovertextsrc
            Sets the source reference on Chart Studio Cloud for
            `hovertext`.
        ids
            Assigns id labels to each datum. These ids for object
            constancy of data points during animation. Should be an
            array of strings, not numbers or any other type.
        idssrc
            Sets the source reference on Chart Studio Cloud for
            `ids`.
        insidetextanchor
            Determines if texts are kept at center or start/end
            points in `textposition` "inside" mode.
        insidetextfont
            Sets the font used for `text` lying inside the bar.
        legendgroup
            Sets the legend group for this trace. Traces part of
            the same legend group hide/show at the same time when
            toggling legend items.
        legendgrouptitle
            :class:`plotly.graph_objects.funnel.Legendgrouptitle`
            instance or dict with compatible properties
        legendrank
            Sets the legend rank for this trace. Items and groups
            with smaller ranks are presented on top/left side while
            with `*reversed* `legend.traceorder` they are on
            bottom/right side. The default legendrank is 1000, so
            that you can use ranks less than 1000 to place certain
            items before all unranked items, and ranks greater than
            1000 to go after all unranked items.
        marker
            :class:`plotly.graph_objects.funnel.Marker` instance or
            dict with compatible properties
        meta
            Assigns extra meta information associated with this
            trace that can be used in various text attributes.
            Attributes such as trace `name`, graph, axis and
            colorbar `title.text`, annotation `text`
            `rangeselector`, `updatemenues` and `sliders` `label`
            text all support `meta`. To access the trace `meta`
            values in an attribute in the same trace, simply use
            `%{meta[i]}` where `i` is the index or key of the
            `meta` item in question. To access trace `meta` in
            layout attributes, use `%{data[n[.meta[i]}` where `i`
            is the index or key of the `meta` and `n` is the trace
            index.
        metasrc
            Sets the source reference on Chart Studio Cloud for
            `meta`.
        name
            Sets the trace name. The trace name appear as the
            legend item and on hover.
        offset
            Shifts the position where the bar is drawn (in position
            axis units). In "group" barmode, traces that set
            "offset" will be excluded and drawn in "overlay" mode
            instead.
        offsetgroup
            Set several traces linked to the same position axis or
            matching axes to the same offsetgroup where bars of the
            same position coordinate will line up.
        opacity
            Sets the opacity of the trace.
        orientation
            Sets the orientation of the funnels. With "v" ("h"),
            the value of the each bar spans along the vertical
            (horizontal). By default funnels are tend to be
            oriented horizontally; unless only "y" array is
            presented or orientation is set to "v". Also regarding
            graphs including only 'horizontal' funnels, "autorange"
            on the "y-axis" are set to "reversed".
        outsidetextfont
            Sets the font used for `text` lying outside the bar.
        selectedpoints
            Array containing integer indices of selected points.
            Has an effect only for traces that support selections.
            Note that an empty array means an empty selection where
            the `unselected` are turned on for all points, whereas,
            any other non-array values means no selection all where
            the `selected` and `unselected` styles have no effect.
        showlegend
            Determines whether or not an item corresponding to this
            trace is shown in the legend.
        stream
            :class:`plotly.graph_objects.funnel.Stream` instance or
            dict with compatible properties
        text
            Sets text elements associated with each (x,y) pair. If
            a single string, the same string appears over all the
            data points. If an array of string, the items are
            mapped in order to the this trace's (x,y) coordinates.
            If trace `hoverinfo` contains a "text" flag and
            "hovertext" is not set, these elements will be seen in
            the hover labels.
        textangle
            Sets the angle of the tick labels with respect to the
            bar. For example, a `tickangle` of -90 draws the tick
            labels vertically. With "auto" the texts may
            automatically be rotated to fit with the maximum size
            in bars.
        textfont
            Sets the font used for `text`.
        textinfo
            Determines which trace information appear on the graph.
            In the case of having multiple funnels, percentages &
            totals are computed separately (per trace).
        textposition
            Specifies the location of the `text`. "inside"
            positions `text` inside, next to the bar end (rotated
            and scaled if needed). "outside" positions `text`
            outside, next to the bar end (scaled if needed), unless
            there is another bar stacked on this one, then the text
            gets pushed inside. "auto" tries to position `text`
            inside the bar, but if the bar is too small and no bar
            is stacked on this one the text is moved outside. If
            "none", no text appears.
        textpositionsrc
            Sets the source reference on Chart Studio Cloud for
            `textposition`.
        textsrc
            Sets the source reference on Chart Studio Cloud for
            `text`.
        texttemplate
            Template string used for rendering the information text
            that appear on points. Note that this will override
            `textinfo`. Variables are inserted using %{variable},
            for example "y: %{y}". Numbers are formatted using
            d3-format's syntax %{variable:d3-format}, for example
            "Price: %{y:$.2f}".
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format
            for details on the formatting syntax. Dates are
            formatted using d3-time-format's syntax
            %{variable|d3-time-format}, for example "Day:
            %{2019-01-01|%A}". https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format for details on the
            date formatting syntax. Every attributes that can be
            specified per-point (the ones that are `arrayOk: true`)
            are available. variables `percentInitial`,
            `percentPrevious`, `percentTotal`, `label` and `value`.
        texttemplatesrc
            Sets the source reference on Chart Studio Cloud for
            `texttemplate`.
        uid
            Assign an id to this trace, Use this to provide object
            constancy between traces during animations and
            transitions.
        uirevision
            Controls persistence of some user-driven changes to the
            trace: `constraintrange` in `parcoords` traces, as well
            as some `editable: true` modifications such as `name`
            and `colorbar.title`. Defaults to `layout.uirevision`.
            Note that other user-driven trace attribute changes are
            controlled by `layout` attributes: `trace.visible` is
            controlled by `layout.legend.uirevision`,
            `selectedpoints` is controlled by
            `layout.selectionrevision`, and `colorbar.(x|y)`
            (accessible with `config: {editable: true}`) is
            controlled by `layout.editrevision`. Trace changes are
            tracked by `uid`, which only falls back on trace index
            if no `uid` is provided. So if your app can add/remove
            traces before the end of the `data` array, such that
            the same trace has a different index, you can still
            preserve user-driven changes if you give each trace a
            `uid` that stays with it as it moves.
        visible
            Determines whether or not this trace is visible. If
            "legendonly", the trace is not drawn, but can appear as
            a legend item (provided that the legend itself is
            visible).
        width
            Sets the bar width (in position axis units).
        x
            Sets the x coordinates.
        x0
            Alternate to `x`. Builds a linear space of x
            coordinates. Use with `dx` where `x0` is the starting
            coordinate and `dx` the step.
        xaxis
            Sets a reference between this trace's x coordinates and
            a 2D cartesian x axis. If "x" (the default value), the
            x coordinates refer to `layout.xaxis`. If "x2", the x
            coordinates refer to `layout.xaxis2`, and so on.
        xhoverformat
            Sets the hover text formatting rulefor `x` using d3
            formatting mini-languages which are very similar to
            those in Python. For numbers, see:
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
            And for dates see: https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format. We add two items to
            d3's date formatter: "%h" for half of the year as a
            decimal number as well as "%{n}f" for fractional
            seconds with n digits. For example, *2016-10-13
            09:15:23.456* with tickformat "%H~%M~%S.%2f" would
            display *09~15~23.46*By default the values are
            formatted using `xaxis.hoverformat`.
        xperiod
            Only relevant when the axis `type` is "date". Sets the
            period positioning in milliseconds or "M<n>" on the x
            axis. Special values in the form of "M<n>" could be
            used to declare the number of months. In this case `n`
            must be a positive integer.
        xperiod0
            Only relevant when the axis `type` is "date". Sets the
            base for period positioning in milliseconds or date
            string on the x0 axis. When `x0period` is round number
            of weeks, the `x0period0` by default would be on a
            Sunday i.e. 2000-01-02, otherwise it would be at
            2000-01-01.
        xperiodalignment
            Only relevant when the axis `type` is "date". Sets the
            alignment of data points on the x axis.
        xsrc
            Sets the source reference on Chart Studio Cloud for
            `x`.
        y
            Sets the y coordinates.
        y0
            Alternate to `y`. Builds a linear space of y
            coordinates. Use with `dy` where `y0` is the starting
            coordinate and `dy` the step.
        yaxis
            Sets a reference between this trace's y coordinates and
            a 2D cartesian y axis. If "y" (the default value), the
            y coordinates refer to `layout.yaxis`. If "y2", the y
            coordinates refer to `layout.yaxis2`, and so on.
        yhoverformat
            Sets the hover text formatting rulefor `y` using d3
            formatting mini-languages which are very similar to
            those in Python. For numbers, see:
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
            And for dates see: https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format. We add two items to
            d3's date formatter: "%h" for half of the year as a
            decimal number as well as "%{n}f" for fractional
            seconds with n digits. For example, *2016-10-13
            09:15:23.456* with tickformat "%H~%M~%S.%2f" would
            display *09~15~23.46*By default the values are
            formatted using `yaxis.hoverformat`.
        yperiod
            Only relevant when the axis `type` is "date". Sets the
            period positioning in milliseconds or "M<n>" on the y
            axis. Special values in the form of "M<n>" could be
            used to declare the number of months. In this case `n`
            must be a positive integer.
        yperiod0
            Only relevant when the axis `type` is "date". Sets the
            base for period positioning in milliseconds or date
            string on the y0 axis. When `y0period` is round number
            of weeks, the `y0period0` by default would be on a
            Sunday i.e. 2000-01-02, otherwise it would be at
            2000-01-01.
        yperiodalignment
            Only relevant when the axis `type` is "date". Sets the
            alignment of data points on the y axis.
        ysrc
            Sets the source reference on Chart Studio Cloud for
            `y`.
        """
def __init__(
self,
arg=None,
alignmentgroup=None,
cliponaxis=None,
connector=None,
constraintext=None,
customdata=None,
customdatasrc=None,
dx=None,
dy=None,
hoverinfo=None,
hoverinfosrc=None,
hoverlabel=None,
hovertemplate=None,
hovertemplatesrc=None,
hovertext=None,
hovertextsrc=None,
ids=None,
idssrc=None,
insidetextanchor=None,
insidetextfont=None,
legendgroup=None,
legendgrouptitle=None,
legendrank=None,
marker=None,
meta=None,
metasrc=None,
name=None,
offset=None,
offsetgroup=None,
opacity=None,
orientation=None,
outsidetextfont=None,
selectedpoints=None,
showlegend=None,
stream=None,
text=None,
textangle=None,
textfont=None,
textinfo=None,
textposition=None,
textpositionsrc=None,
textsrc=None,
texttemplate=None,
texttemplatesrc=None,
uid=None,
uirevision=None,
visible=None,
width=None,
x=None,
x0=None,
xaxis=None,
xhoverformat=None,
xperiod=None,
xperiod0=None,
xperiodalignment=None,
xsrc=None,
y=None,
y0=None,
yaxis=None,
yhoverformat=None,
yperiod=None,
yperiod0=None,
yperiodalignment=None,
ysrc=None,
**kwargs,
):
super(Funnel, self).__init__("funnel")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.Funnel
constructor must be a dict or
an instance of :class:`plotly.graph_objs.Funnel`"""
)
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
_v = arg.pop("alignmentgroup", None)
_v = alignmentgroup if alignmentgroup is not None else _v
if _v is not None:
self["alignmentgroup"] = _v
_v = arg.pop("cliponaxis", None)
_v = cliponaxis if cliponaxis is not None else _v
if _v is not None:
self["cliponaxis"] = _v
_v = arg.pop("connector", None)
_v = connector if connector is not None else _v
if _v is not None:
self["connector"] = _v
_v = arg.pop("constraintext", None)
_v = constraintext if constraintext is not None else _v
if _v is not None:
self["constraintext"] = _v
_v = arg.pop("customdata", None)
_v = customdata if customdata is not None else _v
if _v is not None:
self["customdata"] = _v
_v = arg.pop("customdatasrc", None)
_v = customdatasrc if customdatasrc is not None else _v
if _v is not None:
self["customdatasrc"] = _v
_v = arg.pop("dx", None)
_v = dx if dx is not None else _v
if _v is not None:
self["dx"] = _v
_v = arg.pop("dy", None)
_v = dy if dy is not None else _v
if _v is not None:
self["dy"] = _v
_v = arg.pop("hoverinfo", None)
_v = hoverinfo if hoverinfo is not None else _v
if _v is not None:
self["hoverinfo"] = _v
_v = arg.pop("hoverinfosrc", None)
_v = hoverinfosrc if hoverinfosrc is not None else _v
if _v is not None:
self["hoverinfosrc"] = _v
_v = arg.pop("hoverlabel", None)
_v = hoverlabel if hoverlabel is not None else _v
if _v is not None:
self["hoverlabel"] = _v
_v = arg.pop("hovertemplate", None)
_v = hovertemplate if hovertemplate is not None else _v
if _v is not None:
self["hovertemplate"] = _v
_v = arg.pop("hovertemplatesrc", None)
_v = hovertemplatesrc if hovertemplatesrc is not None else _v
if _v is not None:
self["hovertemplatesrc"] = _v
_v = arg.pop("hovertext", None)
_v = hovertext if hovertext is not None else _v
if _v is not None:
self["hovertext"] = _v
_v = arg.pop("hovertextsrc", None)
_v = hovertextsrc if hovertextsrc is not None else _v
if _v is not None:
self["hovertextsrc"] = _v
_v = arg.pop("ids", None)
_v = ids if ids is not None else _v
if _v is not None:
self["ids"] = _v
_v = arg.pop("idssrc", None)
_v = idssrc if idssrc is not None else _v
if _v is not None:
self["idssrc"] = _v
_v = arg.pop("insidetextanchor", None)
_v = insidetextanchor if insidetextanchor is not None else _v
if _v is not None:
self["insidetextanchor"] = _v
_v = arg.pop("insidetextfont", None)
_v = insidetextfont if insidetextfont is not None else _v
if _v is not None:
self["insidetextfont"] = _v
_v = arg.pop("legendgroup", None)
_v = legendgroup if legendgroup is not None else _v
if _v is not None:
self["legendgroup"] = _v
_v = arg.pop("legendgrouptitle", None)
_v = legendgrouptitle if legendgrouptitle is not None else _v
if _v is not None:
self["legendgrouptitle"] = _v
_v = arg.pop("legendrank", None)
_v = legendrank if legendrank is not None else _v
if _v is not None:
self["legendrank"] = _v
_v = arg.pop("marker", None)
_v = marker if marker is not None else _v
if _v is not None:
self["marker"] = _v
_v = arg.pop("meta", None)
_v = meta if meta is not None else _v
if _v is not None:
self["meta"] = _v
_v = arg.pop("metasrc", None)
_v = metasrc if metasrc is not None else _v
if _v is not None:
self["metasrc"] = _v
_v = arg.pop("name", None)
_v = name if name is not None else _v
if _v is not None:
self["name"] = _v
_v = arg.pop("offset", None)
_v = offset if offset is not None else _v
if _v is not None:
self["offset"] = _v
_v = arg.pop("offsetgroup", None)
_v = offsetgroup if offsetgroup is not None else _v
if _v is not None:
self["offsetgroup"] = _v
_v = arg.pop("opacity", None)
_v = opacity if opacity is not None else _v
if _v is not None:
self["opacity"] = _v
_v = arg.pop("orientation", None)
_v = orientation if orientation is not None else _v
if _v is not None:
self["orientation"] = _v
_v = arg.pop("outsidetextfont", None)
_v = outsidetextfont if outsidetextfont is not None else _v
if _v is not None:
self["outsidetextfont"] = _v
_v = arg.pop("selectedpoints", None)
_v = selectedpoints if selectedpoints is not None else _v
if _v is not None:
self["selectedpoints"] = _v
_v = arg.pop("showlegend", None)
_v = showlegend if showlegend is not None else _v
if _v is not None:
self["showlegend"] = _v
_v = arg.pop("stream", None)
_v = stream if stream is not None else _v
if _v is not None:
self["stream"] = _v
_v = arg.pop("text", None)
_v = text if text is not None else _v
if _v is not None:
self["text"] = _v
_v = arg.pop("textangle", None)
_v = textangle if textangle is not None else _v
if _v is not None:
self["textangle"] = _v
_v = arg.pop("textfont", None)
_v = textfont if textfont is not None else _v
if _v is not None:
self["textfont"] = _v
_v = arg.pop("textinfo", None)
_v = textinfo if textinfo is not None else _v
if _v is not None:
self["textinfo"] = _v
_v = arg.pop("textposition", None)
_v = textposition if textposition is not None else _v
if _v is not None:
self["textposition"] = _v
_v = arg.pop("textpositionsrc", None)
_v = textpositionsrc if textpositionsrc is not None else _v
if _v is not None:
self["textpositionsrc"] = _v
_v = arg.pop("textsrc", None)
_v = textsrc if textsrc is not None else _v
if _v is not None:
self["textsrc"] = _v
_v = arg.pop("texttemplate", None)
_v = texttemplate if texttemplate is not None else _v
if _v is not None:
self["texttemplate"] = _v
_v = arg.pop("texttemplatesrc", None)
_v = texttemplatesrc if texttemplatesrc is not None else _v
if _v is not None:
self["texttemplatesrc"] = _v
_v = arg.pop("uid", None)
_v = uid if uid is not None else _v
if _v is not None:
self["uid"] = _v
_v = arg.pop("uirevision", None)
_v = uirevision if uirevision is not None else _v
if _v is not None:
self["uirevision"] = _v
_v = arg.pop("visible", None)
_v = visible if visible is not None else _v
if _v is not None:
self["visible"] = _v
_v = arg.pop("width", None)
_v = width if width is not None else _v
if _v is not None:
self["width"] = _v
_v = arg.pop("x", None)
_v = x if x is not None else _v
if _v is not None:
self["x"] = _v
_v = arg.pop("x0", None)
_v = x0 if x0 is not None else _v
if _v is not None:
self["x0"] = _v
_v = arg.pop("xaxis", None)
_v = xaxis if xaxis is not None else _v
if _v is not None:
self["xaxis"] = _v
_v = arg.pop("xhoverformat", None)
_v = xhoverformat if xhoverformat is not None else _v
if _v is not None:
self["xhoverformat"] = _v
_v = arg.pop("xperiod", None)
_v = xperiod if xperiod is not None else _v
if _v is not None:
self["xperiod"] = _v
_v = arg.pop("xperiod0", None)
_v = xperiod0 if xperiod0 is not None else _v
if _v is not None:
self["xperiod0"] = _v
_v = arg.pop("xperiodalignment", None)
_v = xperiodalignment if xperiodalignment is not None else _v
if _v is not None:
self["xperiodalignment"] = _v
_v = arg.pop("xsrc", None)
_v = xsrc if xsrc is not None else _v
if _v is not None:
self["xsrc"] = _v
_v = arg.pop("y", None)
_v = y if y is not None else _v
if _v is not None:
self["y"] = _v
_v = arg.pop("y0", None)
_v = y0 if y0 is not None else _v
if _v is not None:
self["y0"] = _v
_v = arg.pop("yaxis", None)
_v = yaxis if yaxis is not None else _v
if _v is not None:
self["yaxis"] = _v
_v = arg.pop("yhoverformat", None)
_v = yhoverformat if yhoverformat is not None else _v
if _v is not None:
self["yhoverformat"] = _v
_v = arg.pop("yperiod", None)
_v = yperiod if yperiod is not None else _v
if _v is not None:
self["yperiod"] = _v
_v = arg.pop("yperiod0", None)
_v = yperiod0 if yperiod0 is not None else _v
if _v is not None:
self["yperiod0"] = _v
_v = arg.pop("yperiodalignment", None)
_v = yperiodalignment if yperiodalignment is not None else _v
if _v is not None:
self["yperiodalignment"] = _v
_v = arg.pop("ysrc", None)
_v = ysrc if ysrc is not None else _v
if _v is not None:
self["ysrc"] = _v
self._props["type"] = "funnel"
arg.pop("type", None)
self._process_kwargs(**dict(arg, **kwargs))
self._skip_invalid = False
| true | true |
f73d40bb82c0208472099724a0f1c8fc32ee555e | 259 | py | Python | invenio_users_resources/records/mappings/v6/__init__.py | max-moser/invenio-users-resources | fcba667d4fcbf5df9cd0dcbd8118c041a1add6e7 | [
"MIT"
] | null | null | null | invenio_users_resources/records/mappings/v6/__init__.py | max-moser/invenio-users-resources | fcba667d4fcbf5df9cd0dcbd8118c041a1add6e7 | [
"MIT"
] | 7 | 2022-01-27T09:39:58.000Z | 2022-03-29T11:33:52.000Z | invenio_users_resources/records/mappings/v6/__init__.py | max-moser/invenio-users-resources | fcba667d4fcbf5df9cd0dcbd8118c041a1add6e7 | [
"MIT"
] | 4 | 2022-01-27T10:42:57.000Z | 2022-02-16T11:53:16.000Z | # -*- coding: utf-8 -*-
#
# Copyright (C) 2022 TU Wien.
#
# Invenio-Users-Resources is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""Elasticsearch version 6 mappings."""
| 25.9 | 74 | 0.706564 | true | true | |
f73d413ddc2041b0e4a587441969c56b3d8ee08c | 633 | py | Python | arvinddhindsa/manage.py | Dhindsa91/Django-blog | 072dfecdc13839370aca9fc5a062eb8f9eea17b5 | [
"bzip2-1.0.6"
] | null | null | null | arvinddhindsa/manage.py | Dhindsa91/Django-blog | 072dfecdc13839370aca9fc5a062eb8f9eea17b5 | [
"bzip2-1.0.6"
] | null | null | null | arvinddhindsa/manage.py | Dhindsa91/Django-blog | 072dfecdc13839370aca9fc5a062eb8f9eea17b5 | [
"bzip2-1.0.6"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks via Django's command-line interface."""
    # Point Django at this project's settings module unless the caller
    # has already configured one in the environment.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'arvinddhindsa.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; the usual cause is a missing
        # install or an inactive virtual environment.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the sub-command named on the command line.
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| 28.772727 | 77 | 0.685624 |
import os
import sys
def main():
    """Run administrative tasks via Django's command-line interface."""
    # Point Django at this project's settings module unless the caller
    # has already configured one in the environment.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'arvinddhindsa.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; the usual cause is a missing
        # install or an inactive virtual environment.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the sub-command named on the command line.
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| true | true |
f73d4212a3c0e401c47f044073a77f35f5bcc306 | 1,484 | py | Python | robo/maximizers/differential_evolution.py | lebrice/RoBO | 0cb58a1622d3a540f7714b239f0cedf048b6fd9f | [
"BSD-3-Clause"
] | 455 | 2015-04-02T06:12:13.000Z | 2022-02-28T10:54:29.000Z | robo/maximizers/differential_evolution.py | lebrice/RoBO | 0cb58a1622d3a540f7714b239f0cedf048b6fd9f | [
"BSD-3-Clause"
] | 66 | 2015-04-07T15:20:55.000Z | 2021-06-04T16:40:46.000Z | robo/maximizers/differential_evolution.py | lebrice/RoBO | 0cb58a1622d3a540f7714b239f0cedf048b6fd9f | [
"BSD-3-Clause"
] | 188 | 2015-04-14T09:42:34.000Z | 2022-03-31T21:04:53.000Z | import sys
import numpy as np
import scipy as sp
import scipy.optimize  # explicit: ``import scipy`` alone does not load the
                       # ``optimize`` subpackage on older SciPy releases

from robo.maximizers.base_maximizer import BaseMaximizer
class DifferentialEvolution(BaseMaximizer):
    """Acquisition-function maximizer based on SciPy's differential
    evolution (a global, derivative-free optimizer)."""

    def __init__(self, objective_function, lower, upper, n_iters=20, rng=None):
        """
        Parameters
        ----------
        objective_function: acquisition function
            The acquisition function which will be maximized
        lower: np.ndarray (D)
            Lower bounds of the input space
        upper: np.ndarray (D)
            Upper bounds of the input space
        n_iters: int
            Number of iterations (generations) of the evolution
        rng: numpy.random.RandomState, optional
            Random number generator, forwarded to the base class
        """
        self.n_iters = n_iters
        super(DifferentialEvolution, self).__init__(objective_function, lower, upper, rng)

    def _acquisition_fkt_wrapper(self, acq_f):
        # differential_evolution *minimizes*, so negate the acquisition
        # value.  Candidate points are clipped back into the bounds, and
        # infinite objective values are replaced by the largest finite
        # float so the optimizer never sees inf/-inf.
        def _l(x):
            a = -acq_f(np.array([np.clip(x, self.lower, self.upper)]))
            if np.any(np.isinf(a)):
                return sys.float_info.max
            return a
        return _l

    def maximize(self):
        """
        Maximizes the given acquisition function.

        Returns
        -------
        np.ndarray(N,D)
            Point with highest acquisition value.
        """
        bounds = list(zip(self.lower, self.upper))
        # NOTE: relies on ``scipy.optimize`` being imported explicitly at
        # module level; bare ``import scipy`` does not guarantee the
        # subpackage is available on older SciPy versions.
        res = sp.optimize.differential_evolution(
            self._acquisition_fkt_wrapper(self.objective_func),
            bounds, maxiter=self.n_iters)
        # ``res.x`` is the optimum found; clip it back into the input
        # space before returning (attribute access is the documented
        # OptimizeResult API, equivalent to ``res["x"]``).
        return np.clip(res.x, self.lower, self.upper)
| 28.538462 | 100 | 0.59097 | import sys
import numpy as np
import scipy as sp
from robo.maximizers.base_maximizer import BaseMaximizer
class DifferentialEvolution(BaseMaximizer):
    """Acquisition-function maximizer based on SciPy's differential
    evolution."""
    def __init__(self, objective_function, lower, upper, n_iters=20, rng=None):
        """
        Parameters
        ----------
        objective_function: acquisition function
            The acquisition function which will be maximized
        lower: np.ndarray (D)
            Lower bounds of the input space
        upper: np.ndarray (D)
            Upper bounds of the input space
        n_iters: int
            Number of iterations
        rng: numpy.random.RandomState, optional
            Random number generator, forwarded to the base class
        """
        self.n_iters = n_iters
        super(DifferentialEvolution, self).__init__(objective_function, lower, upper, rng)
    def _acquisition_fkt_wrapper(self, acq_f):
        # differential_evolution minimizes, so negate the acquisition
        # value; clip candidates into the bounds and replace infinite
        # values with the largest finite float.
        def _l(x):
            a = -acq_f(np.array([np.clip(x, self.lower, self.upper)]))
            if np.any(np.isinf(a)):
                return sys.float_info.max
            return a
        return _l
    def maximize(self):
        """Maximize the acquisition function; returns the point with the
        highest acquisition value, clipped into the input space."""
        bounds = list(zip(self.lower, self.upper))
        # NOTE(review): ``sp.optimize`` assumes the optimize subpackage
        # is already loaded; on older SciPy this needs an explicit
        # ``import scipy.optimize`` -- verify against the environment.
        res = sp.optimize.differential_evolution(self._acquisition_fkt_wrapper(self.objective_func),
                                                 bounds, maxiter=self.n_iters)
        return np.clip(res["x"], self.lower, self.upper)
| true | true |
f73d42c3790ee20048eca8b989b817adefc2d236 | 622 | py | Python | debug.py | yemaozi88/Intelligibility-MetricGAN | 7aa080aa78f85ada2d85dd5415c3c593fe207891 | [
"BSD-3-Clause"
] | 41 | 2020-04-03T04:37:21.000Z | 2022-02-11T13:46:47.000Z | debug.py | calculusoflambdas/Intelligibility-MetricGAN | 883aa9b022af29a517fb50e860933942367ccf07 | [
"BSD-3-Clause"
] | 4 | 2020-04-06T07:19:56.000Z | 2021-02-04T09:46:11.000Z | debug.py | calculusoflambdas/Intelligibility-MetricGAN | 883aa9b022af29a517fb50e860933942367ccf07 | [
"BSD-3-Clause"
] | 12 | 2020-04-03T15:09:46.000Z | 2021-09-02T10:51:01.000Z | from audio_util import *
from dataloader import *
# Smoke-test the generator ('G') and discriminator ('D') dataloaders by
# pulling one batch from each.
noise_path = '/home/smg/haoyuli/SiibGAN/database/Train/Noise/'
clean_path = '/home/smg/haoyuli/SiibGAN/database/Train/Clean/'

# Generator-side loader, built from the clean training files
# (clean_path holds the same directory the literal duplicated before).
gen_file_list = get_filepaths(clean_path)
genloader = create_dataloader(gen_file_list, noise_path)
x = iter(genloader)
# ``next(x)`` instead of ``x.next()``: the latter is Python-2 only and
# raises AttributeError on Python 3 iterators; ``next()`` works on both.
bandClean, bandNoise, clean_mag, clean_phase, noise_mag, noise_phase, target = next(x)

# Discriminator-side loader: entries are "score,path" strings pointing
# at enhanced audio.
enh_file_list = ['0.2228,/home/smg/haoyuli/SiibGAN/database/Train/Enhance/Train_32.wav']
disloader = create_dataloader(enh_file_list, noise_path, clean_path, 'D')
x = iter(disloader)
a, b, c = next(x)
| 29.619048 | 88 | 0.782958 | from audio_util import *
from dataloader import *
# Smoke-test the generator and discriminator dataloaders by pulling one
# batch from each.
noise_path = '/home/smg/haoyuli/SiibGAN/database/Train/Noise/'
clean_path = '/home/smg/haoyuli/SiibGAN/database/Train/Clean/'
# Generator-side loader, built from the clean training files.
gen_file_list = get_filepaths('/home/smg/haoyuli/SiibGAN/database/Train/Clean/')
genloader = create_dataloader(gen_file_list,noise_path)
x = iter(genloader)
# NOTE(review): ``x.next()`` is Python-2 iterator syntax; on Python 3
# iterators this raises AttributeError -- confirm the target interpreter
# (``next(x)`` would work on both).
bandClean,bandNoise,clean_mag,clean_phase,noise_mag,noise_phase, target = x.next()
# Discriminator-side loader: entries are "score,path" strings.
enh_file_list = ['0.2228,/home/smg/haoyuli/SiibGAN/database/Train/Enhance/Train_32.wav']
disloader = create_dataloader(enh_file_list,noise_path,clean_path,'D')
x = iter(disloader)
a,b,c = x.next()
| true | true |
f73d43091f3eaf436c6022660f2def62abd16e3e | 48,352 | py | Python | pyqtgraph/multiprocess/remoteproxy.py | pbmanis/pyqtgraph | 3558216be2b50d6b0069c82e51e5a048dad34c73 | [
"MIT"
] | 150 | 2018-03-27T16:45:37.000Z | 2022-03-30T03:47:56.000Z | pyqtgraph/multiprocess/remoteproxy.py | Jhongesell/pyqtgraph | 229f650adfd04053213fe6567d6308a4751a349b | [
"MIT"
] | 34 | 2018-09-28T00:01:59.000Z | 2022-03-21T15:40:02.000Z | pyqtgraph/multiprocess/remoteproxy.py | Jhongesell/pyqtgraph | 229f650adfd04053213fe6567d6308a4751a349b | [
"MIT"
] | 40 | 2018-04-06T19:42:21.000Z | 2022-01-11T00:34:17.000Z | import os, time, sys, traceback, weakref
import numpy as np
import threading
try:
import __builtin__ as builtins
import cPickle as pickle
except ImportError:
import builtins
import pickle
# color printing for debugging
from ..util import cprint
class ClosedError(Exception):
"""Raised when an event handler receives a request to close the connection
or discovers that the connection has been closed."""
pass
class NoResultError(Exception):
"""Raised when a request for the return value of a remote call fails
because the call has not yet returned."""
pass
class RemoteEventHandler(object):
"""
This class handles communication between two processes. One instance is present on
each process and listens for communication from the other process. This enables
(amongst other things) ObjectProxy instances to look up their attributes and call
their methods.
This class is responsible for carrying out actions on behalf of the remote process.
Each instance holds one end of a Connection which allows python
objects to be passed between processes.
For the most common operations, see _import(), close(), and transfer()
To handle and respond to incoming requests, RemoteEventHandler requires that its
processRequests method is called repeatedly (this is usually handled by the Process
classes defined in multiprocess.processes).
"""
handlers = {} ## maps {process ID : handler}. This allows unpickler to determine which process
## an object proxy belongs to
def __init__(self, connection, name, pid, debug=False):
self.debug = debug
self.conn = connection
self.name = name
self.results = {} ## reqId: (status, result); cache of request results received from the remote process
## status is either 'result' or 'error'
## if 'error', then result will be (exception, formatted exceprion)
## where exception may be None if it could not be passed through the Connection.
self.resultLock = threading.RLock()
self.proxies = {} ## maps {weakref(proxy): proxyId}; used to inform the remote process when a proxy has been deleted.
self.proxyLock = threading.RLock()
## attributes that affect the behavior of the proxy.
## See ObjectProxy._setProxyOptions for description
self.proxyOptions = {
'callSync': 'sync', ## 'sync', 'async', 'off'
'timeout': 10, ## float
'returnType': 'auto', ## 'proxy', 'value', 'auto'
'autoProxy': False, ## bool
'deferGetattr': False, ## True, False
'noProxyTypes': [ type(None), str, int, float, tuple, list, dict, LocalObjectProxy, ObjectProxy ],
}
if int(sys.version[0]) < 3:
self.proxyOptions['noProxyTypes'].append(unicode)
else:
self.proxyOptions['noProxyTypes'].append(bytes)
self.optsLock = threading.RLock()
self.nextRequestId = 0
self.exited = False
# Mutexes to help prevent issues when multiple threads access the same RemoteEventHandler
self.processLock = threading.RLock()
self.sendLock = threading.RLock()
RemoteEventHandler.handlers[pid] = self ## register this handler as the one communicating with pid
@classmethod
def getHandler(cls, pid):
try:
return cls.handlers[pid]
except:
print(pid, cls.handlers)
raise
def debugMsg(self, msg, *args):
if not self.debug:
return
cprint.cout(self.debug, "[%d] %s\n" % (os.getpid(), str(msg)%args), -1)
def getProxyOption(self, opt):
with self.optsLock:
return self.proxyOptions[opt]
def setProxyOptions(self, **kwds):
"""
Set the default behavior options for object proxies.
See ObjectProxy._setProxyOptions for more info.
"""
with self.optsLock:
self.proxyOptions.update(kwds)
def processRequests(self):
    """Process all pending requests from the pipe, return
    after no more events are immediately available. (non-blocking)
    Returns the number of events processed.

    Raises ClosedError if the connection has already been closed (and
    marks this handler exited if the close is discovered here).
    """
    with self.processLock:

        if self.exited:
            self.debugMsg(' processRequests: exited already; raise ClosedError.')
            raise ClosedError()

        numProcessed = 0

        # poll() is non-blocking: loop only while data is already waiting.
        while self.conn.poll():
            #try:
                #poll = self.conn.poll()
                #if not poll:
                    #break
            #except IOError:  # this can happen if the remote process dies.
                             ## might it also happen in other circumstances?
                #raise ClosedError()

            try:
                self.handleRequest()
                numProcessed += 1
            except ClosedError:
                # Connection dropped mid-request: remember so that later
                # calls fail fast, then propagate to the caller.
                self.debugMsg('processRequests: got ClosedError from handleRequest; setting exited=True.')
                self.exited = True
                raise
            #except IOError as err:  ## let handleRequest take care of this.
                #self.debugMsg('  got IOError from handleRequest; try again.')
                #if err.errno == 4:  ## interrupted system call; try again
                    #continue
                #else:
                    #raise
            except:
                # Any other failure is reported but does not stop the loop;
                # remaining queued requests are still serviced.
                print("Error in process %s" % self.name)
                sys.excepthook(*sys.exc_info())

        if numProcessed > 0:
            self.debugMsg('processRequests: finished %d requests', numProcessed)
        return numProcessed
def handleRequest(self):
    """Handle a single request from the remote process.
    Blocks until a request is available.

    A request arrives as a 4-tuple (cmd, reqId, nByteMsgs, optStr):
    *optStr* is a separately-pickled option dict, and *nByteMsgs* raw
    byte messages (used to ship ndarray data cheaply) follow on the
    connection.  When the request carries a reqId, a 'result' or
    'error' reply is sent back to the peer.  Raises ClosedError when
    the connection is gone or a 'close' command is received.
    """
    result = None
    while True:
        try:
            ## args, kwds are double-pickled to ensure this recv() call never fails
            cmd, reqId, nByteMsgs, optStr = self.conn.recv()
            break
        except EOFError:
            self.debugMsg(' handleRequest: got EOFError from recv; raise ClosedError.')
            ## remote process has shut down; end event loop
            raise ClosedError()
        except IOError as err:
            if err.errno == 4:  ## interrupted system call; try again
                self.debugMsg(' handleRequest: got IOError 4 from recv; try again.')
                continue
            else:
                self.debugMsg(' handleRequest: got IOError %d from recv (%s); raise ClosedError.', err.errno, err.strerror)
                raise ClosedError()

    self.debugMsg(" handleRequest: received %s %s", cmd, reqId)

    ## read byte messages following the main request
    byteData = []
    if nByteMsgs > 0:
        self.debugMsg(" handleRequest: reading %d byte messages", nByteMsgs)
        for i in range(nByteMsgs):
            while True:
                try:
                    byteData.append(self.conn.recv_bytes())
                    break
                except EOFError:
                    self.debugMsg(" handleRequest: got EOF while reading byte messages; raise ClosedError.")
                    raise ClosedError()
                except IOError as err:
                    if err.errno == 4:
                        self.debugMsg(" handleRequest: got IOError 4 while reading byte messages; try again.")
                        continue
                    else:
                        self.debugMsg(" handleRequest: got IOError while reading byte messages; raise ClosedError.")
                        raise ClosedError()

    try:
        if cmd == 'result' or cmd == 'error':
            resultId = reqId
            reqId = None  ## prevents attempt to return information from this request
                          ## (this is already a return from a previous request)

        opts = pickle.loads(optStr)
        self.debugMsg(" handleRequest: id=%s opts=%s", reqId, opts)
        #print os.getpid(), "received request:", cmd, reqId, opts
        returnType = opts.get('returnType', 'auto')

        # Dispatch on the command; see send() docstring for the protocol.
        if cmd == 'result':
            with self.resultLock:
                self.results[resultId] = ('result', opts['result'])
        elif cmd == 'error':
            with self.resultLock:
                self.results[resultId] = ('error', (opts['exception'], opts['excString']))
        elif cmd == 'getObjAttr':
            result = getattr(opts['obj'], opts['attr'])
        elif cmd == 'callObj':
            obj = opts['obj']
            fnargs = opts['args']
            fnkwds = opts['kwds']

            ## If arrays were sent as byte messages, they must be re-inserted into the
            ## arguments
            ## NOTE(review): np.fromstring is deprecated in modern numpy;
            ## np.frombuffer looks like the drop-in replacement here — confirm.
            if len(byteData) > 0:
                for i,arg in enumerate(fnargs):
                    if isinstance(arg, tuple) and len(arg) > 0 and arg[0] == '__byte_message__':
                        ind = arg[1]
                        dtype, shape = arg[2]
                        fnargs[i] = np.fromstring(byteData[ind], dtype=dtype).reshape(shape)
                for k,arg in fnkwds.items():
                    if isinstance(arg, tuple) and len(arg) > 0 and arg[0] == '__byte_message__':
                        ind = arg[1]
                        dtype, shape = arg[2]
                        fnkwds[k] = np.fromstring(byteData[ind], dtype=dtype).reshape(shape)

            if len(fnkwds) == 0:  ## need to do this because some functions do not allow keyword arguments.
                try:
                    result = obj(*fnargs)
                except:
                    print("Failed to call object %s: %d, %s" % (obj, len(fnargs), fnargs[1:]))
                    raise
            else:
                result = obj(*fnargs, **fnkwds)
        elif cmd == 'getObjValue':
            result = opts['obj']  ## has already been unpickled into its local value
            returnType = 'value'
        elif cmd == 'transfer':
            result = opts['obj']
            returnType = 'proxy'
        elif cmd == 'transferArray':
            ## read array data from next message:
            result = np.fromstring(byteData[0], dtype=opts['dtype']).reshape(opts['shape'])
            returnType = 'proxy'
        elif cmd == 'import':
            name = opts['module']
            fromlist = opts.get('fromlist', [])
            mod = builtins.__import__(name, fromlist=fromlist)

            if len(fromlist) == 0:
                # __import__ returns the top-level package; walk down to
                # the requested submodule.
                parts = name.lstrip('.').split('.')
                result = mod
                for part in parts[1:]:
                    result = getattr(result, part)
            else:
                result = map(mod.__getattr__, fromlist)
        elif cmd == 'del':
            LocalObjectProxy.releaseProxyId(opts['proxyId'])
            #del self.proxiedObjects[opts['objId']]
        elif cmd == 'close':
            if reqId is not None:
                result = True
                returnType = 'value'

        exc = None
    except:
        # Remember the failure; it is reported to the peer (or locally)
        # below, after the reply path is decided.
        exc = sys.exc_info()

    if reqId is not None:
        if exc is None:
            self.debugMsg(" handleRequest: sending return value for %d: %s", reqId, result)
            #print "returnValue:", returnValue, result
            if returnType == 'auto':
                with self.optsLock:
                    noProxyTypes = self.proxyOptions['noProxyTypes']
                result = self.autoProxy(result, noProxyTypes)
            elif returnType == 'proxy':
                result = LocalObjectProxy(result)

            try:
                self.replyResult(reqId, result)
            except:
                # The result itself failed to transmit (e.g. unpicklable);
                # report that failure to the peer instead.
                sys.excepthook(*sys.exc_info())
                self.replyError(reqId, *sys.exc_info())
        else:
            self.debugMsg(" handleRequest: returning exception for %d", reqId)
            self.replyError(reqId, *exc)
    elif exc is not None:
        # No reply was requested; at least report the error locally.
        sys.excepthook(*exc)

    if cmd == 'close':
        if opts.get('noCleanup', False) is True:
            os._exit(0)  ## exit immediately, do not pass GO, do not collect $200.
                         ## (more importantly, do not call any code that would
                         ## normally be invoked at exit)
        else:
            raise ClosedError()
def replyResult(self, reqId, result):
self.send(request='result', reqId=reqId, callSync='off', opts=dict(result=result))
def replyError(self, reqId, *exc):
print("error: %s %s %s" % (self.name, str(reqId), str(exc[1])))
excStr = traceback.format_exception(*exc)
try:
self.send(request='error', reqId=reqId, callSync='off', opts=dict(exception=exc[1], excString=excStr))
except:
self.send(request='error', reqId=reqId, callSync='off', opts=dict(exception=None, excString=excStr))
def send(self, request, opts=None, reqId=None, callSync='sync', timeout=10, returnType=None, byteData=None, **kwds):
    """Send a request or return packet to the remote process.
    Generally it is not necessary to call this method directly; it is for internal use.
    (The docstring has information that is nevertheless useful to the programmer
    as it describes the internal protocol used to communicate between processes)

    ==============  ====================================================================
    **Arguments:**
    request         String describing the type of request being sent (see below)
    reqId           Integer uniquely linking a result back to the request that generated
                    it. (most requests leave this blank)
    callSync        'sync':  return the actual result of the request
                    'async': return a Request object which can be used to look up the
                             result later
                    'off':   return no result
    timeout         Time in seconds to wait for a response when callSync=='sync'
    opts            Extra arguments sent to the remote process that determine the way
                    the request will be handled (see below)
    returnType      'proxy', 'value', or 'auto'
    byteData        If specified, this is a list of objects to be sent as byte messages
                    to the remote process.
                    This is used to send large arrays without the cost of pickling.
    ==============  ====================================================================

    Description of request strings and options allowed for each:

    =============  =============  ========================================================
    request        option         description
    -------------  -------------  --------------------------------------------------------
    getObjAttr                    Request the remote process return (proxy to) an
                                  attribute of an object.
                   obj            reference to object whose attribute should be
                                  returned
                   attr           string name of attribute to return
                   returnValue    bool or 'auto' indicating whether to return a proxy or
                                  the actual value.

    callObj                       Request the remote process call a function or
                                  method. If a request ID is given, then the call's
                                  return value will be sent back (or information
                                  about the error that occurred while running the
                                  function)
                   obj            the (reference to) object to call
                   args           tuple of arguments to pass to callable
                   kwds           dict of keyword arguments to pass to callable
                   returnValue    bool or 'auto' indicating whether to return a proxy or
                                  the actual value.

    getObjValue                   Request the remote process return the value of
                                  a proxied object (must be picklable)
                   obj            reference to object whose value should be returned

    transfer                      Copy an object to the remote process and request
                                  it return a proxy for the new object.
                   obj            The object to transfer.

    import                        Request the remote process import new symbols
                                  and return proxy(ies) to the imported objects
                   module         the string name of the module to import
                   fromlist       optional list of string names to import from module

    del                           Inform the remote process that a proxy has been
                                  released (thus the remote process may be able to
                                  release the original object)
                   proxyId        id of proxy which is no longer referenced by
                                  remote host

    close                         Instruct the remote process to stop its event loop
                                  and exit. Optionally, this request may return a
                                  confirmation.

    result                        Inform the remote process that its request has
                                  been processed
                   result         return value of a request

    error                         Inform the remote process that its request failed
                   exception      the Exception that was raised (or None if the
                                  exception could not be pickled)
                   excString      string-formatted version of the exception and
                                  traceback
    =============  =====================================================================
    """
    if self.exited:
        self.debugMsg(' send: exited already; raise ClosedError.')
        raise ClosedError()

    with self.sendLock:
        #if len(kwds) > 0:
            #print "Warning: send() ignored args:", kwds

        if opts is None:
            opts = {}

        assert callSync in ['off', 'sync', 'async'], 'callSync must be one of "off", "sync", or "async" (got %r)' % callSync
        if reqId is None:
            if callSync != 'off':  ## requested return value; use the next available request ID
                reqId = self.nextRequestId
                self.nextRequestId += 1
        else:
            ## If requestId is provided, this _must_ be a response to a previously received request.
            assert request in ['result', 'error']

        if returnType is not None:
            opts['returnType'] = returnType

        #print os.getpid(), "send request:", request, reqId, opts

        ## double-pickle args to ensure that at least status and request ID get through
        try:
            optStr = pickle.dumps(opts)
        except:
            print("==== Error pickling this object:  ====")
            print(opts)
            print("=======================================")
            raise

        nByteMsgs = 0
        if byteData is not None:
            nByteMsgs = len(byteData)

        ## Send primary request
        request = (request, reqId, nByteMsgs, optStr)
        self.debugMsg('send request: cmd=%s nByteMsgs=%d id=%s opts=%s', request[0], nByteMsgs, reqId, opts)
        self.conn.send(request)

        ## follow up by sending byte messages
        if byteData is not None:
            for obj in byteData:  ## Remote process _must_ be prepared to read the same number of byte messages!
                self.conn.send_bytes(obj)
            self.debugMsg(' sent %d byte messages', len(byteData))

    # Reply handling happens outside the send lock so other threads may
    # send while we wait.
    self.debugMsg(' call sync: %s', callSync)
    if callSync == 'off':
        return

    req = Request(self, reqId, description=str(request), timeout=timeout)
    if callSync == 'async':
        return req

    if callSync == 'sync':
        return req.result()
def close(self, callSync='off', noCleanup=False, **kwds):
try:
self.send(request='close', opts=dict(noCleanup=noCleanup), callSync=callSync, **kwds)
self.exited = True
except ClosedError:
pass
def getResult(self, reqId):
    """Return (or raise) the result received for request *reqId*.

    Raises NoResultError if the result is not available yet.  If the
    remote request failed, the remote exception is re-raised here (or a
    generic Exception when the original could not be transmitted).
    """
    ## raises NoResultError if the result is not available yet
    #print self.results.keys(), os.getpid()
    with self.resultLock:
        haveResult = reqId in self.results

    if not haveResult:
        try:
            # Drain the pipe; the reply may simply not have been read yet.
            self.processRequests()
        except ClosedError:  ## even if remote connection has closed, we may have
                             ## received new data during this call to processRequests()
            pass

    with self.resultLock:
        if reqId not in self.results:
            raise NoResultError()
        status, result = self.results.pop(reqId)

    if status == 'result':
        return result
    elif status == 'error':
        #print ''.join(result)
        exc, excStr = result
        if exc is not None:
            print("===== Remote process raised exception on request: =====")
            print(''.join(excStr))
            print("===== Local Traceback to request follows: =====")
            raise exc
        else:
            # The remote exception could not be pickled; only its
            # formatted text is available.
            print(''.join(excStr))
            raise Exception("Error getting result. See above for exception from remote process.")
    else:
        raise Exception("Internal error.")
def _import(self, mod, **kwds):
"""
Request the remote process import a module (or symbols from a module)
and return the proxied results. Uses built-in __import__() function, but
adds a bit more processing:
_import('module') => returns module
_import('module.submodule') => returns submodule
(note this differs from behavior of __import__)
_import('module', fromlist=[name1, name2, ...]) => returns [module.name1, module.name2, ...]
(this also differs from behavior of __import__)
"""
return self.send(request='import', callSync='sync', opts=dict(module=mod), **kwds)
def getObjAttr(self, obj, attr, **kwds):
    """Ask the remote process for attribute *attr* of proxied *obj*;
    extra keywords are send() options (callSync, returnType, ...)."""
    return self.send(request='getObjAttr', opts=dict(obj=obj, attr=attr), **kwds)

def getObjValue(self, obj, **kwds):
    """Ask the remote process for the actual (pickled) value of proxied
    *obj*; extra keywords are send() options."""
    return self.send(request='getObjValue', opts=dict(obj=obj), **kwds)
def callObj(self, obj, args, kwds, **opts):
opts = opts.copy()
args = list(args)
## Decide whether to send arguments by value or by proxy
with self.optsLock:
noProxyTypes = opts.pop('noProxyTypes', None)
if noProxyTypes is None:
noProxyTypes = self.proxyOptions['noProxyTypes']
autoProxy = opts.pop('autoProxy', self.proxyOptions['autoProxy'])
if autoProxy is True:
args = [self.autoProxy(v, noProxyTypes) for v in args]
for k, v in kwds.items():
opts[k] = self.autoProxy(v, noProxyTypes)
byteMsgs = []
## If there are arrays in the arguments, send those as byte messages.
## We do this because pickling arrays is too expensive.
for i,arg in enumerate(args):
if arg.__class__ == np.ndarray:
args[i] = ("__byte_message__", len(byteMsgs), (arg.dtype, arg.shape))
byteMsgs.append(arg)
for k,v in kwds.items():
if v.__class__ == np.ndarray:
kwds[k] = ("__byte_message__", len(byteMsgs), (v.dtype, v.shape))
byteMsgs.append(v)
return self.send(request='callObj', opts=dict(obj=obj, args=args, kwds=kwds), byteData=byteMsgs, **opts)
def registerProxy(self, proxy):
with self.proxyLock:
ref = weakref.ref(proxy, self.deleteProxy)
self.proxies[ref] = proxy._proxyId
def deleteProxy(self, ref):
if self.send is None:
# this can happen during shutdown
return
with self.proxyLock:
proxyId = self.proxies.pop(ref)
try:
self.send(request='del', opts=dict(proxyId=proxyId), callSync='off')
except ClosedError: ## if remote process has closed down, there is no need to send delete requests anymore
pass
def transfer(self, obj, **kwds):
"""
Transfer an object by value to the remote host (the object must be picklable)
and return a proxy for the new remote object.
"""
if obj.__class__ is np.ndarray:
opts = {'dtype': obj.dtype, 'shape': obj.shape}
return self.send(request='transferArray', opts=opts, byteData=[obj], **kwds)
else:
return self.send(request='transfer', opts=dict(obj=obj), **kwds)
def autoProxy(self, obj, noProxyTypes):
## Return object wrapped in LocalObjectProxy _unless_ its type is in noProxyTypes.
for typ in noProxyTypes:
if isinstance(obj, typ):
return obj
return LocalObjectProxy(obj)
class Request(object):
    """
    Request objects are returned when calling an ObjectProxy in asynchronous mode
    or if a synchronous call has timed out. Use hasResult() to ask whether
    the result of the call has been returned yet. Use result() to get
    the returned value.
    """
    def __init__(self, process, reqId, description=None, timeout=10):
        # *process* is the RemoteEventHandler that sent the request;
        # *timeout* is the default wait used by result().
        self.proc = process
        self.description = description
        self.reqId = reqId
        self.gotResult = False   # True once _result is valid
        self._result = None
        self.timeout = timeout

    def result(self, block=True, timeout=None):
        """
        Return the result for this request.

        If block is True, wait until the result has arrived or *timeout* seconds passes.
        If the timeout is reached, raise NoResultError. (use timeout=None to disable)
        If block is False, raise NoResultError immediately if the result has not arrived yet.

        If the process's connection has closed before the result arrives, raise ClosedError.
        """
        if self.gotResult:
            return self._result

        if timeout is None:
            timeout = self.timeout

        if block:
            start = time.time()
            # Poll via hasResult(), which also drains the pipe each pass.
            while not self.hasResult():
                if self.proc.exited:
                    raise ClosedError()
                time.sleep(0.005)
                # timeout < 0 means "wait forever"
                if timeout >= 0 and time.time() - start > timeout:
                    print("Request timed out: %s" % self.description)
                    import traceback
                    traceback.print_stack()
                    raise NoResultError()
            return self._result
        else:
            # Raises NoResultError if the result is not available yet.
            self._result = self.proc.getResult(self.reqId)
            self.gotResult = True
            return self._result

    def hasResult(self):
        """Returns True if the result for this request has arrived."""
        try:
            self.result(block=False)
        except NoResultError:
            pass
        return self.gotResult
class LocalObjectProxy(object):
    """
    Used for wrapping local objects to ensure that they are send by proxy to a remote host.
    Note that 'proxy' is just a shorter alias for LocalObjectProxy.

    For example::

        data = [1,2,3,4,5]
        remotePlot.plot(data)         ## by default, lists are pickled and sent by value
        remotePlot.plot(proxy(data))  ## force the object to be sent by proxy
    """
    nextProxyId = 0      # class-wide counter for unique proxy ids
    proxiedObjects = {}  ## maps {proxyId: object}; keeps proxied objects alive

    @classmethod
    def registerObject(cls, obj):
        ## assign it a unique ID so we can keep a reference to the local object
        pid = cls.nextProxyId
        cls.nextProxyId += 1
        cls.proxiedObjects[pid] = obj
        #print "register:", cls.proxiedObjects
        return pid

    @classmethod
    def lookupProxyId(cls, pid):
        # Return the local object registered under proxy id *pid*
        # (KeyError if it was already released).
        return cls.proxiedObjects[pid]

    @classmethod
    def releaseProxyId(cls, pid):
        # Drop our reference; the object may now be garbage-collected.
        del cls.proxiedObjects[pid]
        #print "release:", cls.proxiedObjects

    def __init__(self, obj, **opts):
        """
        Create a 'local' proxy object that, when sent to a remote host,
        will appear as a normal ObjectProxy to *obj*.
        Any extra keyword arguments are passed to proxy._setProxyOptions()
        on the remote side.
        """
        self.processId = os.getpid()
        #self.objectId = id(obj)
        self.typeStr = repr(obj)
        #self.handler = handler
        self.obj = obj
        self.opts = opts

    def __reduce__(self):
        ## a proxy is being pickled and sent to a remote process.
        ## every time this happens, a new proxy will be generated in the remote process,
        ## so we keep a new ID so we can track when each is released.
        pid = LocalObjectProxy.registerObject(self.obj)
        return (unpickleObjectProxy, (self.processId, pid, self.typeStr, None, self.opts))

## alias
proxy = LocalObjectProxy
def unpickleObjectProxy(processId, proxyId, typeStr, attributes=None, opts=None):
    """Reconstruct a pickled proxy on the receiving side.

    When unpickled in the process that owns the object (*processId*
    matches our pid), return the real object (optionally walking the
    deferred *attributes* chain).  Otherwise build an ObjectProxy
    referring back to the owning process, applying *opts* via
    _setProxyOptions when provided.
    """
    if processId != os.getpid():
        # Crossed into another process: materialize a remote proxy.
        newProxy = ObjectProxy(processId, proxyId=proxyId, typeStr=typeStr)
        if opts is not None:
            newProxy._setProxyOptions(**opts)
        return newProxy
    # Unpickled in the originating process: hand back the real object.
    obj = LocalObjectProxy.lookupProxyId(proxyId)
    if attributes is not None:
        for attr in attributes:
            obj = getattr(obj, attr)
    return obj
class ObjectProxy(object):
    """
    Proxy to an object stored by the remote process. Proxies are created
    by calling Process._import(), Process.transfer(), or by requesting/calling
    attributes on existing proxy objects.

    For the most part, this object can be used exactly as if it
    were a local object::

        rsys = proc._import('sys')  # returns proxy to sys module on remote process
        rsys.stdout                 # proxy to remote sys.stdout
        rsys.stdout.write           # proxy to remote sys.stdout.write
        rsys.stdout.write('hello')  # calls sys.stdout.write('hello') on remote machine
                                    # and returns the result (None)

    When calling a proxy to a remote function, the call can be made synchronous
    (result of call is returned immediately), asynchronous (result is returned later),
    or return can be disabled entirely::

        ros = proc._import('os')

        ## synchronous call; result is returned immediately
        pid = ros.getpid()

        ## asynchronous call
        request = ros.getpid(_callSync='async')
        while not request.hasResult():
            time.sleep(0.01)
        pid = request.result()

        ## disable return when we know it isn't needed
        rsys.stdout.write('hello', _callSync='off')

    Additionally, values returned from a remote function call are automatically
    returned either by value (must be picklable) or by proxy.
    This behavior can be forced::

        rnp = proc._import('numpy')
        arrProxy = rnp.array([1,2,3,4], _returnType='proxy')
        arrValue = rnp.array([1,2,3,4], _returnType='value')

    The default callSync and returnType behaviors (as well as others) can be set
    for each proxy individually using ObjectProxy._setProxyOptions() or globally using
    proc.setProxyOptions().
    """
    def __init__(self, processId, proxyId, typeStr='', parent=None):
        # *parent* is accepted but not used in this initializer.
        object.__init__(self)
        ## can't set attributes directly because setattr is overridden.
        self.__dict__['_processId'] = processId
        self.__dict__['_typeStr'] = typeStr
        self.__dict__['_proxyId'] = proxyId
        self.__dict__['_attributes'] = ()
        ## attributes that affect the behavior of the proxy.
        ## in all cases, a value of None causes the proxy to ask
        ## its parent event handler to make the decision
        self.__dict__['_proxyOptions'] = {
            'callSync': None,      ## 'sync', 'async', None
            'timeout': None,       ## float, None
            'returnType': None,    ## 'proxy', 'value', 'auto', None
            'deferGetattr': None,  ## True, False, None
            'noProxyTypes': None,  ## list of types to send by value instead of by proxy
            'autoProxy': None,
        }
        self.__dict__['_handler'] = RemoteEventHandler.getHandler(processId)
        self.__dict__['_handler'].registerProxy(self)  ## handler will watch proxy; inform remote process when the proxy is deleted.
def _setProxyOptions(self, **kwds):
    """
    Change the behavior of this proxy. For all options, a value of None
    will cause the proxy to instead use the default behavior defined
    by its parent Process.

    Options are:

    =============  =============================================================
    callSync       'sync', 'async', 'off', or None.
                   If 'async', then calling methods will return a Request object
                   which can be used to inquire later about the result of the
                   method call.
                   If 'sync', then calling a method
                   will block until the remote process has returned its result
                   or the timeout has elapsed (in this case, a Request object
                   is returned instead).
                   If 'off', then the remote process is instructed _not_ to
                   reply and the method call will return None immediately.
    returnType     'auto', 'proxy', 'value', or None.
                   If 'proxy', then the value returned when calling a method
                   will be a proxy to the object on the remote process.
                   If 'value', then attempt to pickle the returned object and
                   send it back.
                   If 'auto', then the decision is made by consulting the
                   'noProxyTypes' option.
    autoProxy      bool or None. If True, arguments to __call__ are
                   automatically converted to proxy unless their type is
                   listed in noProxyTypes (see below). If False, arguments
                   are left untouched. Use proxy(obj) to manually convert
                   arguments before sending.
    timeout        float or None. Length of time to wait during synchronous
                   requests before returning a Request object instead.
    deferGetattr   True, False, or None.
                   If False, all attribute requests will be sent to the remote
                   process immediately and will block until a response is
                   received (or timeout has elapsed).
                   If True, requesting an attribute from the proxy returns a
                   new proxy immediately. The remote process is _not_ contacted
                   to make this request. This is faster, but it is possible to
                   request an attribute that does not exist on the proxied
                   object. In this case, AttributeError will not be raised
                   until an attempt is made to look up the attribute on the
                   remote process.
    noProxyTypes   List of object types that should _not_ be proxied when
                   sent to the remote process.
    =============  =============================================================
    """
    # Reject unknown option names up front so typos fail loudly.
    for k in kwds:
        if k not in self._proxyOptions:
            raise KeyError("Unrecognized proxy option '%s'" % k)
    self._proxyOptions.update(kwds)
def _getValue(self):
"""
Return the value of the proxied object
(the remote object must be picklable)
"""
return self._handler.getObjValue(self)
def _getProxyOption(self, opt):
val = self._proxyOptions[opt]
if val is None:
return self._handler.getProxyOption(opt)
return val
def _getProxyOptions(self):
return dict([(k, self._getProxyOption(k)) for k in self._proxyOptions])
def __reduce__(self):
    """Pickle support: the receiving side rebuilds the proxy (or the
    real object, if unpickled in the owning process) via
    unpickleObjectProxy()."""
    state = (self._processId, self._proxyId, self._typeStr, self._attributes)
    return (unpickleObjectProxy, state)
def __repr__(self):
#objRepr = self.__getattr__('__repr__')(callSync='value')
return "<ObjectProxy for process %d, object 0x%x: %s >" % (self._processId, self._proxyId, self._typeStr)
def __getattr__(self, attr, **kwds):
    """
    Calls __getattr__ on the remote object and returns the attribute
    by value or by proxy depending on the options set (see
    ObjectProxy._setProxyOptions and RemoteEventHandler.setProxyOptions)

    If the option 'deferGetattr' is True for this proxy, then a new proxy object
    is returned _without_ asking the remote object whether the named attribute exists.
    This can save time when making multiple chained attribute requests,
    but may also defer a possible AttributeError until later, making
    them more difficult to debug.
    """
    opts = self._getProxyOptions()
    # Per-call overrides use an underscore prefix (e.g. _callSync='off')
    # to avoid colliding with real remote attribute/keyword names.
    for k in opts:
        if '_'+k in kwds:
            opts[k] = kwds.pop('_'+k)
    if opts['deferGetattr'] is True:
        return self._deferredAttr(attr)
    else:
        #opts = self._getProxyOptions()
        return self._handler.getObjAttr(self, attr, **opts)
def _deferredAttr(self, attr):
    # Chain a DeferredObjectProxy off this proxy; the remote process is
    # not contacted until the deferred proxy is actually used.
    return DeferredObjectProxy(self, attr)
def __call__(self, *args, **kwds):
    """
    Attempts to call the proxied object from the remote process.
    Accepts extra keyword arguments:

        _callSync    'off', 'sync', or 'async'
        _returnType  'value', 'proxy', or 'auto'

    If the remote call raises an exception on the remote process,
    it will be re-raised on the local process.
    """
    opts = self._getProxyOptions()
    # Underscore-prefixed keywords are per-call option overrides and are
    # stripped before the remaining kwds are forwarded to the remote call.
    for k in opts:
        if '_'+k in kwds:
            opts[k] = kwds.pop('_'+k)
    return self._handler.callObj(obj=self, args=args, kwds=kwds, **opts)
## Explicitly proxy special methods. Is there a better way to do this??
## (Special methods are looked up on the type, so __getattr__ alone cannot
## intercept them; each must be forwarded to the remote object explicitly.)

def _getSpecialAttr(self, attr):
    ## this just gives us an easy way to change the behavior of the special methods
    return self._deferredAttr(attr)

## -- item / attribute / conversion protocols --------------------------------
## In-place and setter variants use _callSync='off' (no reply expected).

def __getitem__(self, *args):
    return self._getSpecialAttr('__getitem__')(*args)

def __setitem__(self, *args):
    return self._getSpecialAttr('__setitem__')(*args, _callSync='off')

def __setattr__(self, *args):
    return self._getSpecialAttr('__setattr__')(*args, _callSync='off')

def __str__(self, *args):
    # str() must return a real local string, so force value return.
    return self._getSpecialAttr('__str__')(*args, _returnType='value')

def __len__(self, *args):
    return self._getSpecialAttr('__len__')(*args)

## -- arithmetic operators ---------------------------------------------------

def __add__(self, *args):
    return self._getSpecialAttr('__add__')(*args)

def __sub__(self, *args):
    return self._getSpecialAttr('__sub__')(*args)

def __div__(self, *args):
    return self._getSpecialAttr('__div__')(*args)

def __truediv__(self, *args):
    return self._getSpecialAttr('__truediv__')(*args)

def __floordiv__(self, *args):
    return self._getSpecialAttr('__floordiv__')(*args)

def __mul__(self, *args):
    return self._getSpecialAttr('__mul__')(*args)

def __pow__(self, *args):
    return self._getSpecialAttr('__pow__')(*args)

## -- in-place arithmetic (fire-and-forget) ----------------------------------

def __iadd__(self, *args):
    return self._getSpecialAttr('__iadd__')(*args, _callSync='off')

def __isub__(self, *args):
    return self._getSpecialAttr('__isub__')(*args, _callSync='off')

def __idiv__(self, *args):
    return self._getSpecialAttr('__idiv__')(*args, _callSync='off')

def __itruediv__(self, *args):
    return self._getSpecialAttr('__itruediv__')(*args, _callSync='off')

def __ifloordiv__(self, *args):
    return self._getSpecialAttr('__ifloordiv__')(*args, _callSync='off')

def __imul__(self, *args):
    return self._getSpecialAttr('__imul__')(*args, _callSync='off')

def __ipow__(self, *args):
    return self._getSpecialAttr('__ipow__')(*args, _callSync='off')

## -- shifts ------------------------------------------------------------------

def __rshift__(self, *args):
    return self._getSpecialAttr('__rshift__')(*args)

def __lshift__(self, *args):
    return self._getSpecialAttr('__lshift__')(*args)

def __irshift__(self, *args):
    return self._getSpecialAttr('__irshift__')(*args, _callSync='off')

def __ilshift__(self, *args):
    return self._getSpecialAttr('__ilshift__')(*args, _callSync='off')

## -- comparisons -------------------------------------------------------------

def __eq__(self, *args):
    return self._getSpecialAttr('__eq__')(*args)

def __ne__(self, *args):
    return self._getSpecialAttr('__ne__')(*args)

def __lt__(self, *args):
    return self._getSpecialAttr('__lt__')(*args)

def __gt__(self, *args):
    return self._getSpecialAttr('__gt__')(*args)

def __le__(self, *args):
    return self._getSpecialAttr('__le__')(*args)

def __ge__(self, *args):
    return self._getSpecialAttr('__ge__')(*args)

## -- bitwise -----------------------------------------------------------------

def __and__(self, *args):
    return self._getSpecialAttr('__and__')(*args)

def __or__(self, *args):
    return self._getSpecialAttr('__or__')(*args)

def __xor__(self, *args):
    return self._getSpecialAttr('__xor__')(*args)

def __iand__(self, *args):
    return self._getSpecialAttr('__iand__')(*args, _callSync='off')

def __ior__(self, *args):
    return self._getSpecialAttr('__ior__')(*args, _callSync='off')

def __ixor__(self, *args):
    return self._getSpecialAttr('__ixor__')(*args, _callSync='off')

def __mod__(self, *args):
    return self._getSpecialAttr('__mod__')(*args)

## -- reflected operators ------------------------------------------------------

def __radd__(self, *args):
    return self._getSpecialAttr('__radd__')(*args)

def __rsub__(self, *args):
    return self._getSpecialAttr('__rsub__')(*args)

def __rdiv__(self, *args):
    return self._getSpecialAttr('__rdiv__')(*args)

def __rfloordiv__(self, *args):
    return self._getSpecialAttr('__rfloordiv__')(*args)

def __rtruediv__(self, *args):
    return self._getSpecialAttr('__rtruediv__')(*args)

def __rmul__(self, *args):
    return self._getSpecialAttr('__rmul__')(*args)

def __rpow__(self, *args):
    return self._getSpecialAttr('__rpow__')(*args)

def __rrshift__(self, *args):
    return self._getSpecialAttr('__rrshift__')(*args)

def __rlshift__(self, *args):
    return self._getSpecialAttr('__rlshift__')(*args)

def __rand__(self, *args):
    return self._getSpecialAttr('__rand__')(*args)

def __ror__(self, *args):
    return self._getSpecialAttr('__ror__')(*args)
def __rxor__(self, *args):
return self._getSpecialAttr('__ror__')(*args)
def __rmod__(self, *args):
return self._getSpecialAttr('__rmod__')(*args)
    def __hash__(self):
        ## Required for python3 since __eq__ is defined.
        # Hash by local identity: __eq__ above is proxied to the remote process,
        # so it cannot be used to compute a hash here.
        return id(self)
class DeferredObjectProxy(ObjectProxy):
    """
    Proxy for an attribute (or chain of attributes) of a remote object,
    created without contacting the remote process.

    Normally each attribute access on an ObjectProxy costs one synchronous
    round-trip.  With ``proxy._setProxyOptions(deferGetattr=True)`` every
    lookup instead returns a DeferredObjectProxy immediately; the pending
    attribute chain is resolved in a single request when the final call is
    made.  If any attribute in the chain does not exist on the remote
    object, that call raises AttributeError.
    """
    def __init__(self, parentProxy, attribute):
        ## __setattr__ is proxied to the remote side, so populate __dict__ directly.
        d = self.__dict__
        for key in ('_processId', '_typeStr', '_proxyId', '_handler'):
            d[key] = getattr(parentProxy, key)
        d['_parent'] = parentProxy  ## hold a reference so the parent proxy stays alive
        d['_attributes'] = parentProxy._attributes + (attribute,)
        d['_proxyOptions'] = parentProxy._proxyOptions.copy()
    def __repr__(self):
        # Parent repr followed by the dotted chain of deferred attribute names.
        return '.'.join((ObjectProxy.__repr__(self),) + self._attributes)
    def _undefer(self):
        """
        Return a non-deferred ObjectProxy referencing the same remote attribute.
        """
        last = self._attributes[-1]
        return self._parent.__getattr__(last, _deferGetattr=False)
| 42.865248 | 132 | 0.544197 | import os, time, sys, traceback, weakref
import numpy as np
import threading
try:
import __builtin__ as builtins
import cPickle as pickle
except ImportError:
import builtins
import pickle
from ..util import cprint
class ClosedError(Exception):
pass
class NoResultError(Exception):
pass
class RemoteEventHandler(object):
handlers = {}
self.debug = debug
self.conn = connection
self.name = name
self.results = {} LocalObjectProxy, ObjectProxy ],
}
if int(sys.version[0]) < 3:
self.proxyOptions['noProxyTypes'].append(unicode)
else:
self.proxyOptions['noProxyTypes'].append(bytes)
self.optsLock = threading.RLock()
self.nextRequestId = 0
self.exited = False
self.processLock = threading.RLock()
self.sendLock = threading.RLock()
RemoteEventHandler.handlers[pid] = self try:
return cls.handlers[pid]
except:
print(pid, cls.handlers)
raise
def debugMsg(self, msg, *args):
if not self.debug:
return
cprint.cout(self.debug, "[%d] %s\n" % (os.getpid(), str(msg)%args), -1)
def getProxyOption(self, opt):
with self.optsLock:
return self.proxyOptions[opt]
def setProxyOptions(self, **kwds):
with self.optsLock:
self.proxyOptions.update(kwds)
def processRequests(self):
with self.processLock:
if self.exited:
self.debugMsg(' processRequests: exited already; raise ClosedError.')
raise ClosedError()
numProcessed = 0
while self.conn.poll():
try:
self.handleRequest()
numProcessed += 1
except ClosedError:
self.debugMsg('processRequests: got ClosedError from handleRequest; setting exited=True.')
self.exited = True
raise
print("Error in process %s" % self.name)
sys.excepthook(*sys.exc_info())
if numProcessed > 0:
self.debugMsg('processRequests: finished %d requests', numProcessed)
return numProcessed
def handleRequest(self):
result = None
while True:
try:
ak
except EOFError:
self.debugMsg(' handleRequest: got EOFError from recv; raise ClosedError.')
except IOError as err:
if err.errno == 4: handleRequest: got IOError 4 from recv; try again.')
continue
else:
self.debugMsg(' handleRequest: got IOError %d from recv (%s); raise ClosedError.', err.errno, err.strerror)
raise ClosedError()
self.debugMsg(" handleRequest: received %s %s", cmd, reqId)
:
self.debugMsg(" handleRequest: reading %d byte messages", nByteMsgs)
for i in range(nByteMsgs):
while True:
try:
byteData.append(self.conn.recv_bytes())
break
except EOFError:
self.debugMsg(" handleRequest: got EOF while reading byte messages; raise ClosedError.")
raise ClosedError()
except IOError as err:
if err.errno == 4:
self.debugMsg(" handleRequest: got IOError 4 while reading byte messages; try again.")
continue
else:
self.debugMsg(" handleRequest: got IOError while reading byte messages; raise ClosedError.")
raise ClosedError()
try:
if cmd == 'result' or cmd == 'error':
resultId = reqId
reqId = None (" handleRequest: id=%s opts=%s", reqId, opts)
returnType = opts.get('returnType', 'auto')
if cmd == 'result':
with self.resultLock:
self.results[resultId] = ('result', opts['result'])
elif cmd == 'error':
with self.resultLock:
self.results[resultId] = ('error', (opts['exception'], opts['excString']))
elif cmd == 'getObjAttr':
result = getattr(opts['obj'], opts['attr'])
elif cmd == 'callObj':
obj = opts['obj']
fnargs = opts['args']
fnkwds = opts['kwds']
in enumerate(fnargs):
if isinstance(arg, tuple) and len(arg) > 0 and arg[0] == '__byte_message__':
ind = arg[1]
dtype, shape = arg[2]
fnargs[i] = np.fromstring(byteData[ind], dtype=dtype).reshape(shape)
for k,arg in fnkwds.items():
if isinstance(arg, tuple) and len(arg) > 0 and arg[0] == '__byte_message__':
ind = arg[1]
dtype, shape = arg[2]
fnkwds[k] = np.fromstring(byteData[ind], dtype=dtype).reshape(shape)
if len(fnkwds) == 0: except:
print("Failed to call object %s: %d, %s" % (obj, len(fnargs), fnargs[1:]))
raise
else:
result = obj(*fnargs, **fnkwds)
elif cmd == 'getObjValue':
result = opts['obj'] elif cmd == 'transfer':
result = opts['obj']
returnType = 'proxy'
elif cmd == 'transferArray':
ing(byteData[0], dtype=opts['dtype']).reshape(opts['shape'])
returnType = 'proxy'
elif cmd == 'import':
name = opts['module']
fromlist = opts.get('fromlist', [])
mod = builtins.__import__(name, fromlist=fromlist)
if len(fromlist) == 0:
parts = name.lstrip('.').split('.')
result = mod
for part in parts[1:]:
result = getattr(result, part)
else:
result = map(mod.__getattr__, fromlist)
elif cmd == 'del':
LocalObjectProxy.releaseProxyId(opts['proxyId'])
elif cmd == 'close':
if reqId is not None:
result = True
returnType = 'value'
exc = None
except:
exc = sys.exc_info()
if reqId is not None:
if exc is None:
self.debugMsg(" handleRequest: sending return value for %d: %s", reqId, result)
if returnType == 'auto':
with self.optsLock:
noProxyTypes = self.proxyOptions['noProxyTypes']
result = self.autoProxy(result, noProxyTypes)
elif returnType == 'proxy':
result = LocalObjectProxy(result)
try:
self.replyResult(reqId, result)
except:
sys.excepthook(*sys.exc_info())
self.replyError(reqId, *sys.exc_info())
else:
self.debugMsg(" handleRequest: returning exception for %d", reqId)
self.replyError(reqId, *exc)
elif exc is not None:
sys.excepthook(*exc)
if cmd == 'close':
if opts.get('noCleanup', False) is True:
os._exit(0) def replyResult(self, reqId, result):
self.send(request='result', reqId=reqId, callSync='off', opts=dict(result=result))
def replyError(self, reqId, *exc):
print("error: %s %s %s" % (self.name, str(reqId), str(exc[1])))
excStr = traceback.format_exception(*exc)
try:
self.send(request='error', reqId=reqId, callSync='off', opts=dict(exception=exc[1], excString=excStr))
except:
self.send(request='error', reqId=reqId, callSync='off', opts=dict(exception=None, excString=excStr))
def send(self, request, opts=None, reqId=None, callSync='sync', timeout=10, returnType=None, byteData=None, **kwds):
if self.exited:
self.debugMsg(' send: exited already; raise ClosedError.')
raise ClosedError()
with self.sendLock:
if opts is None:
opts = {}
assert callSync in ['off', 'sync', 'async'], 'callSync must be one of "off", "sync", or "async" (got %r)' % callSync
if reqId is None:
if callSync != 'off': self.nextRequestId += 1
else:
rnType is not None:
opts['returnType'] = returnType
pt:
print("==== Error pickling this object: ====")
print(opts)
print("=======================================")
raise
nByteMsgs = 0
if byteData is not None:
nByteMsgs = len(byteData)
(request, reqId, nByteMsgs, optStr)
self.debugMsg('send request: cmd=%s nByteMsgs=%d id=%s opts=%s', request[0], nByteMsgs, reqId, opts)
self.conn.send(request)
:
for obj in byteData: ' sent %d byte messages', len(byteData))
self.debugMsg(' call sync: %s', callSync)
if callSync == 'off':
return
req = Request(self, reqId, description=str(request), timeout=timeout)
if callSync == 'async':
return req
if callSync == 'sync':
return req.result()
def close(self, callSync='off', noCleanup=False, **kwds):
try:
self.send(request='close', opts=dict(noCleanup=noCleanup), callSync=callSync, **kwds)
self.exited = True
except ClosedError:
pass
def getResult(self, reqId):
esult = reqId in self.results
if not haveResult:
try:
self.processRequests()
except ClosedError: reqId not in self.results:
raise NoResultError()
status, result = self.results.pop(reqId)
if status == 'result':
return result
elif status == 'error':
exc, excStr = result
if exc is not None:
print("===== Remote process raised exception on request: =====")
print(''.join(excStr))
print("===== Local Traceback to request follows: =====")
raise exc
else:
print(''.join(excStr))
raise Exception("Error getting result. See above for exception from remote process.")
else:
raise Exception("Internal error.")
def _import(self, mod, **kwds):
return self.send(request='import', callSync='sync', opts=dict(module=mod), **kwds)
def getObjAttr(self, obj, attr, **kwds):
return self.send(request='getObjAttr', opts=dict(obj=obj, attr=attr), **kwds)
def getObjValue(self, obj, **kwds):
return self.send(request='getObjValue', opts=dict(obj=obj), **kwds)
def callObj(self, obj, args, kwds, **opts):
opts = opts.copy()
args = list(args)
opts.pop('noProxyTypes', None)
if noProxyTypes is None:
noProxyTypes = self.proxyOptions['noProxyTypes']
autoProxy = opts.pop('autoProxy', self.proxyOptions['autoProxy'])
if autoProxy is True:
args = [self.autoProxy(v, noProxyTypes) for v in args]
for k, v in kwds.items():
opts[k] = self.autoProxy(v, noProxyTypes)
byteMsgs = []
byte_message__", len(byteMsgs), (arg.dtype, arg.shape))
byteMsgs.append(arg)
for k,v in kwds.items():
if v.__class__ == np.ndarray:
kwds[k] = ("__byte_message__", len(byteMsgs), (v.dtype, v.shape))
byteMsgs.append(v)
return self.send(request='callObj', opts=dict(obj=obj, args=args, kwds=kwds), byteData=byteMsgs, **opts)
def registerProxy(self, proxy):
with self.proxyLock:
ref = weakref.ref(proxy, self.deleteProxy)
self.proxies[ref] = proxy._proxyId
def deleteProxy(self, ref):
if self.send is None:
return
with self.proxyLock:
proxyId = self.proxies.pop(ref)
try:
self.send(request='del', opts=dict(proxyId=proxyId), callSync='off')
except ClosedError: p.ndarray:
opts = {'dtype': obj.dtype, 'shape': obj.shape}
return self.send(request='transferArray', opts=opts, byteData=[obj], **kwds)
else:
return self.send(request='transfer', opts=dict(obj=obj), **kwds)
def autoProxy(self, obj, noProxyTypes):
return obj
return LocalObjectProxy(obj)
class Request(object):
def __init__(self, process, reqId, description=None, timeout=10):
self.proc = process
self.description = description
self.reqId = reqId
self.gotResult = False
self._result = None
self.timeout = timeout
def result(self, block=True, timeout=None):
if self.gotResult:
return self._result
if timeout is None:
timeout = self.timeout
if block:
start = time.time()
while not self.hasResult():
if self.proc.exited:
raise ClosedError()
time.sleep(0.005)
if timeout >= 0 and time.time() - start > timeout:
print("Request timed out: %s" % self.description)
import traceback
traceback.print_stack()
raise NoResultError()
return self._result
else:
self._result = self.proc.getResult(self.reqId) self._result
def hasResult(self):
try:
self.result(block=False)
except NoResultError:
pass
return self.gotResult
class LocalObjectProxy(object):
nextProxyId = 0
proxiedObjects = {} hod
def registerObject(cls, obj):
cls.proxiedObjects[pid] = obj
return pid
@classmethod
def lookupProxyId(cls, pid):
return cls.proxiedObjects[pid]
@classmethod
def releaseProxyId(cls, pid):
del cls.proxiedObjects[pid]
def __init__(self, obj, **opts):
self.processId = os.getpid()
self.typeStr = repr(obj)
self.obj = obj
self.opts = opts
def __reduce__(self):
def unpickleObjectProxy(processId, proxyId, typeStr, attributes=None, opts=None):
if processId == os.getpid():
obj = LocalObjectProxy.lookupProxyId(proxyId)
if attributes is not None:
for attr in attributes:
obj = getattr(obj, attr)
return obj
else:
proxy = ObjectProxy(processId, proxyId=proxyId, typeStr=typeStr)
if opts is not None:
proxy._setProxyOptions(**opts)
return proxy
class ObjectProxy(object):
def __init__(self, processId, proxyId, typeStr='', parent=None):
object.__init__(self)
__dict__['_typeStr'] = typeStr
self.__dict__['_proxyId'] = proxyId
self.__dict__['_attributes'] = ()
## attributes that affect the behavior of the proxy.
## in all cases, a value of None causes the proxy to ask
## its parent event handler to make the decision
self.__dict__['_proxyOptions'] = {
'callSync': None, ## 'sync', 'async', None
'timeout': None, ## float, None
'returnType': None, ## 'proxy', 'value', 'auto', None
'deferGetattr': None, ## True, False, None
'noProxyTypes': None, ## list of types to send by value instead of by proxy
'autoProxy': None,
}
self.__dict__['_handler'] = RemoteEventHandler.getHandler(processId)
self.__dict__['_handler'].registerProxy(self) ## handler will watch proxy; inform remote process when the proxy is deleted.
def _setProxyOptions(self, **kwds):
for k in kwds:
if k not in self._proxyOptions:
raise KeyError("Unrecognized proxy option '%s'" % k)
self._proxyOptions.update(kwds)
def _getValue(self):
return self._handler.getObjValue(self)
def _getProxyOption(self, opt):
val = self._proxyOptions[opt]
if val is None:
return self._handler.getProxyOption(opt)
return val
def _getProxyOptions(self):
return dict([(k, self._getProxyOption(k)) for k in self._proxyOptions])
def __reduce__(self):
return (unpickleObjectProxy, (self._processId, self._proxyId, self._typeStr, self._attributes))
def __repr__(self):
#objRepr = self.__getattr__('__repr__')(callSync='value')
return "<ObjectProxy for process %d, object 0x%x: %s >" % (self._processId, self._proxyId, self._typeStr)
def __getattr__(self, attr, **kwds):
opts = self._getProxyOptions()
for k in opts:
if '_'+k in kwds:
opts[k] = kwds.pop('_'+k)
if opts['deferGetattr'] is True:
return self._deferredAttr(attr)
else:
#opts = self._getProxyOptions()
return self._handler.getObjAttr(self, attr, **opts)
def _deferredAttr(self, attr):
return DeferredObjectProxy(self, attr)
def __call__(self, *args, **kwds):
opts = self._getProxyOptions()
for k in opts:
if '_'+k in kwds:
opts[k] = kwds.pop('_'+k)
return self._handler.callObj(obj=self, args=args, kwds=kwds, **opts)
## Explicitly proxy special methods. Is there a better way to do this??
def _getSpecialAttr(self, attr):
## this just gives us an easy way to change the behavior of the special methods
return self._deferredAttr(attr)
def __getitem__(self, *args):
return self._getSpecialAttr('__getitem__')(*args)
def __setitem__(self, *args):
return self._getSpecialAttr('__setitem__')(*args, _callSync='off')
def __setattr__(self, *args):
return self._getSpecialAttr('__setattr__')(*args, _callSync='off')
def __str__(self, *args):
return self._getSpecialAttr('__str__')(*args, _returnType='value')
def __len__(self, *args):
return self._getSpecialAttr('__len__')(*args)
def __add__(self, *args):
return self._getSpecialAttr('__add__')(*args)
def __sub__(self, *args):
return self._getSpecialAttr('__sub__')(*args)
def __div__(self, *args):
return self._getSpecialAttr('__div__')(*args)
def __truediv__(self, *args):
return self._getSpecialAttr('__truediv__')(*args)
def __floordiv__(self, *args):
return self._getSpecialAttr('__floordiv__')(*args)
def __mul__(self, *args):
return self._getSpecialAttr('__mul__')(*args)
def __pow__(self, *args):
return self._getSpecialAttr('__pow__')(*args)
def __iadd__(self, *args):
return self._getSpecialAttr('__iadd__')(*args, _callSync='off')
def __isub__(self, *args):
return self._getSpecialAttr('__isub__')(*args, _callSync='off')
def __idiv__(self, *args):
return self._getSpecialAttr('__idiv__')(*args, _callSync='off')
def __itruediv__(self, *args):
return self._getSpecialAttr('__itruediv__')(*args, _callSync='off')
def __ifloordiv__(self, *args):
return self._getSpecialAttr('__ifloordiv__')(*args, _callSync='off')
def __imul__(self, *args):
return self._getSpecialAttr('__imul__')(*args, _callSync='off')
def __ipow__(self, *args):
return self._getSpecialAttr('__ipow__')(*args, _callSync='off')
def __rshift__(self, *args):
return self._getSpecialAttr('__rshift__')(*args)
def __lshift__(self, *args):
return self._getSpecialAttr('__lshift__')(*args)
def __irshift__(self, *args):
return self._getSpecialAttr('__irshift__')(*args, _callSync='off')
def __ilshift__(self, *args):
return self._getSpecialAttr('__ilshift__')(*args, _callSync='off')
def __eq__(self, *args):
return self._getSpecialAttr('__eq__')(*args)
def __ne__(self, *args):
return self._getSpecialAttr('__ne__')(*args)
def __lt__(self, *args):
return self._getSpecialAttr('__lt__')(*args)
def __gt__(self, *args):
return self._getSpecialAttr('__gt__')(*args)
def __le__(self, *args):
return self._getSpecialAttr('__le__')(*args)
def __ge__(self, *args):
return self._getSpecialAttr('__ge__')(*args)
def __and__(self, *args):
return self._getSpecialAttr('__and__')(*args)
def __or__(self, *args):
return self._getSpecialAttr('__or__')(*args)
def __xor__(self, *args):
return self._getSpecialAttr('__xor__')(*args)
def __iand__(self, *args):
return self._getSpecialAttr('__iand__')(*args, _callSync='off')
def __ior__(self, *args):
return self._getSpecialAttr('__ior__')(*args, _callSync='off')
def __ixor__(self, *args):
return self._getSpecialAttr('__ixor__')(*args, _callSync='off')
def __mod__(self, *args):
return self._getSpecialAttr('__mod__')(*args)
def __radd__(self, *args):
return self._getSpecialAttr('__radd__')(*args)
def __rsub__(self, *args):
return self._getSpecialAttr('__rsub__')(*args)
def __rdiv__(self, *args):
return self._getSpecialAttr('__rdiv__')(*args)
def __rfloordiv__(self, *args):
return self._getSpecialAttr('__rfloordiv__')(*args)
def __rtruediv__(self, *args):
return self._getSpecialAttr('__rtruediv__')(*args)
def __rmul__(self, *args):
return self._getSpecialAttr('__rmul__')(*args)
def __rpow__(self, *args):
return self._getSpecialAttr('__rpow__')(*args)
def __rrshift__(self, *args):
return self._getSpecialAttr('__rrshift__')(*args)
def __rlshift__(self, *args):
return self._getSpecialAttr('__rlshift__')(*args)
def __rand__(self, *args):
return self._getSpecialAttr('__rand__')(*args)
def __ror__(self, *args):
return self._getSpecialAttr('__ror__')(*args)
def __rxor__(self, *args):
return self._getSpecialAttr('__ror__')(*args)
def __rmod__(self, *args):
return self._getSpecialAttr('__rmod__')(*args)
def __hash__(self):
## Required for python3 since __eq__ is defined.
return id(self)
class DeferredObjectProxy(ObjectProxy):
def __init__(self, parentProxy, attribute):
## can't set attributes directly because setattr is overridden.
for k in ['_processId', '_typeStr', '_proxyId', '_handler']:
self.__dict__[k] = getattr(parentProxy, k)
self.__dict__['_parent'] = parentProxy butes'] = parentProxy._attributes + (attribute,)
self.__dict__['_proxyOptions'] = parentProxy._proxyOptions.copy()
def __repr__(self):
return ObjectProxy.__repr__(self) + '.' + '.'.join(self._attributes)
def _undefer(self):
return self._parent.__getattr__(self._attributes[-1], _deferGetattr=False)
| true | true |
f73d453812bcfc25c8bbd44be64d84d895cd81c9 | 76 | py | Python | main.py | sebanie15/simple_clinic | 4dc942b0549ee6397a0e89dd7aa03eb8580b4a5a | [
"MIT"
] | null | null | null | main.py | sebanie15/simple_clinic | 4dc942b0549ee6397a0e89dd7aa03eb8580b4a5a | [
"MIT"
] | null | null | null | main.py | sebanie15/simple_clinic | 4dc942b0549ee6397a0e89dd7aa03eb8580b4a5a | [
"MIT"
] | null | null | null | # !/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: sebanie15
"""
| 10.857143 | 23 | 0.539474 | true | true | |
f73d45577cc68e12b4eeb5c8da70320946d43fce | 7,327 | py | Python | __init__.py | APRAND/mission | a00532ef1fb6bd1cfc6bbfe1a1ebe3b5a32fead4 | [
"MIT"
] | null | null | null | __init__.py | APRAND/mission | a00532ef1fb6bd1cfc6bbfe1a1ebe3b5a32fead4 | [
"MIT"
] | null | null | null | __init__.py | APRAND/mission | a00532ef1fb6bd1cfc6bbfe1a1ebe3b5a32fead4 | [
"MIT"
] | null | null | null | import os, inspect
from lib import navpy
from util import transformations as tr
from util import SRTM, common, file_tools, mavlink_meta
from shapely.geometry import LineString
from shapely import affinity
# Samuel Dudley
# September 2018
# Mission planning tool for mavlink enabled vehicles
# Setup logging
# generic mission object
class BaseMission(object):
    """Base class for writing mavlink (QGC WPL 110 format) waypoint mission files.

    Holds a geodetic reference point, converts between LLA and local NED
    coordinates around it, and provides write* helpers that emit individual
    mission lines into a text waypoint file.
    """
    def __init__(self, missionID, takeoffAlt, takoffLoiterTime, outputDir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'missions'), logName = 'mission'):
        # setup logger
        self.logger = common.setupLogger(logName)
        # set output directory
        self.outputDir = outputDir
        # TODO: Check to make sure the dir exists
        # Reference origin; populated via setReferenceLLA() before use.
        self.latRef = None
        self.lonRef = None
        self.altRef = None
        self.frame = 'ned'
        self.points = []
        # setup altitude types (mavlink MAV_FRAME values)
        self.altitudeTypes = {'relative':3, 'terrain':10}
        self.availableFrames = ['ned', 'lla']
        self.missionID = missionID
        self.takeoffAlt = takeoffAlt
        self.takoffLoiterTime = takoffLoiterTime
        # Output waypoint file path, named after the mission ID.
        self.filePath = os.path.join(self.outputDir, self.missionID+'.txt')
        self.fid = None          # file handle while a waypoint file is open
        self.missionLine = 0     # index of the next waypoint line to write
        self.autoContinue = 1    # mavlink autocontinue flag appended to each line
        self.mavlinkEnums = mavlink_meta.getMavlinkEnums()
    def writeWaypointFile(self, actions):
        """Create the waypoint file by running *actions* in order.

        Each entry is either a bound method (invoked with no arguments) or a
        pre-formatted waypoint string handed to writeGenericAction().
        """
        file_tools.makePath(self.outputDir)
        with open(self.filePath, 'w+') as self.fid:
            for action in actions:
                if inspect.ismethod(action):
                    action()
                else:
                    self.writeGenericAction(action)
    def writeWaypointLine(self, line, newLine = True):
        """Write one line to the open waypoint file and advance missionLine."""
        if newLine:
            line +="\n"
        if not self.fid.closed:
            self.fid.write(line)
            self.missionLine += 1
        else:
            # The waypoint file is closed
            self.logger.error('Failed to write to waypoint file')
    def writeHomeLLA(self):
        """Write the home position (the reference LLA) as a NAV_WAYPOINT."""
        line = "{0} 0 0 {1} 0.0 0.0 0.0 0.0 {2} {3} {4} {5}".format(
            self.missionLine, self.mavlinkEnums['MAV_CMD']['NAV_WAYPOINT']['value'],
            self.latRef, self.lonRef, self.altRef,
            self.autoContinue)
        self.writeWaypointLine(line)
    def writeTakeoffLLA(self):
        """Write a NAV_TAKEOFF item climbing to takeoffAlt above the reference."""
        line = "{0} 0 {1} {2} 0.0 0.0 0.0 0.0 {3} {4} {5} {6}".format(
            self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
            self.mavlinkEnums['MAV_CMD']['NAV_TAKEOFF']['value'],
            self.latRef, self.lonRef, self.takeoffAlt,
            self.autoContinue)
        self.writeWaypointLine(line)
    def writeLoiterTime(self):
        """Write a NAV_LOITER_TIME item holding for takoffLoiterTime seconds."""
        line = "{0} 0 {1} {2} {3} 0.0 0.0 0.0 {4} {5} {6} {7}".format(
            self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
            self.mavlinkEnums['MAV_CMD']['NAV_LOITER_TIME']['value'],
            self.takoffLoiterTime,
            self.latRef, self.lonRef, self.takeoffAlt,
            self.autoContinue)
        self.writeWaypointLine(line)
    def writeReturnToLaunch(self):
        """Write a NAV_RETURN_TO_LAUNCH item."""
        line = "{0} 0 {1} {2} 0.0 0.0 0.0 0.0 0.0 0.0 0.0 {3}".format(
            self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
            self.mavlinkEnums['MAV_CMD']['NAV_RETURN_TO_LAUNCH']['value'],
            self.autoContinue)
        self.writeWaypointLine(line)
    def writeWaypointLLA(self, lla):
        """Write a NAV_WAYPOINT at [lat, lon, alt] (alt relative to home)."""
        line = "{0} 0 {1} {2} 0.0 0.0 0.0 0.0 {3} {4} {5} {6}".format(
            self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
            self.mavlinkEnums['MAV_CMD']['NAV_WAYPOINT']['value'],
            lla[0], lla[1], lla[2],
            self.autoContinue)
        self.writeWaypointLine(line)
    def writePreamble(self):
        """Write the QGC WPL 110 header and reset the waypoint counter."""
        self.writeWaypointLine("QGC WPL 110")
        # The header is not a waypoint, so restart numbering at 0.
        self.missionLine = 0
    def writeGenericAction(self, action):
        """Write a pre-formatted action string, prefixed with the line index."""
        line = "{0} {1}".format(self.missionLine, action)
        self.writeWaypointLine(line)
    def checkFrame(self):
        """Return True when self.frame is one of the supported frames."""
        if self.frame.lower() in self.availableFrames:
            return True
        else:
            return False
    def setReferenceLLA(self, LLA=None):
        """Set the reference origin from [lat, lon, alt] and prefetch terrain."""
        # Fixed: avoid a mutable default argument; caller-visible behavior is
        # unchanged (an omitted LLA still fails on the indexing below).
        if LLA is None:
            LLA = []
        # TODO: check LLA length
        self.latRef = LLA[0]
        self.lonRef = LLA[1]
        self.altRef = LLA[2]
        # Load SRTM terrain around the reference point.  Called for its side
        # effects only; the previous unused `sss = ...` binding was dropped.
        SRTM.NEDGround(lla_ref = LLA , width_m = 10000 , height_m = 10000 , step_m = 30, logger = self.logger)
    def setReferenceLatitude(self, lat):
        self.latRef = lat
    def setReferenceLongitude(self, lon):
        self.lonRef = lon
    def setReferenceAltitude(self, alt):
        self.altRef = alt
    def getPointsNED(self, lla):
        """Convert [lat, lon, alt] to a local [n, e, d] list about the reference."""
        ned = navpy.lla2ned(lla[0], lla[1], lla[2], lat_ref = self.latRef, lon_ref = self.lonRef, alt_ref = self.altRef)
        return list(ned)
    def getPointsLLA(self, ned):
        """Convert a local [n, e, d] point to a [lat, lon, alt] list."""
        lla = navpy.ned2lla(ned, lat_ref = self.latRef, lon_ref= self.lonRef , alt_ref = self.altRef)
        return list(lla)
class GridMission(BaseMission):
    """Mission that generates a lawn-mower ("ladder") survey grid."""
    def __init__(self, missionID, takeoffAlt = 10, takoffLoiterTime = 5, append = False):
        # NOTE(review): `append` is accepted but currently unused.
        super(GridMission, self).__init__(missionID, takeoffAlt, takoffLoiterTime)
        self.logger.debug(missionID)
    def generateGrid(self, out = 100, right = 50, yaw = 45, alt = 25):
        """Write grid waypoints covering `out` m forward and `right` m across at `alt` m.

        A negative `right` sweeps the grid to the left.  `yaw` is currently
        unused: the grid rotation below is commented out.
        """
        # TODO: dynamically calculate lane width from sensor FoV and alt
        laneWidth = 10
        points = []
        if right < 0:
            laneWidth = -laneWidth
        # Build the back-and-forth lane corners in local NED (x=out, y=across).
        for k in range(0, 50, 2):
            points.append((0, k*laneWidth))
            points.append((out, k*laneWidth))
            points.append((out, (k+1)*laneWidth))
            points.append((0, (k+1)*laneWidth))
            # Stop once the swept width covers the requested extent.
            if abs(laneWidth*(k+1)) > abs(right):
                break
        line = LineString(points)
        # line = affinity.rotate(line, angle=yaw, origin=list(line.coords)[0], use_radians=False)
        llas = [self.getPointsLLA([point[0], point[1], 0]) for point in list(line.coords)]
        for lla in llas:
            self.writeWaypointLLA([lla[0], lla[1], alt])
if __name__ == '__main__':
    # Example: build a grid-survey mission file around a fixed reference point.
    mission = GridMission('grid_test')
    mission.setReferenceLLA([-35.3615074158, 149.163650513, 500])
    # Ordered actions; each bound method writes its mission line(s) in turn.
    actions = [mission.writePreamble,
               mission.writeHomeLLA,
               mission.writeTakeoffLLA,
               mission.writeLoiterTime,
               mission.generateGrid,
               mission.writeReturnToLaunch]
    mission.writeWaypointFile(actions)
| 43.1 | 168 | 0.538965 | import os, inspect
from lib import navpy
from util import transformations as tr
from util import SRTM, common, file_tools, mavlink_meta
from shapely.geometry import LineString
from shapely import affinity
class BaseMission(object):
def __init__(self, missionID, takeoffAlt, takoffLoiterTime, outputDir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'missions'), logName = 'mission'):
self.logger = common.setupLogger(logName)
self.outputDir = outputDir
self.latRef = None
self.lonRef = None
self.altRef = None
self.frame = 'ned'
self.points = []
self.altitudeTypes = {'relative':3, 'terrain':10}
self.availableFrames = ['ned', 'lla']
self.missionID = missionID
self.takeoffAlt = takeoffAlt
self.takoffLoiterTime = takoffLoiterTime
self.filePath = os.path.join(self.outputDir, self.missionID+'.txt')
self.fid = None
self.missionLine = 0
self.autoContinue = 1
self.mavlinkEnums = mavlink_meta.getMavlinkEnums()
def writeWaypointFile(self, actions):
file_tools.makePath(self.outputDir)
with open(self.filePath, 'w+') as self.fid:
for action in actions:
if inspect.ismethod(action):
action()
else:
self.writeGenericAction(action)
def writeWaypointLine(self, line, newLine = True):
if newLine:
line +="\n"
if not self.fid.closed:
self.fid.write(line)
self.missionLine += 1
else:
self.logger.error('Failed to write to waypoint file')
def writeHomeLLA(self):
line = "{0} 0 0 {1} 0.0 0.0 0.0 0.0 {2} {3} {4} {5}".format(self.missionLine, self.mavlinkEnums['MAV_CMD']['NAV_WAYPOINT']['value'],
self.latRef, self.lonRef, self.altRef,
self.autoContinue)
self.writeWaypointLine(line)
def writeTakeoffLLA(self):
line = "{0} 0 {1} {2} 0.0 0.0 0.0 0.0 {3} {4} {5} {6}".format(self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
self.mavlinkEnums['MAV_CMD']['NAV_TAKEOFF']['value'],
self.latRef, self.lonRef, self.takeoffAlt,
self.autoContinue)
self.writeWaypointLine(line)
def writeLoiterTime(self):
line = "{0} 0 {1} {2} {3} 0.0 0.0 0.0 {4} {5} {6} {7}".format(self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
self.mavlinkEnums['MAV_CMD']['NAV_LOITER_TIME']['value'],
self.takoffLoiterTime,
self.latRef, self.lonRef, self.takeoffAlt,
self.autoContinue)
self.writeWaypointLine(line)
def writeReturnToLaunch(self):
line = "{0} 0 {1} {2} 0.0 0.0 0.0 0.0 0.0 0.0 0.0 {3}".format(self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
self.mavlinkEnums['MAV_CMD']['NAV_RETURN_TO_LAUNCH']['value'],
self.autoContinue)
self.writeWaypointLine(line)
def writeWaypointLLA(self, lla):
line = "{0} 0 {1} {2} 0.0 0.0 0.0 0.0 {3} {4} {5} {6}".format(self.missionLine, self.mavlinkEnums['MAV_FRAME']['GLOBAL_RELATIVE_ALT']['value'],
self.mavlinkEnums['MAV_CMD']['NAV_WAYPOINT']['value'],
lla[0], lla[1], lla[2],
self.autoContinue)
self.writeWaypointLine(line)
def writePreamble(self):
self.writeWaypointLine("QGC WPL 110")
self.missionLine = 0
def writeGenericAction(self, action):
line = "{0} {1}".format(self.missionLine, action)
self.writeWaypointLine(line)
def checkFrame(self):
if self.frame.lower() in self.availableFrames:
return True
else:
return False
def setReferenceLLA(self, LLA=[]):
self.latRef = LLA[0]
self.lonRef = LLA[1]
self.altRef = LLA[2]
sss = SRTM.NEDGround(lla_ref = LLA , width_m = 10000 , height_m = 10000 , step_m = 30, logger = self.logger)
def setReferenceLatitude(self, lat):
self.latRef = lat
def setReferenceLongitude(self, lon):
self.lonRef = lon
def setReferenceAltitude(self, alt):
self.altRef = alt
def getPointsNED(self, lla):
ned = navpy.lla2ned(lla[0], lla[1], lla[2], lat_ref = self.latRef, lon_ref = self.lonRef, alt_ref = self.altRef)
return list(ned)
def getPointsLLA(self, ned):
lla = navpy.ned2lla(ned, lat_ref = self.latRef, lon_ref= self.lonRef , alt_ref = self.altRef)
return list(lla)
class GridMission(BaseMission):
def __init__(self, missionID, takeoffAlt = 10, takoffLoiterTime = 5, append = False):
super(GridMission, self).__init__(missionID, takeoffAlt, takoffLoiterTime)
self.logger.debug(missionID)
def generateGrid(self, out = 100, right = 50, yaw = 45, alt = 25):
laneWidth = 10
points = []
if right < 0:
laneWidth = -laneWidth
for k in range(0, 50, 2):
points.append((0, k*laneWidth))
points.append((out, k*laneWidth))
points.append((out, (k+1)*laneWidth))
points.append((0, (k+1)*laneWidth))
if abs(laneWidth*(k+1)) > abs(right) :
break;
line = LineString(points)
llas = [self.getPointsLLA([point[0], point[1], 0]) for point in list(line.coords)]
for lla in llas:
self.writeWaypointLLA([lla[0], lla[1], alt])
if __name__ == '__main__':
mission = GridMission('grid_test')
mission.setReferenceLLA([-35.3615074158, 149.163650513, 500])
actions = [mission.writePreamble,
mission.writeHomeLLA,
mission.writeTakeoffLLA,
mission.writeLoiterTime,
mission.generateGrid,
mission.writeReturnToLaunch]
mission.writeWaypointFile(actions)
| true | true |
f73d45bb1ac18fafbdb84aa1438a166e3cb7f30c | 3,727 | py | Python | src/GameManager/gui/subscreen.py | maccam912/Oddyssey | a9d1eca9ea1dfabd9873eb842eae03f2ed83d405 | [
"MIT"
] | null | null | null | src/GameManager/gui/subscreen.py | maccam912/Oddyssey | a9d1eca9ea1dfabd9873eb842eae03f2ed83d405 | [
"MIT"
] | 11 | 2017-03-12T13:59:21.000Z | 2017-03-24T04:42:16.000Z | src/GameManager/gui/subscreen.py | maccam912/Oddyssey | a9d1eca9ea1dfabd9873eb842eae03f2ed83d405 | [
"MIT"
] | null | null | null | import numpy as np
class SubScreen():
    """A rectangular sub-region of a curses-like screen.

    Coordinates passed to the drawing methods are offsets relative to the
    sub-screen origin (x, y); they are translated to absolute screen
    coordinates before being forwarded to the underlying `curses` object.
    """
    def __init__(self, x, y, width, height, curses):
        self.x = x              # absolute x of the sub-screen origin
        self.y = y              # absolute y of the sub-screen origin
        self.width = width
        self.height = height
        self.curses = curses    # backend providing put_char()/put_message()
    def put_char(self, x, y, char=' ', foreground='white', background='transparent'):
        """Draw one character at offset (x, y); raise ValueError if outside."""
        # Fixed: bounds must be checked on the *local* offset (0 <= offset < size).
        # The old test compared the offset against the absolute origin
        # (x >= self.x), rejecting valid cells whenever the sub-screen did not
        # start at the screen origin.
        if 0 <= x < self.width and 0 <= y < self.height:
            self.curses.put_char(self.x + x, self.y + y, char, foreground, background)
        else:
            raise ValueError('Error: Out of SubScreen boundary.')
    def put_message(self, x, y, message, foreground='white', background='transparent', auto=True, align='left'):
        """Draw a message at offset (x, y), constrained to this sub-screen's box."""
        self.curses.put_message(self.x + x, self.y + y, message, foreground, background, auto, align, box_x=self.x, box_y=self.y, box_width=self.width, box_height=self.height)
    def fill_char(self, char=' ', foreground='white', background='transparent'):
        """Fill the whole sub-screen area with `char`."""
        for i in range(self.x, self.x + self.width):
            for j in range(self.y, self.y + self.height):
                self.curses.put_char(i, j, char, foreground, background)
class MessageScreen(SubScreen):
def __init__(self, x, y, width, height, curses):
super(MessageScreen, self).__init__(x, y, width, height, curses)
self.initialization()
def initialization(self):
self.message_id = 1
self.message_size = self.height
self.message_storage = ['']*self.message_size
self.color_storage = ['transparent']*self.message_size
self.idx_storage = ['']*self.message_size
def add_message(self, message, color='white'):
idx = '[%d] '%(self.message_id)
message = message
self.message_id += 1
self.message_storage.append(message)
self.color_storage.append(color)
self.idx_storage.append(idx)
self.message_storage.pop(0)
self.color_storage.pop(0)
self.idx_storage.pop(0)
def draw(self):
self.fill_char()
for i in range(len(self.message_storage)):
self.put_message(0, i, self.idx_storage[i], foreground='white', background='transparent', auto=True, align='left')
self.put_message(len(self.idx_storage[i]), i , self.message_storage[i], foreground=self.color_storage[i], background='transparent', auto=True, align='left')
class PlayerInfoScreen(SubScreen):
def __init__(self, x, y, width, height, curses, player):
super(PlayerInfoScreen, self).__init__(x, y, width, height, curses)
self.player = player
self.initialization()
def initialization(self):
self.full_health_bar_length = 15
self.draw()
def draw(self):
# Draw background
self.fill_char(char='█', foreground='peru', background='transparent')
# Draw HP bar
health = self.player.current_health
interval = self.player.health / self.full_health_bar_length / 3
level = int(np.ceil(health / interval))
health_title = 'HP '
if level % 3 == 0:
remainder = ''
elif level % 3 == 1:
remainder = '░'
elif level % 3 == 2:
remainder = '▒'
health_message = '█' * int((level - level%3)/3) + remainder
self.put_message(0, 0, health_title, foreground='red', background='peru', auto=True, align='left')
self.put_message(len(health_title), 0, ' '*self.full_health_bar_length, foreground='red', background='transparent', auto=True, align='left')
self.put_message(len(health_title), 0, health_message, foreground='red', background='transparent', auto=True, align='left')
| 43.337209 | 176 | 0.611215 | import numpy as np
class SubScreen():
def __init__(self, x, y, width, height, curses):
self.x = x
self.y = y
self.width = width
self.height = height
self.curses = curses
def put_char(self, x, y, char=' ', foreground='white', background='transparent'):
if x < self.width and x >= self.x and y < self.height and y >= self.y:
self.curses.put_char(self.x + x, self.y + y, char, foreground, background)
else:
raise ValueError('Error: Out of SubScreen boundary.')
def put_message(self, x, y , message, foreground='white', background='transparent', auto=True, align='left'):
self.curses.put_message(self.x + x, self.y + y , message, foreground, background, auto, align, box_x=self.x, box_y=self.y, box_width=self.width, box_height=self.height)
def fill_char(self, char=' ', foreground='white', background='transparent'):
for i in range(self.x, self.x + self.width):
for j in range(self.y, self.y + self.height):
self.curses.put_char(i, j, char, foreground, background)
class MessageScreen(SubScreen):
def __init__(self, x, y, width, height, curses):
super(MessageScreen, self).__init__(x, y, width, height, curses)
self.initialization()
def initialization(self):
self.message_id = 1
self.message_size = self.height
self.message_storage = ['']*self.message_size
self.color_storage = ['transparent']*self.message_size
self.idx_storage = ['']*self.message_size
def add_message(self, message, color='white'):
idx = '[%d] '%(self.message_id)
message = message
self.message_id += 1
self.message_storage.append(message)
self.color_storage.append(color)
self.idx_storage.append(idx)
self.message_storage.pop(0)
self.color_storage.pop(0)
self.idx_storage.pop(0)
def draw(self):
self.fill_char()
for i in range(len(self.message_storage)):
self.put_message(0, i, self.idx_storage[i], foreground='white', background='transparent', auto=True, align='left')
self.put_message(len(self.idx_storage[i]), i , self.message_storage[i], foreground=self.color_storage[i], background='transparent', auto=True, align='left')
class PlayerInfoScreen(SubScreen):
def __init__(self, x, y, width, height, curses, player):
super(PlayerInfoScreen, self).__init__(x, y, width, height, curses)
self.player = player
self.initialization()
def initialization(self):
self.full_health_bar_length = 15
self.draw()
def draw(self):
self.fill_char(char='█', foreground='peru', background='transparent')
health = self.player.current_health
interval = self.player.health / self.full_health_bar_length / 3
level = int(np.ceil(health / interval))
health_title = 'HP '
if level % 3 == 0:
remainder = ''
elif level % 3 == 1:
remainder = '░'
elif level % 3 == 2:
remainder = '▒'
health_message = '█' * int((level - level%3)/3) + remainder
self.put_message(0, 0, health_title, foreground='red', background='peru', auto=True, align='left')
self.put_message(len(health_title), 0, ' '*self.full_health_bar_length, foreground='red', background='transparent', auto=True, align='left')
self.put_message(len(health_title), 0, health_message, foreground='red', background='transparent', auto=True, align='left')
| true | true |
f73d464c30b19ff9737340f814e7fc6023b698ec | 13,597 | py | Python | src/oci/database/models/launch_db_system_from_db_system_details.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/database/models/launch_db_system_from_db_system_details.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/database/models/launch_db_system_from_db_system_details.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from .launch_db_system_base import LaunchDbSystemBase
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class LaunchDbSystemFromDbSystemDetails(LaunchDbSystemBase):
"""
Used for creating a new database system by cloning an existing DB system.
"""
#: A constant which can be used with the license_model property of a LaunchDbSystemFromDbSystemDetails.
#: This constant has a value of "LICENSE_INCLUDED"
LICENSE_MODEL_LICENSE_INCLUDED = "LICENSE_INCLUDED"
#: A constant which can be used with the license_model property of a LaunchDbSystemFromDbSystemDetails.
#: This constant has a value of "BRING_YOUR_OWN_LICENSE"
LICENSE_MODEL_BRING_YOUR_OWN_LICENSE = "BRING_YOUR_OWN_LICENSE"
def __init__(self, **kwargs):
"""
Initializes a new LaunchDbSystemFromDbSystemDetails object with values from keyword arguments. The default value of the :py:attr:`~oci.database.models.LaunchDbSystemFromDbSystemDetails.source` attribute
of this class is ``DB_SYSTEM`` and it should not be changed.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param compartment_id:
The value to assign to the compartment_id property of this LaunchDbSystemFromDbSystemDetails.
:type compartment_id: str
:param fault_domains:
The value to assign to the fault_domains property of this LaunchDbSystemFromDbSystemDetails.
:type fault_domains: list[str]
:param display_name:
The value to assign to the display_name property of this LaunchDbSystemFromDbSystemDetails.
:type display_name: str
:param availability_domain:
The value to assign to the availability_domain property of this LaunchDbSystemFromDbSystemDetails.
:type availability_domain: str
:param subnet_id:
The value to assign to the subnet_id property of this LaunchDbSystemFromDbSystemDetails.
:type subnet_id: str
:param backup_subnet_id:
The value to assign to the backup_subnet_id property of this LaunchDbSystemFromDbSystemDetails.
:type backup_subnet_id: str
:param nsg_ids:
The value to assign to the nsg_ids property of this LaunchDbSystemFromDbSystemDetails.
:type nsg_ids: list[str]
:param backup_network_nsg_ids:
The value to assign to the backup_network_nsg_ids property of this LaunchDbSystemFromDbSystemDetails.
:type backup_network_nsg_ids: list[str]
:param shape:
The value to assign to the shape property of this LaunchDbSystemFromDbSystemDetails.
:type shape: str
:param time_zone:
The value to assign to the time_zone property of this LaunchDbSystemFromDbSystemDetails.
:type time_zone: str
:param db_system_options:
The value to assign to the db_system_options property of this LaunchDbSystemFromDbSystemDetails.
:type db_system_options: oci.database.models.DbSystemOptions
:param sparse_diskgroup:
The value to assign to the sparse_diskgroup property of this LaunchDbSystemFromDbSystemDetails.
:type sparse_diskgroup: bool
:param ssh_public_keys:
The value to assign to the ssh_public_keys property of this LaunchDbSystemFromDbSystemDetails.
:type ssh_public_keys: list[str]
:param hostname:
The value to assign to the hostname property of this LaunchDbSystemFromDbSystemDetails.
:type hostname: str
:param domain:
The value to assign to the domain property of this LaunchDbSystemFromDbSystemDetails.
:type domain: str
:param cpu_core_count:
The value to assign to the cpu_core_count property of this LaunchDbSystemFromDbSystemDetails.
:type cpu_core_count: int
:param cluster_name:
The value to assign to the cluster_name property of this LaunchDbSystemFromDbSystemDetails.
:type cluster_name: str
:param data_storage_percentage:
The value to assign to the data_storage_percentage property of this LaunchDbSystemFromDbSystemDetails.
:type data_storage_percentage: int
:param initial_data_storage_size_in_gb:
The value to assign to the initial_data_storage_size_in_gb property of this LaunchDbSystemFromDbSystemDetails.
:type initial_data_storage_size_in_gb: int
:param kms_key_id:
The value to assign to the kms_key_id property of this LaunchDbSystemFromDbSystemDetails.
:type kms_key_id: str
:param kms_key_version_id:
The value to assign to the kms_key_version_id property of this LaunchDbSystemFromDbSystemDetails.
:type kms_key_version_id: str
:param node_count:
The value to assign to the node_count property of this LaunchDbSystemFromDbSystemDetails.
:type node_count: int
:param freeform_tags:
The value to assign to the freeform_tags property of this LaunchDbSystemFromDbSystemDetails.
:type freeform_tags: dict(str, str)
:param defined_tags:
The value to assign to the defined_tags property of this LaunchDbSystemFromDbSystemDetails.
:type defined_tags: dict(str, dict(str, object))
:param source:
The value to assign to the source property of this LaunchDbSystemFromDbSystemDetails.
Allowed values for this property are: "NONE", "DB_BACKUP", "DATABASE", "DB_SYSTEM"
:type source: str
:param private_ip:
The value to assign to the private_ip property of this LaunchDbSystemFromDbSystemDetails.
:type private_ip: str
:param source_db_system_id:
The value to assign to the source_db_system_id property of this LaunchDbSystemFromDbSystemDetails.
:type source_db_system_id: str
:param db_home:
The value to assign to the db_home property of this LaunchDbSystemFromDbSystemDetails.
:type db_home: oci.database.models.CreateDbHomeFromDbSystemDetails
:param license_model:
The value to assign to the license_model property of this LaunchDbSystemFromDbSystemDetails.
Allowed values for this property are: "LICENSE_INCLUDED", "BRING_YOUR_OWN_LICENSE"
:type license_model: str
"""
self.swagger_types = {
'compartment_id': 'str',
'fault_domains': 'list[str]',
'display_name': 'str',
'availability_domain': 'str',
'subnet_id': 'str',
'backup_subnet_id': 'str',
'nsg_ids': 'list[str]',
'backup_network_nsg_ids': 'list[str]',
'shape': 'str',
'time_zone': 'str',
'db_system_options': 'DbSystemOptions',
'sparse_diskgroup': 'bool',
'ssh_public_keys': 'list[str]',
'hostname': 'str',
'domain': 'str',
'cpu_core_count': 'int',
'cluster_name': 'str',
'data_storage_percentage': 'int',
'initial_data_storage_size_in_gb': 'int',
'kms_key_id': 'str',
'kms_key_version_id': 'str',
'node_count': 'int',
'freeform_tags': 'dict(str, str)',
'defined_tags': 'dict(str, dict(str, object))',
'source': 'str',
'private_ip': 'str',
'source_db_system_id': 'str',
'db_home': 'CreateDbHomeFromDbSystemDetails',
'license_model': 'str'
}
self.attribute_map = {
'compartment_id': 'compartmentId',
'fault_domains': 'faultDomains',
'display_name': 'displayName',
'availability_domain': 'availabilityDomain',
'subnet_id': 'subnetId',
'backup_subnet_id': 'backupSubnetId',
'nsg_ids': 'nsgIds',
'backup_network_nsg_ids': 'backupNetworkNsgIds',
'shape': 'shape',
'time_zone': 'timeZone',
'db_system_options': 'dbSystemOptions',
'sparse_diskgroup': 'sparseDiskgroup',
'ssh_public_keys': 'sshPublicKeys',
'hostname': 'hostname',
'domain': 'domain',
'cpu_core_count': 'cpuCoreCount',
'cluster_name': 'clusterName',
'data_storage_percentage': 'dataStoragePercentage',
'initial_data_storage_size_in_gb': 'initialDataStorageSizeInGB',
'kms_key_id': 'kmsKeyId',
'kms_key_version_id': 'kmsKeyVersionId',
'node_count': 'nodeCount',
'freeform_tags': 'freeformTags',
'defined_tags': 'definedTags',
'source': 'source',
'private_ip': 'privateIp',
'source_db_system_id': 'sourceDbSystemId',
'db_home': 'dbHome',
'license_model': 'licenseModel'
}
self._compartment_id = None
self._fault_domains = None
self._display_name = None
self._availability_domain = None
self._subnet_id = None
self._backup_subnet_id = None
self._nsg_ids = None
self._backup_network_nsg_ids = None
self._shape = None
self._time_zone = None
self._db_system_options = None
self._sparse_diskgroup = None
self._ssh_public_keys = None
self._hostname = None
self._domain = None
self._cpu_core_count = None
self._cluster_name = None
self._data_storage_percentage = None
self._initial_data_storage_size_in_gb = None
self._kms_key_id = None
self._kms_key_version_id = None
self._node_count = None
self._freeform_tags = None
self._defined_tags = None
self._source = None
self._private_ip = None
self._source_db_system_id = None
self._db_home = None
self._license_model = None
self._source = 'DB_SYSTEM'
@property
def source_db_system_id(self):
"""
**[Required]** Gets the source_db_system_id of this LaunchDbSystemFromDbSystemDetails.
The `OCID`__ of the DB system.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The source_db_system_id of this LaunchDbSystemFromDbSystemDetails.
:rtype: str
"""
return self._source_db_system_id
@source_db_system_id.setter
def source_db_system_id(self, source_db_system_id):
"""
Sets the source_db_system_id of this LaunchDbSystemFromDbSystemDetails.
The `OCID`__ of the DB system.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param source_db_system_id: The source_db_system_id of this LaunchDbSystemFromDbSystemDetails.
:type: str
"""
self._source_db_system_id = source_db_system_id
@property
def db_home(self):
"""
**[Required]** Gets the db_home of this LaunchDbSystemFromDbSystemDetails.
:return: The db_home of this LaunchDbSystemFromDbSystemDetails.
:rtype: oci.database.models.CreateDbHomeFromDbSystemDetails
"""
return self._db_home
@db_home.setter
def db_home(self, db_home):
"""
Sets the db_home of this LaunchDbSystemFromDbSystemDetails.
:param db_home: The db_home of this LaunchDbSystemFromDbSystemDetails.
:type: oci.database.models.CreateDbHomeFromDbSystemDetails
"""
self._db_home = db_home
@property
def license_model(self):
"""
Gets the license_model of this LaunchDbSystemFromDbSystemDetails.
The Oracle license model that applies to all the databases on the DB system. The default is LICENSE_INCLUDED.
Allowed values for this property are: "LICENSE_INCLUDED", "BRING_YOUR_OWN_LICENSE"
:return: The license_model of this LaunchDbSystemFromDbSystemDetails.
:rtype: str
"""
return self._license_model
@license_model.setter
def license_model(self, license_model):
"""
Sets the license_model of this LaunchDbSystemFromDbSystemDetails.
The Oracle license model that applies to all the databases on the DB system. The default is LICENSE_INCLUDED.
:param license_model: The license_model of this LaunchDbSystemFromDbSystemDetails.
:type: str
"""
allowed_values = ["LICENSE_INCLUDED", "BRING_YOUR_OWN_LICENSE"]
if not value_allowed_none_or_none_sentinel(license_model, allowed_values):
raise ValueError(
"Invalid value for `license_model`, must be None or one of {0}"
.format(allowed_values)
)
self._license_model = license_model
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 40.58806 | 245 | 0.667132 |
from .launch_db_system_base import LaunchDbSystemBase
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class LaunchDbSystemFromDbSystemDetails(LaunchDbSystemBase):
LICENSE_MODEL_LICENSE_INCLUDED = "LICENSE_INCLUDED"
LICENSE_MODEL_BRING_YOUR_OWN_LICENSE = "BRING_YOUR_OWN_LICENSE"
def __init__(self, **kwargs):
self.swagger_types = {
'compartment_id': 'str',
'fault_domains': 'list[str]',
'display_name': 'str',
'availability_domain': 'str',
'subnet_id': 'str',
'backup_subnet_id': 'str',
'nsg_ids': 'list[str]',
'backup_network_nsg_ids': 'list[str]',
'shape': 'str',
'time_zone': 'str',
'db_system_options': 'DbSystemOptions',
'sparse_diskgroup': 'bool',
'ssh_public_keys': 'list[str]',
'hostname': 'str',
'domain': 'str',
'cpu_core_count': 'int',
'cluster_name': 'str',
'data_storage_percentage': 'int',
'initial_data_storage_size_in_gb': 'int',
'kms_key_id': 'str',
'kms_key_version_id': 'str',
'node_count': 'int',
'freeform_tags': 'dict(str, str)',
'defined_tags': 'dict(str, dict(str, object))',
'source': 'str',
'private_ip': 'str',
'source_db_system_id': 'str',
'db_home': 'CreateDbHomeFromDbSystemDetails',
'license_model': 'str'
}
self.attribute_map = {
'compartment_id': 'compartmentId',
'fault_domains': 'faultDomains',
'display_name': 'displayName',
'availability_domain': 'availabilityDomain',
'subnet_id': 'subnetId',
'backup_subnet_id': 'backupSubnetId',
'nsg_ids': 'nsgIds',
'backup_network_nsg_ids': 'backupNetworkNsgIds',
'shape': 'shape',
'time_zone': 'timeZone',
'db_system_options': 'dbSystemOptions',
'sparse_diskgroup': 'sparseDiskgroup',
'ssh_public_keys': 'sshPublicKeys',
'hostname': 'hostname',
'domain': 'domain',
'cpu_core_count': 'cpuCoreCount',
'cluster_name': 'clusterName',
'data_storage_percentage': 'dataStoragePercentage',
'initial_data_storage_size_in_gb': 'initialDataStorageSizeInGB',
'kms_key_id': 'kmsKeyId',
'kms_key_version_id': 'kmsKeyVersionId',
'node_count': 'nodeCount',
'freeform_tags': 'freeformTags',
'defined_tags': 'definedTags',
'source': 'source',
'private_ip': 'privateIp',
'source_db_system_id': 'sourceDbSystemId',
'db_home': 'dbHome',
'license_model': 'licenseModel'
}
self._compartment_id = None
self._fault_domains = None
self._display_name = None
self._availability_domain = None
self._subnet_id = None
self._backup_subnet_id = None
self._nsg_ids = None
self._backup_network_nsg_ids = None
self._shape = None
self._time_zone = None
self._db_system_options = None
self._sparse_diskgroup = None
self._ssh_public_keys = None
self._hostname = None
self._domain = None
self._cpu_core_count = None
self._cluster_name = None
self._data_storage_percentage = None
self._initial_data_storage_size_in_gb = None
self._kms_key_id = None
self._kms_key_version_id = None
self._node_count = None
self._freeform_tags = None
self._defined_tags = None
self._source = None
self._private_ip = None
self._source_db_system_id = None
self._db_home = None
self._license_model = None
self._source = 'DB_SYSTEM'
@property
def source_db_system_id(self):
return self._source_db_system_id
@source_db_system_id.setter
def source_db_system_id(self, source_db_system_id):
self._source_db_system_id = source_db_system_id
@property
def db_home(self):
return self._db_home
@db_home.setter
def db_home(self, db_home):
self._db_home = db_home
@property
def license_model(self):
return self._license_model
@license_model.setter
def license_model(self, license_model):
allowed_values = ["LICENSE_INCLUDED", "BRING_YOUR_OWN_LICENSE"]
if not value_allowed_none_or_none_sentinel(license_model, allowed_values):
raise ValueError(
"Invalid value for `license_model`, must be None or one of {0}"
.format(allowed_values)
)
self._license_model = license_model
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f73d4675c3877f27d97fb71562c1364dc1cc2335 | 73 | py | Python | atlasnet_v2/utils/__init__.py | RobinBaumann/TF-AtlasNetV2 | 32bd962407becacc9b9bee6c76c96216818e4c89 | [
"MIT"
] | null | null | null | atlasnet_v2/utils/__init__.py | RobinBaumann/TF-AtlasNetV2 | 32bd962407becacc9b9bee6c76c96216818e4c89 | [
"MIT"
] | null | null | null | atlasnet_v2/utils/__init__.py | RobinBaumann/TF-AtlasNetV2 | 32bd962407becacc9b9bee6c76c96216818e4c89 | [
"MIT"
] | null | null | null | """
Created by Robin Baumann <mail@robin-baumann.com> at 08.06.20.
"""
| 12.166667 | 62 | 0.657534 | true | true | |
f73d493ef3d013bb9c8f0413b28a3fe9217b9a28 | 362 | py | Python | tweet/urls.py | ThatsSoMeta/NotTwitter | c9c1c04d204685aaeeb79d6a29167c374a7071d8 | [
"MIT"
] | 1 | 2021-03-08T18:32:59.000Z | 2021-03-08T18:32:59.000Z | tweet/urls.py | ThatsSoMeta/NotTwitter | c9c1c04d204685aaeeb79d6a29167c374a7071d8 | [
"MIT"
] | null | null | null | tweet/urls.py | ThatsSoMeta/NotTwitter | c9c1c04d204685aaeeb79d6a29167c374a7071d8 | [
"MIT"
] | null | null | null | from django.urls import path
from .views import (
tweet_view,
like_tweet,
unlike_tweet,
tweet_detail_view
)
urlpatterns = [
path('', tweet_view, name='tweet'),
path('<int:id>/', tweet_detail_view, name='tweet'),
path('like/<int:tweet_id>/', like_tweet, name='like'),
path('unlike/<int:tweet_id>/', unlike_tweet, name='unlike'),
] | 25.857143 | 64 | 0.654696 | from django.urls import path
from .views import (
tweet_view,
like_tweet,
unlike_tweet,
tweet_detail_view
)
urlpatterns = [
path('', tweet_view, name='tweet'),
path('<int:id>/', tweet_detail_view, name='tweet'),
path('like/<int:tweet_id>/', like_tweet, name='like'),
path('unlike/<int:tweet_id>/', unlike_tweet, name='unlike'),
] | true | true |
f73d4a21e0337bacf46a3d78d23ef4a85ccce221 | 1,399 | py | Python | sim2net/propagation/__init__.py | harikuts/dsr_optimization | 796e58da578f7841a060233a8981eb69d92b798b | [
"MIT"
] | 12 | 2018-06-17T05:29:35.000Z | 2022-03-20T23:55:49.000Z | sim2net/propagation/__init__.py | harikuts/dsr_optimization | 796e58da578f7841a060233a8981eb69d92b798b | [
"MIT"
] | 2 | 2020-05-02T16:36:34.000Z | 2021-03-12T17:40:02.000Z | sim2net/propagation/__init__.py | harikuts/dsr_optimization | 796e58da578f7841a060233a8981eb69d92b798b | [
"MIT"
] | 6 | 2015-09-09T00:00:22.000Z | 2020-05-29T20:18:31.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Michal Kalewski <mkalewski at cs.put.poznan.pl>
#
# This file is a part of the Simple Network Simulator (sim2net) project.
# USE, MODIFICATION, COPYING AND DISTRIBUTION OF THIS SOFTWARE IS SUBJECT TO
# THE TERMS AND CONDITIONS OF THE MIT LICENSE. YOU SHOULD HAVE RECEIVED A COPY
# OF THE MIT LICENSE ALONG WITH THIS SOFTWARE; IF NOT, YOU CAN DOWNLOAD A COPY
# FROM HTTP://WWW.OPENSOURCE.ORG/.
#
# For bug reports, feature and support requests please visit
# <https://github.com/mkalewski/sim2net/issues>.
"""
This package provides a collection of wireless signal propagation model
classes.
A wireless transmission may be distorted by many effects such as free-space
loss, refraction, diffraction, reflection or absorption. Therefore, wireless
propagation models describe the influence of environment on signal quality
(mainly as a function of frequency, distance or other conditions) and calculate
the **signal-to-noise ratio** (*SNR*) at the receiver. Then, it is assumed
that if the SNR value is higher than some prescribed threshold, the signal can
be received, and the packet that is carried by the signal can be successfully
received if the receiving node remains connected in this way with the sending
node at least for the duration of that packet transmission.
"""
__docformat__ = 'reStructuredText'
__all__ = ['path_loss']
| 39.971429 | 79 | 0.772695 |
__docformat__ = 'reStructuredText'
__all__ = ['path_loss']
| true | true |
f73d4a3d12fa46ae5bf514d98b31d99a0ff147e8 | 1,169 | py | Python | test/test43_tf_official.py | davidliyutong/Flint | 4e2552dac8d781c21e8998ad68bbf1b986b09258 | [
"MIT"
] | null | null | null | test/test43_tf_official.py | davidliyutong/Flint | 4e2552dac8d781c21e8998ad68bbf1b986b09258 | [
"MIT"
] | 1 | 2020-07-08T02:57:50.000Z | 2020-07-08T02:57:50.000Z | test/test43_tf_official.py | davidliyutong/Flint | 4e2552dac8d781c21e8998ad68bbf1b986b09258 | [
"MIT"
] | null | null | null | from models import Linear3
from core.Optimizers import sgd, bgd
from core.Functions import one_hot_f
import numpy as np
from tensorflow import keras
from core.Dataloader import batch_iterator
def test(model, test_inputs, test_labels):
num_of_sample = test_inputs.shape[0]
cnt_correct, cnt_tot = 0, 0
for i in range(num_of_sample):
test_input = test_inputs[i:i + 1]
test_label = test_labels[i]
res = model.forward_prop(test_input)
if np.argmax(res) == np.argmax(test_label):
cnt_correct += 1
cnt_tot += 1
return cnt_correct / cnt_tot
fashion_mnist = keras.datasets.fashion_mnist
(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()
train_images = np.expand_dims(train_images / 255, axis=-1)
test_images = np.expand_dims(test_images / 255, axis=-1)
train_labels = one_hot_f(train_labels, num_classes=10)
test_labels = one_hot_f(test_labels, num_classes=10)
Linear3.compile()
Linear3.cuda()
train_iterator = batch_iterator(batch_sz=256)
optimizer = bgd(0.01)
optimizer.fit(Linear3, train_images, train_labels, train_iterator, epoch=50)
Linear3.save('Linear3_cuda')
| 31.594595 | 84 | 0.746792 | from models import Linear3
from core.Optimizers import sgd, bgd
from core.Functions import one_hot_f
import numpy as np
from tensorflow import keras
from core.Dataloader import batch_iterator
def test(model, test_inputs, test_labels):
num_of_sample = test_inputs.shape[0]
cnt_correct, cnt_tot = 0, 0
for i in range(num_of_sample):
test_input = test_inputs[i:i + 1]
test_label = test_labels[i]
res = model.forward_prop(test_input)
if np.argmax(res) == np.argmax(test_label):
cnt_correct += 1
cnt_tot += 1
return cnt_correct / cnt_tot
fashion_mnist = keras.datasets.fashion_mnist
(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()
train_images = np.expand_dims(train_images / 255, axis=-1)
test_images = np.expand_dims(test_images / 255, axis=-1)
train_labels = one_hot_f(train_labels, num_classes=10)
test_labels = one_hot_f(test_labels, num_classes=10)
Linear3.compile()
Linear3.cuda()
train_iterator = batch_iterator(batch_sz=256)
optimizer = bgd(0.01)
optimizer.fit(Linear3, train_images, train_labels, train_iterator, epoch=50)
Linear3.save('Linear3_cuda')
| true | true |
f73d4a7c66a667270b365b586bab3d27bc39b886 | 22,957 | py | Python | google/ads/googleads/v7/services/services/bidding_strategy_service/client.py | wxxlouisa/google-ads-python | f24137966f6bfcb765a9b1fae79f2d23041825fe | [
"Apache-2.0"
] | 285 | 2018-10-05T16:47:58.000Z | 2022-03-31T00:58:39.000Z | google/ads/googleads/v7/services/services/bidding_strategy_service/client.py | wxxlouisa/google-ads-python | f24137966f6bfcb765a9b1fae79f2d23041825fe | [
"Apache-2.0"
] | 425 | 2018-09-10T13:32:41.000Z | 2022-03-31T14:50:05.000Z | google/ads/googleads/v7/services/services/bidding_strategy_service/client.py | wxxlouisa/google-ads-python | f24137966f6bfcb765a9b1fae79f2d23041825fe | [
"Apache-2.0"
] | 369 | 2018-11-28T07:01:00.000Z | 2022-03-28T09:53:22.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v7.resources.types import bidding_strategy
from google.ads.googleads.v7.services.types import bidding_strategy_service
from google.rpc import status_pb2 as status # type: ignore
from .transports.base import (
BiddingStrategyServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import BiddingStrategyServiceGrpcTransport
class BiddingStrategyServiceClientMeta(type):
"""Metaclass for the BiddingStrategyService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[BiddingStrategyServiceTransport]]
_transport_registry["grpc"] = BiddingStrategyServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[BiddingStrategyServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class BiddingStrategyServiceClient(metaclass=BiddingStrategyServiceClientMeta):
"""Service to manage bidding strategies."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
BiddingStrategyServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
BiddingStrategyServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
    @property
    def transport(self) -> BiddingStrategyServiceTransport:
        """Return the transport used by the client instance.

        Returns:
            BiddingStrategyServiceTransport: The transport used by the client instance.
        """
        # Set exactly once in __init__ (all branches assign self._transport);
        # there is no setter, so the transport cannot be swapped afterwards.
        return self._transport
@staticmethod
def bidding_strategy_path(
customer_id: str, bidding_strategy_id: str,
) -> str:
"""Return a fully-qualified bidding_strategy string."""
return "customers/{customer_id}/biddingStrategies/{bidding_strategy_id}".format(
customer_id=customer_id, bidding_strategy_id=bidding_strategy_id,
)
@staticmethod
def parse_bidding_strategy_path(path: str) -> Dict[str, str]:
"""Parse a bidding_strategy path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/biddingStrategies/(?P<bidding_strategy_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Return a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
    def __init__(
        self,
        *,
        credentials: Optional[credentials.Credentials] = None,
        transport: Union[str, BiddingStrategyServiceTransport, None] = None,
        client_options: Optional[client_options_lib.ClientOptions] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the bidding strategy service client.
        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, ~.BiddingStrategyServiceTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # ``client_options`` may be supplied as a plain dict; normalize to
        # a ClientOptions instance before reading any of its properties.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # Create SSL credentials for mutual TLS if needed.
        # NOTE(review): strtobool raises ValueError for anything other than
        # the usual true/false spellings — confirm callers only set
        # GOOGLE_API_USE_CLIENT_CERTIFICATE to "true"/"false".
        use_client_cert = bool(
            util.strtobool(
                os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
            )
        )
        ssl_credentials = None
        is_mtls = False
        if use_client_cert:
            if client_options.client_cert_source:
                # Caller supplied an explicit cert callback.
                import grpc  # type: ignore

                cert, key = client_options.client_cert_source()
                ssl_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
                is_mtls = True
            else:
                # Fall back to the default SSL client certificate, if any
                # (see the docstring above).
                creds = SslCredentials()
                is_mtls = creds.is_mtls
                ssl_credentials = creds.ssl_credentials if is_mtls else None
        # Figure out which api endpoint to use.
        # An explicit ``api_endpoint`` always wins over the environment
        # variable; GOOGLE_API_USE_MTLS_ENDPOINT only applies otherwise.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        else:
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
            if use_mtls_env == "never":
                api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                api_endpoint = (
                    self.DEFAULT_MTLS_ENDPOINT
                    if is_mtls
                    else self.DEFAULT_ENDPOINT
                )
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
                )
        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, BiddingStrategyServiceTransport):
            # transport is a BiddingStrategyServiceTransport instance.
            if credentials:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            self._transport = transport
        elif isinstance(transport, str):
            # A transport label (e.g. "grpc"). Note this branch always uses
            # DEFAULT_ENDPOINT and ignores the api_endpoint computed above.
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials, host=self.DEFAULT_ENDPOINT
            )
        else:
            self._transport = BiddingStrategyServiceGrpcTransport(
                credentials=credentials,
                host=api_endpoint,
                ssl_channel_credentials=ssl_credentials,
                client_info=client_info,
            )
def get_bidding_strategy(
self,
request: bidding_strategy_service.GetBiddingStrategyRequest = None,
*,
resource_name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> bidding_strategy.BiddingStrategy:
r"""Returns the requested bidding strategy in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Args:
request (:class:`google.ads.googleads.v7.services.types.GetBiddingStrategyRequest`):
The request object.
Request message for
[BiddingStrategyService.GetBiddingStrategy][google.ads.googleads.v7.services.BiddingStrategyService.GetBiddingStrategy].
resource_name (:class:`str`):
Required. The resource name of the
bidding strategy to fetch.
This corresponds to the ``resource_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v7.resources.types.BiddingStrategy:
A bidding strategy.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([resource_name]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a bidding_strategy_service.GetBiddingStrategyRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, bidding_strategy_service.GetBiddingStrategyRequest
):
request = bidding_strategy_service.GetBiddingStrategyRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if resource_name is not None:
request.resource_name = resource_name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.get_bidding_strategy
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("resource_name", request.resource_name),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
def mutate_bidding_strategies(
self,
request: bidding_strategy_service.MutateBiddingStrategiesRequest = None,
*,
customer_id: str = None,
operations: Sequence[
bidding_strategy_service.BiddingStrategyOperation
] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> bidding_strategy_service.MutateBiddingStrategiesResponse:
r"""Creates, updates, or removes bidding strategies. Operation
statuses are returned.
List of thrown errors: `AdxError <>`__
`AuthenticationError <>`__ `AuthorizationError <>`__
`BiddingError <>`__ `BiddingStrategyError <>`__
`ContextError <>`__ `DatabaseError <>`__ `DateError <>`__
`DistinctError <>`__ `FieldError <>`__ `FieldMaskError <>`__
`HeaderError <>`__ `IdError <>`__ `InternalError <>`__
`MutateError <>`__ `NewResourceCreationError <>`__
`NotEmptyError <>`__ `NullError <>`__
`OperationAccessDeniedError <>`__ `OperatorError <>`__
`QuotaError <>`__ `RangeError <>`__ `RequestError <>`__
`SizeLimitError <>`__ `StringFormatError <>`__
`StringLengthError <>`__
Args:
request (:class:`google.ads.googleads.v7.services.types.MutateBiddingStrategiesRequest`):
The request object.
Request message for
[BiddingStrategyService.MutateBiddingStrategies][google.ads.googleads.v7.services.BiddingStrategyService.MutateBiddingStrategies].
customer_id (:class:`str`):
Required. The ID of the customer
whose bidding strategies are being
modified.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
operations (:class:`Sequence[google.ads.googleads.v7.services.types.BiddingStrategyOperation]`):
Required. The list of operations to
perform on individual bidding
strategies.
This corresponds to the ``operations`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v7.services.types.MutateBiddingStrategiesResponse:
Response message for bidding strategy
mutate.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([customer_id, operations]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a bidding_strategy_service.MutateBiddingStrategiesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, bidding_strategy_service.MutateBiddingStrategiesRequest
):
request = bidding_strategy_service.MutateBiddingStrategiesRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if operations is not None:
request.operations = operations
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.mutate_bidding_strategies
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
__all__ = ("BiddingStrategyServiceClient",)
| 41.588768 | 146 | 0.633271 |
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials
from google.auth.transport import mtls
from google.auth.transport.grpc import SslCredentials
from google.auth.exceptions import MutualTLSChannelError
from google.oauth2 import service_account
from google.ads.googleads.v7.resources.types import bidding_strategy
from google.ads.googleads.v7.services.types import bidding_strategy_service
from google.rpc import status_pb2 as status
from .transports.base import (
BiddingStrategyServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import BiddingStrategyServiceGrpcTransport
class BiddingStrategyServiceClientMeta(type):
_transport_registry = (
OrderedDict()
)
_transport_registry["grpc"] = BiddingStrategyServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[BiddingStrategyServiceTransport]:
if label:
return cls._transport_registry[label]
return next(iter(cls._transport_registry.values()))
class BiddingStrategyServiceClient(metaclass=BiddingStrategyServiceClientMeta):
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> BiddingStrategyServiceTransport:
return self._transport
@staticmethod
def bidding_strategy_path(
customer_id: str, bidding_strategy_id: str,
) -> str:
return "customers/{customer_id}/biddingStrategies/{bidding_strategy_id}".format(
customer_id=customer_id, bidding_strategy_id=bidding_strategy_id,
)
@staticmethod
def parse_bidding_strategy_path(path: str) -> Dict[str, str]:
m = re.match(
r"^customers/(?P<customer_id>.+?)/biddingStrategies/(?P<bidding_strategy_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[credentials.Credentials] = None,
transport: Union[str, BiddingStrategyServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = bool(
util.strtobool(
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
)
)
ssl_credentials = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
import grpc
cert, key = client_options.client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
is_mtls = True
else:
creds = SslCredentials()
is_mtls = creds.is_mtls
ssl_credentials = creds.ssl_credentials if is_mtls else None
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
if isinstance(transport, BiddingStrategyServiceTransport):
if credentials:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
self._transport = transport
elif isinstance(transport, str):
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials, host=self.DEFAULT_ENDPOINT
)
else:
self._transport = BiddingStrategyServiceGrpcTransport(
credentials=credentials,
host=api_endpoint,
ssl_channel_credentials=ssl_credentials,
client_info=client_info,
)
def get_bidding_strategy(
self,
request: bidding_strategy_service.GetBiddingStrategyRequest = None,
*,
resource_name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> bidding_strategy.BiddingStrategy:
if request is not None and any([resource_name]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
if not isinstance(
request, bidding_strategy_service.GetBiddingStrategyRequest
):
request = bidding_strategy_service.GetBiddingStrategyRequest(
request
)
if resource_name is not None:
request.resource_name = resource_name
rpc = self._transport._wrapped_methods[
self._transport.get_bidding_strategy
]
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("resource_name", request.resource_name),)
),
)
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
return response
def mutate_bidding_strategies(
self,
request: bidding_strategy_service.MutateBiddingStrategiesRequest = None,
*,
customer_id: str = None,
operations: Sequence[
bidding_strategy_service.BiddingStrategyOperation
] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> bidding_strategy_service.MutateBiddingStrategiesResponse:
if request is not None and any([customer_id, operations]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
if not isinstance(
request, bidding_strategy_service.MutateBiddingStrategiesRequest
):
request = bidding_strategy_service.MutateBiddingStrategiesRequest(
request
)
if customer_id is not None:
request.customer_id = customer_id
if operations is not None:
request.operations = operations
rpc = self._transport._wrapped_methods[
self._transport.mutate_bidding_strategies
]
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
return response
__all__ = ("BiddingStrategyServiceClient",)
| true | true |
f73d4b482fd19575a1dadbb4b98d39c28bb1748b | 2,428 | py | Python | middleware.py | sesh/django-middleware | 34715542c0fa2a85df9f888e8b448f100da7639b | [
"Unlicense"
] | null | null | null | middleware.py | sesh/django-middleware | 34715542c0fa2a85df9f888e8b448f100da7639b | [
"Unlicense"
] | null | null | null | middleware.py | sesh/django-middleware | 34715542c0fa2a85df9f888e8b448f100da7639b | [
"Unlicense"
] | null | null | null | """
This is free and unencumbered software released into the public domain.
https://github.com/sesh/django-middleware
"""
import logging
logger = logging.getLogger("django")
def set_remote_addr(get_response):
    """Middleware factory: trust the proxy-set ``X-Real-IP`` header.

    Overwrites ``REMOTE_ADDR`` with ``HTTP_X_REAL_IP`` when the header is
    present, so downstream code sees the client address instead of the
    reverse proxy's.

    Fix: the original passed ``request.META["REMOTE_ADDR"]`` as the eager
    default of ``.get()``, which raised KeyError on any request missing
    REMOTE_ADDR (e.g. some test clients) even when the header was present.
    """

    def middleware(request):
        real_ip = request.META.get("HTTP_X_REAL_IP")
        if real_ip is not None:
            request.META["REMOTE_ADDR"] = real_ip
        response = get_response(request)
        return response

    return middleware
def permissions_policy(get_response):
    """Attach a restrictive ``Permissions-Policy`` header to every response.

    Opts the site out of interest-cohort (FLoC) calculation and disables
    microphone, camera and autoplay.
    """

    def middleware(request):
        response = get_response(request)
        response.headers["Permissions-Policy"] = (
            "interest-cohort=(),microphone=(),camera=(),autoplay=()"
        )
        return response

    return middleware
def csp(get_response):
    """Attach a locked-down ``Content-Security-Policy`` header.

    Defaults everything to 'none' and then allows same-origin scripts,
    styles, images, frames and form submissions only.
    """

    def middleware(request):
        response = get_response(request)
        response.headers["Content-Security-Policy"] = (
            "default-src 'none'; script-src 'self'; style-src 'self'; "
            "img-src 'self'; child-src 'self'; form-action 'self'"
        )
        return response

    return middleware
def xss_protect(get_response):
    """Attach the legacy ``X-XSS-Protection: 1; mode=block`` header."""

    def middleware(request):
        response = get_response(request)
        response.headers["X-XSS-Protection"] = "1; mode=block"
        return response

    return middleware
def expect_ct(get_response):
    """Attach an enforcing ``Expect-CT`` header to every response.

    Fix: the ``max-age`` directive takes a value in *seconds* (delta-seconds
    per RFC 9163); the previous value ``30m`` was not valid syntax, so
    compliant user agents would reject the directive. 1800 seconds == 30
    minutes, preserving the original intent.
    """

    def middleware(request):
        response = get_response(request)
        response.headers["Expect-CT"] = "enforce, max-age=1800"
        return response

    return middleware
def cache(get_response):
    """Give safe (GET/HEAD) responses a short default ``Cache-Control``.

    Any ``Cache-Control`` value the view already set is left untouched;
    other request methods are never given a cache header.
    """

    def middleware(request):
        response = get_response(request)
        is_safe = request.method in ("GET", "HEAD")
        if is_safe and "Cache-Control" not in response.headers:
            response.headers["Cache-Control"] = "max-age=10"
        return response

    return middleware
def corp_coop_coep(get_response):
    """Attach the cross-origin isolation trio of headers.

    Sets Cross-Origin-Resource-Policy and Cross-Origin-Opener-Policy to
    ``same-origin`` and Cross-Origin-Embedder-Policy to ``require-corp``.
    """

    def middleware(request):
        response = get_response(request)
        for header, value in (
            ("Cross-Origin-Resource-Policy", "same-origin"),
            ("Cross-Origin-Opener-Policy", "same-origin"),
            ("Cross-Origin-Embedder-Policy", "require-corp"),
        ):
            response.headers[header] = value
        return response

    return middleware
def dns_prefetch(get_response):
    """Attach ``X-DNS-Prefetch-Control: off`` to every response."""

    def middleware(request):
        response = get_response(request)
        response.headers["X-DNS-Prefetch-Control"] = "off"
        return response

    return middleware
| 25.291667 | 123 | 0.646211 |
import logging
logger = logging.getLogger("django")
def set_remote_addr(get_response):
def middleware(request):
request.META["REMOTE_ADDR"] = request.META.get(
"HTTP_X_REAL_IP", request.META["REMOTE_ADDR"]
)
response = get_response(request)
return response
return middleware
def permissions_policy(get_response):
def middleware(request):
response = get_response(request)
response.headers[
"Permissions-Policy"
] = "interest-cohort=(),microphone=(),camera=(),autoplay=()"
return response
return middleware
def csp(get_response):
def middleware(request):
response = get_response(request)
response.headers[
"Content-Security-Policy"
] = "default-src 'none'; script-src 'self'; style-src 'self'; img-src 'self'; child-src 'self'; form-action 'self'"
return response
return middleware
def xss_protect(get_response):
def middleware(request):
response = get_response(request)
response.headers["X-XSS-Protection"] = "1; mode=block"
return response
return middleware
def expect_ct(get_response):
def middleware(request):
response = get_response(request)
response.headers[
"Expect-CT"
] = 'enforce, max-age=30m'
return response
return middleware
def cache(get_response):
def middleware(request):
response = get_response(request)
if (
request.method in ["GET", "HEAD"]
and "Cache-Control" not in response.headers
):
response.headers["Cache-Control"] = "max-age=10"
return response
return middleware
def corp_coop_coep(get_response):
def middleware(request):
response = get_response(request)
response.headers["Cross-Origin-Resource-Policy"] = "same-origin"
response.headers["Cross-Origin-Opener-Policy"] = "same-origin"
response.headers["Cross-Origin-Embedder-Policy"] = "require-corp"
return response
return middleware
def dns_prefetch(get_response):
def middleware(request):
response = get_response(request)
response.headers["X-DNS-Prefetch-Control"] = "off"
return response
return middleware
| true | true |
f73d4b5b57293171dab37f88fec8f771af568ea4 | 735 | py | Python | swaggerpy/exception.py | bkbarry/swagger-py | 381b6ab2ac64ee52148c2d99e7cd2e49e8b5addd | [
"BSD-3-Clause"
] | null | null | null | swaggerpy/exception.py | bkbarry/swagger-py | 381b6ab2ac64ee52148c2d99e7cd2e49e8b5addd | [
"BSD-3-Clause"
] | null | null | null | swaggerpy/exception.py | bkbarry/swagger-py | 381b6ab2ac64ee52148c2d99e7cd2e49e8b5addd | [
"BSD-3-Clause"
] | 1 | 2021-03-29T08:07:59.000Z | 2021-03-29T08:07:59.000Z | #!/usr/bin/env python
#
# Copyright (c) 2014, Yelp, Inc.
#
class HTTPError(IOError):
    """IOError carrying the HTTP ``response`` and originating ``request``.

    Both are optional keyword arguments.  When only a response is given,
    the request is recovered from ``response.request`` if that attribute
    exists.
    """

    def __init__(self, *args, **kwargs):
        self.response = kwargs.pop('response', None)
        # Populate request either from kwargs or from the response.
        self.request = kwargs.pop('request', None)
        if (
            self.response is not None
            and not self.request
            and hasattr(self.response, 'request')
        ):
            self.request = self.response.request
        super().__init__(*args, **kwargs)
class CancelledError(Exception):
    """Raised when ``result()`` is called on an HTTPFuture whose call was
    actually cancelled.

    Fix: the class previously did not derive from ``Exception``; in
    Python 3 raising a non-BaseException instance fails with a TypeError,
    so ``raise CancelledError()`` could never work.  Deriving from
    ``Exception`` keeps any existing ``except CancelledError`` handlers
    valid.
    """
| 28.269231 | 64 | 0.636735 |
class HTTPError(IOError):
def __init__(self, *args, **kwargs):
response = kwargs.pop('response', None)
self.response = response
self.request = kwargs.pop('request', None)
if(response is not None and not self.request and
hasattr(response, 'request')):
self.request = self.response.request
super(HTTPError, self).__init__(*args, **kwargs)
class CancelledError():
| true | true |
f73d4b7b9eeeb895c85c373cde031cd8fcedefda | 2,549 | py | Python | fn_datatable_utils/tests/test_dt_utils_get_row.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 65 | 2017-12-04T13:58:32.000Z | 2022-03-24T18:33:17.000Z | fn_datatable_utils/tests/test_dt_utils_get_row.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 48 | 2018-03-02T19:17:14.000Z | 2022-03-09T22:00:38.000Z | fn_datatable_utils/tests/test_dt_utils_get_row.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 95 | 2018-01-11T16:23:39.000Z | 2022-03-21T11:34:29.000Z | # (c) Copyright IBM Corp. 2019. All Rights Reserved.
# -*- coding: utf-8 -*-
"""Tests using pytest_resilient_circuits"""
from __future__ import print_function
import pytest
from resilient_circuits.util import get_config_data, get_function_definition
from resilient_circuits import SubmitTestFunction, FunctionResult
from tests.test_helper import *
PACKAGE_NAME = "fn_datatable_utils"
FUNCTION_NAME = "dt_utils_get_row"
# Read the default configuration-data section from the package
config_data = get_config_data(PACKAGE_NAME)
# Provide a simulation of the Resilient REST API (uncomment to connect to a real appliance)
resilient_mock = DTResilientMock
def call_dt_utils_get_row_function(circuits, function_params, timeout=10):
    """Fire a ``dt_utils_get_row`` function message and wait for its result.

    Args:
        circuits: the pytest-resilient-circuits app fixture.
        function_params: dict of inputs for the function.
        timeout: seconds to wait for the result event.

    Returns:
        The function's result payload (``FunctionResult.value``).
    """
    submitted = SubmitTestFunction("dt_utils_get_row", function_params)
    circuits.manager.fire(submitted)
    result_event = circuits.watcher.wait(
        "dt_utils_get_row_result", parent=submitted, timeout=timeout
    )
    assert result_event
    assert isinstance(result_event.kwargs["result"], FunctionResult)
    pytest.wait_for(result_event, "complete", True)
    return result_event.kwargs["result"].value
class TestDtUtilsGetRow:
    """ Tests for the dt_utils_get_row function"""

    def test_function_definition(self):
        """ Test that the package provides customization_data that defines the function """
        func = get_function_definition(PACKAGE_NAME, FUNCTION_NAME)
        assert func is not None

    # Function inputs: look up the row whose dt_col_email cell matches the
    # search value; no explicit row id is supplied.
    inputs = {
        "incident_id": 1001,
        "dt_utils_datatable_api_name": "mock_data_table",
        "dt_utils_row_id": None,
        "dt_utils_search_column": "dt_col_email",
        "dt_utils_search_value": "joe@example.com"
    }
    # Expected output: success flag, the echoed inputs, and the matched
    # mock-table row with all four of its cells.
    output = {
        "success": True,
        "inputs": inputs,
        "row": {
            u'id': 1,
            u'cells': {
                u'dt_col_email': { u'id': u'dt_col_email', u'row_id': 1, u'value': u'joe@example.com' },
                u'dt_col_id': { u'id': u'dt_col_id', u'row_id': 1, u'value': 3001 },
                u'dt_col_name': {u'id': u'dt_col_name', u'row_id': 1, u'value': u'Joe Blogs' },
                u'dt_col_status': {u'id': u'dt_col_status', u'row_id': 1, u'value': u'In Progress'}
            }
        }
    }

    @pytest.mark.parametrize("inputs, expected_results", [(inputs, output)])
    def test_success(self, circuits_app, inputs, expected_results):
        """ Test calling with sample values for the parameters """
        results = call_dt_utils_get_row_function(circuits_app, inputs)
        assert(expected_results == results)
| 37.485294 | 104 | 0.683798 |
from __future__ import print_function
import pytest
from resilient_circuits.util import get_config_data, get_function_definition
from resilient_circuits import SubmitTestFunction, FunctionResult
from tests.test_helper import *
PACKAGE_NAME = "fn_datatable_utils"
FUNCTION_NAME = "dt_utils_get_row"
config_data = get_config_data(PACKAGE_NAME)
resilient_mock = DTResilientMock
def call_dt_utils_get_row_function(circuits, function_params, timeout=10):
evt = SubmitTestFunction("dt_utils_get_row", function_params)
circuits.manager.fire(evt)
event = circuits.watcher.wait("dt_utils_get_row_result", parent=evt, timeout=timeout)
assert event
assert isinstance(event.kwargs["result"], FunctionResult)
pytest.wait_for(event, "complete", True)
return event.kwargs["result"].value
class TestDtUtilsGetRow:
def test_function_definition(self):
func = get_function_definition(PACKAGE_NAME, FUNCTION_NAME)
assert func is not None
inputs = {
"incident_id": 1001,
"dt_utils_datatable_api_name": "mock_data_table",
"dt_utils_row_id": None,
"dt_utils_search_column": "dt_col_email",
"dt_utils_search_value": "joe@example.com"
}
output = {
"success": True,
"inputs": inputs,
"row": {
u'id': 1,
u'cells': {
u'dt_col_email': { u'id': u'dt_col_email', u'row_id': 1, u'value': u'joe@example.com' },
u'dt_col_id': { u'id': u'dt_col_id', u'row_id': 1, u'value': 3001 },
u'dt_col_name': {u'id': u'dt_col_name', u'row_id': 1, u'value': u'Joe Blogs' },
u'dt_col_status': {u'id': u'dt_col_status', u'row_id': 1, u'value': u'In Progress'}
}
}
}
@pytest.mark.parametrize("inputs, expected_results", [(inputs, output)])
def test_success(self, circuits_app, inputs, expected_results):
results = call_dt_utils_get_row_function(circuits_app, inputs)
assert(expected_results == results)
| true | true |
f73d4c09c135a153cfe3787d0d0c6b2d28e44156 | 111 | py | Python | mldato/timeseries.py | Ryanfras/mldato | 34570412eb909c1d1c99b2a5ee15e7907aa1922b | [
"Apache-2.0"
] | null | null | null | mldato/timeseries.py | Ryanfras/mldato | 34570412eb909c1d1c99b2a5ee15e7907aa1922b | [
"Apache-2.0"
] | 2 | 2021-09-28T03:15:55.000Z | 2022-02-26T08:19:41.000Z | mldato/timeseries.py | Ryanfras/mldato | 34570412eb909c1d1c99b2a5ee15e7907aa1922b | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/01_timeseries.ipynb (unless otherwise specified).
__all__ = [] | 37 | 97 | 0.756757 |
__all__ = [] | true | true |
f73d4c3922ed02c72cc9a6d52ead9ac8b5941e6b | 728 | py | Python | tests/layers/test_reshape_layer.py | vishalbelsare/neupy | 684313cdaddcad326f2169384fb15ec3aa29d991 | [
"MIT"
] | null | null | null | tests/layers/test_reshape_layer.py | vishalbelsare/neupy | 684313cdaddcad326f2169384fb15ec3aa29d991 | [
"MIT"
] | null | null | null | tests/layers/test_reshape_layer.py | vishalbelsare/neupy | 684313cdaddcad326f2169384fb15ec3aa29d991 | [
"MIT"
] | null | null | null | import numpy as np
from neupy import layers
from base import BaseTestCase
class ReshapeLayerTestCase(BaseTestCase):
    """Tests for the neupy Reshape layer's output shapes."""
    def test_reshape_layer_1d_shape(self):
        """Reshape() with no target shape flattens each sample to 1-D."""
        x = np.random.random((5, 4, 3, 2, 1))
        input_layer = layers.Input((4, 3, 2, 1))
        reshape_layer = layers.Reshape()
        # '>' connects layers into a graph (neupy's connection operator).
        input_layer > reshape_layer
        y = reshape_layer.output(x).eval()
        # Batch dimension (5) preserved; trailing dims collapsed.
        self.assertEqual(y.shape, (5, 4 * 3 * 2 * 1))
    def test_reshape_layer_2d_shape(self):
        """Reshape((4, 5)) reshapes each 20-feature sample to 4x5."""
        x = np.random.random((5, 20))
        input_layer = layers.Input(20)
        reshape_layer = layers.Reshape((4, 5))
        input_layer > reshape_layer
        y = reshape_layer.output(x).eval()
        self.assertEqual(y.shape, (5, 4, 5))
| 26 | 53 | 0.627747 | import numpy as np
from neupy import layers
from base import BaseTestCase
class ReshapeLayerTestCase(BaseTestCase):
def test_reshape_layer_1d_shape(self):
x = np.random.random((5, 4, 3, 2, 1))
input_layer = layers.Input((4, 3, 2, 1))
reshape_layer = layers.Reshape()
input_layer > reshape_layer
y = reshape_layer.output(x).eval()
self.assertEqual(y.shape, (5, 4 * 3 * 2 * 1))
def test_reshape_layer_2d_shape(self):
x = np.random.random((5, 20))
input_layer = layers.Input(20)
reshape_layer = layers.Reshape((4, 5))
input_layer > reshape_layer
y = reshape_layer.output(x).eval()
self.assertEqual(y.shape, (5, 4, 5))
| true | true |
f73d4cb93984c1c76d35fe99d800d82e95c2535f | 479 | py | Python | djangoautoconf/log_utils/log_path_utils.py | weijia/djangoautoconf | 590acfdcc6a3e051a2048ba1dbf980f908a7af91 | [
"BSD-3-Clause"
] | null | null | null | djangoautoconf/log_utils/log_path_utils.py | weijia/djangoautoconf | 590acfdcc6a3e051a2048ba1dbf980f908a7af91 | [
"BSD-3-Clause"
] | null | null | null | djangoautoconf/log_utils/log_path_utils.py | weijia/djangoautoconf | 590acfdcc6a3e051a2048ba1dbf980f908a7af91 | [
"BSD-3-Clause"
] | null | null | null | import os
from ufs_tools.folder_tool import ensure_dir
def get_log_file_path(folder, log_file_name, ext=".log"):
    """Return the full path of a log file under ``<cwd>/logs/<folder>/``,
    creating the directory if needed.

    :param folder: sub-folder name under the ``logs`` directory
    :param log_file_name: base name of the log file, without extension
    :param ext: file extension, including the leading dot
    :return: full path ``<cwd>/logs/<folder>/<log_file_name><ext>``;
        the directory is ensured to exist, the file itself is not created
    """
    log_filename = '%s%s' % (log_file_name, ext)
    # os.getcwd() is already a usable path; the original wrapped it in a
    # redundant single-argument os.path.join().
    folder_log_full_path = os.path.join(os.getcwd(), 'logs', folder)
    ensure_dir(folder_log_full_path)
    return os.path.join(folder_log_full_path, log_filename)
| 34.214286 | 78 | 0.762004 | import os
from ufs_tools.folder_tool import ensure_dir
def get_log_file_path(folder, log_file_name, ext=".log"):
log_folder_relative_path = os.path.join('logs', folder)
log_filename = '%s%s' % (log_file_name, ext)
current_dir = os.path.join(os.getcwd())
folder_log_full_path = os.path.join(current_dir, log_folder_relative_path)
log_full_path = os.path.join(folder_log_full_path, log_filename)
ensure_dir(folder_log_full_path)
return log_full_path
| true | true |
f73d4d2f0ece1918c84c49ba7b4a5afc1b2d4427 | 946 | py | Python | demos/helloworld/MainWindow.py | hsoft/xibless | a7393d28b4a31698869b2203d4d8b3398de1de7f | [
"BSD-3-Clause"
] | 2 | 2016-09-13T12:34:34.000Z | 2018-05-15T01:06:53.000Z | demos/helloworld/MainWindow.py | hsoft/xibless | a7393d28b4a31698869b2203d4d8b3398de1de7f | [
"BSD-3-Clause"
] | null | null | null | demos/helloworld/MainWindow.py | hsoft/xibless | a7393d28b4a31698869b2203d4d8b3398de1de7f | [
"BSD-3-Clause"
] | null | null | null | ownerclass = 'AppDelegate'
ownerimport = 'AppDelegate.h'
# Init
result = Window(330, 110, "Tell me your name!")
result.xProportion = 0.8
result.yProportion = 0.2
result.canResize = False
nameLabel = Label(result, text="Name:")
nameLabel.width = 45
nameField = TextField(result, text="")
helloLabel = Label(result, text="")
button = Button(result, title="Say Hello", action=Action(owner, 'sayHello'))
button.keyEquivalent = "\\r"
# Owner Assignments
owner.nameField = nameField
owner.helloLabel = helloLabel
result.initialFirstResponder = nameField
# Layout
nameLabel.moveTo(Pack.UpperLeft)
nameField.moveNextTo(nameLabel, Pack.Right, Pack.Middle)
nameField.fill(Pack.Right)
helloLabel.moveNextTo(nameLabel, Pack.Below, Pack.Left)
helloLabel.fill(Pack.Right)
button.moveNextTo(helloLabel, Pack.Below, Pack.Right)
nameField.setAnchor(Pack.UpperLeft, growX=True)
helloLabel.setAnchor(Pack.UpperLeft, growX=True)
button.setAnchor(Pack.UpperRight)
| 30.516129 | 76 | 0.77907 | ownerclass = 'AppDelegate'
ownerimport = 'AppDelegate.h'
result = Window(330, 110, "Tell me your name!")
result.xProportion = 0.8
result.yProportion = 0.2
result.canResize = False
nameLabel = Label(result, text="Name:")
nameLabel.width = 45
nameField = TextField(result, text="")
helloLabel = Label(result, text="")
button = Button(result, title="Say Hello", action=Action(owner, 'sayHello'))
button.keyEquivalent = "\\r"
owner.nameField = nameField
owner.helloLabel = helloLabel
result.initialFirstResponder = nameField
nameLabel.moveTo(Pack.UpperLeft)
nameField.moveNextTo(nameLabel, Pack.Right, Pack.Middle)
nameField.fill(Pack.Right)
helloLabel.moveNextTo(nameLabel, Pack.Below, Pack.Left)
helloLabel.fill(Pack.Right)
button.moveNextTo(helloLabel, Pack.Below, Pack.Right)
nameField.setAnchor(Pack.UpperLeft, growX=True)
helloLabel.setAnchor(Pack.UpperLeft, growX=True)
button.setAnchor(Pack.UpperRight)
| true | true |
f73d4d80f07db13a00172fc292bc7b7313bdd9c1 | 904 | py | Python | tests/test.py | rojaster/mdec | 6f338f8838003bab3bf9242063e2531bd5c8e2b4 | [
"MIT"
] | null | null | null | tests/test.py | rojaster/mdec | 6f338f8838003bab3bf9242063e2531bd5c8e2b4 | [
"MIT"
] | null | null | null | tests/test.py | rojaster/mdec | 6f338f8838003bab3bf9242063e2531bd5c8e2b4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import unittest
import subprocess
import tempfile
import requests
import os
TEST_ROOT = os.path.abspath(os.path.dirname(__file__))
class ServicesTest(unittest.TestCase):
    """Integration tests hitting the locally deployed decompiler services."""
    @staticmethod
    def _test_service(service_name: str, binary_path: str):
        """POST the binary at *binary_path* to the service's /decompile
        endpoint on 127.0.0.1 and raise on any non-200 response."""
        print(f'Testing {service_name}')
        with open(binary_path, 'rb') as f:
            r = requests.post(f'http://127.0.0.1/{service_name}/decompile',
                files={'file': f})
        if r.status_code != 200:
            r.raise_for_status()
    def test_all_services(self):
        """Compile a small C program, then decompile it with every service."""
        with tempfile.TemporaryDirectory() as working_dir:
            src_path = os.path.join(TEST_ROOT, 'src', 'fib.c')
            bin_path = os.path.join(working_dir, 'fib')
            # Requires a local gcc; check=True fails the test on compile error.
            subprocess.run(['gcc', '-o', bin_path, src_path], check=True)
            for service_name in ['angr', 'reko', 'retdec', 'snowman']:
                self._test_service(service_name, bin_path)
if __name__ == '__main__':
unittest.main()
| 26.588235 | 66 | 0.700221 |
import unittest
import subprocess
import tempfile
import requests
import os
TEST_ROOT = os.path.abspath(os.path.dirname(__file__))
class ServicesTest(unittest.TestCase):
@staticmethod
def _test_service(service_name: str, binary_path: str):
print(f'Testing {service_name}')
with open(binary_path, 'rb') as f:
r = requests.post(f'http://127.0.0.1/{service_name}/decompile',
files={'file': f})
if r.status_code != 200:
r.raise_for_status()
def test_all_services(self):
with tempfile.TemporaryDirectory() as working_dir:
src_path = os.path.join(TEST_ROOT, 'src', 'fib.c')
bin_path = os.path.join(working_dir, 'fib')
subprocess.run(['gcc', '-o', bin_path, src_path], check=True)
for service_name in ['angr', 'reko', 'retdec', 'snowman']:
self._test_service(service_name, bin_path)
if __name__ == '__main__':
unittest.main()
| true | true |
f73d4dba28d6b5df37f80d6f3e985f8afbf9b82a | 22,809 | py | Python | pint_server/app.py | SUSE-Enceladus/public-cloud-info-service | bd30aece9599189cd60dbff90c142b6b98cc4724 | [
"Apache-2.0"
] | 3 | 2018-10-15T03:08:18.000Z | 2022-01-28T16:23:40.000Z | pint_server/app.py | SUSE-Enceladus/public-cloud-info-service | bd30aece9599189cd60dbff90c142b6b98cc4724 | [
"Apache-2.0"
] | 67 | 2019-01-05T02:49:12.000Z | 2022-03-03T19:43:43.000Z | pint_server/app.py | SUSE-Enceladus/public-cloud-info-service | bd30aece9599189cd60dbff90c142b6b98cc4724 | [
"Apache-2.0"
] | 8 | 2019-01-07T08:42:06.000Z | 2021-05-18T14:00:48.000Z | # Copyright (c) 2021 SUSE LLC
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 3 of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, contact SUSE LLC.
#
# To contact SUSE about this file by physical or electronic mail,
# you may find current contact information at www.suse.com
import datetime
import re
from decimal import Decimal
from flask import (abort, Flask, jsonify, make_response, request, redirect,
Response)
from flask_cors import CORS
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import text, or_
from sqlalchemy.exc import DataError
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from xml.dom import minidom
import xml.etree.ElementTree as ET
import pint_server
from pint_server.database import init_db
from pint_server.models import (ImageState, AmazonImagesModel,
OracleImagesModel, AlibabaImagesModel,
MicrosoftImagesModel, GoogleImagesModel,
AmazonServersModel, MicrosoftServersModel,
GoogleServersModel, ServerType,
VersionsModel, MicrosoftRegionMapModel)
app = Flask(__name__)
db_session = init_db()
cors_config = {
"origins": ["*"]
}
CORS(app, resources={
r"/*": cors_config
})
# we don't care about modifications as we are doing DB read only
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
null_to_empty = lambda s : s or ''
REGIONSERVER_SMT_MAP = {
'smt': 'update',
'regionserver': 'region',
'regionserver-sap': 'region',
'regionserver-sles': 'region',
'update': 'update',
'region': 'region'
}
REGIONSERVER_SMT_REVERSED_MAP = {
'update': 'smt',
'region': 'regionserver'
}
PROVIDER_IMAGES_MODEL_MAP = {
'amazon': AmazonImagesModel,
'google': GoogleImagesModel,
'microsoft': MicrosoftImagesModel,
'alibaba': AlibabaImagesModel,
'oracle': OracleImagesModel
}
PROVIDER_SERVERS_MODEL_MAP = {
'amazon': AmazonServersModel,
'google': GoogleServersModel,
'microsoft': MicrosoftServersModel
}
PROVIDER_IMAGES_EXCLUDE_ATTRS = {
'microsoft': ['id']
}
PROVIDER_SERVERS_EXCLUDE_ATTRS = {
'amazon': ['id'],
'google': ['id'],
'microsoft': ['id']
}
SUPPORTED_CATEGORIES = ['images', 'servers']
def get_supported_providers():
    """Derive the set of known provider names from the versions table.

    Each versions row names a table such as ``amazonimages`` or
    ``googleservers``; stripping the category suffix leaves the provider.
    """
    rows = VersionsModel.query.with_entities(VersionsModel.tablename)
    provider_names = {re.sub('(servers|images)', '', row.tablename)
                      for row in rows}
    return list(provider_names)
def get_providers():
    """Get all the providers as a list of ``{'name': ...}`` dicts."""
    names = get_supported_providers()
    return list(map(lambda n: {'name': n}, names))
def json_to_xml(json_obj, collection_name, element_name):
    """Serialize *json_obj* to a pretty-printed XML string.

    Three modes, matching the response shapes of this API:

    * ``collection_name`` set: *json_obj* is a list of flat dicts; each
      dict becomes an ``<element_name .../>`` child of the
      ``<collection_name>`` root, with the dict entries as attributes.
    * only ``element_name`` set: *json_obj* is one flat dict rendered as
      a single element whose attributes are the dict's key/value pairs.
    * neither set: *json_obj* must be a single-key dict; the key becomes
      the tag and the value the element text.

    Dict values must be strings (ElementTree attribute requirement).
    """
    if collection_name:
        root = ET.Element(collection_name)
        # One child element per record. (The loop variable used to be
        # named ``dict``, shadowing the builtin.)
        for record in json_obj:
            ET.SubElement(root, element_name, record)
    elif element_name:
        root = ET.Element(element_name, json_obj)
    else:
        # NOTE(gyee): if neither collection_name and element_name are
        # specified, we assume the json_obj has a single key value pair
        # with key as the tag and value as the text
        tag = list(json_obj.keys())[0]
        root = ET.Element(tag)
        root.text = json_obj[tag]
    parsed = minidom.parseString(
        ET.tostring(root, encoding='utf8', method='xml'))
    return parsed.toprettyxml(indent=' ')
def get_formatted_dict(obj, extra_attrs=None, exclude_attrs=None):
    """Convert a SQLAlchemy model instance into a JSON-serializable dict.

    Special handling: Decimal -> float, ImageState -> its string value,
    ServerType -> legacy smt/regionserver name (optionally suffixed with
    the internal "shape"), dates -> YYYYMMDD strings, None -> ''.
    Private attributes (leading underscore) and *exclude_attrs* are
    dropped; *extra_attrs* are merged into the result last.
    """
    obj_dict = {}
    for attr in obj.__dict__.keys():
        # FIXME(gyee): the original Pint server does not return the "changeinfo"
        # or "urn" attribute if it's empty. So we'll need to do the same here.
        # IMHO, I consider that a bug in the original Pint server as we should
        # return all attributes regardless whether its empty or not.
        if attr.lower() in ['urn', 'changeinfo'] and not obj.__dict__[attr]:
            continue
        # NOTE: the "shape" attribute will be processed together with "type"
        # as it is internal only
        if attr.lower() == 'shape':
            continue
        if exclude_attrs and attr in exclude_attrs:
            continue
        elif attr[0] == '_':
            continue
        else:
            value = obj.__dict__[attr]
            if isinstance(value, Decimal):
                obj_dict[attr] = float(value)
            elif isinstance(value, ImageState):
                obj_dict[attr] = obj.state.value
            elif isinstance(value, ServerType):
                # NOTE(gyee): we need to reverse map the server type
                # to make it backward compatible
                if obj.__dict__['shape']:
                    obj_dict[attr] = "%s-%s" % (
                        REGIONSERVER_SMT_REVERSED_MAP[obj.type.value],
                        obj.__dict__['shape'])
                else:
                    obj_dict[attr] = (
                        REGIONSERVER_SMT_REVERSED_MAP[obj.type.value])
            elif isinstance(value, datetime.date):
                obj_dict[attr] = value.strftime('%Y%m%d')
            else:
                obj_dict[attr] = null_to_empty(value)
    if extra_attrs:
        obj_dict.update(extra_attrs)
    return obj_dict
def get_mapped_server_type_for_provider(provider, server_type):
    """Map a legacy server-type name (smt/regionserver*) to the internal
    update/region value, aborting with 404 if the name is unknown or the
    provider does not actually serve that type."""
    if server_type not in REGIONSERVER_SMT_MAP:
        abort(Response('', status=404))
    mapped_server_type = REGIONSERVER_SMT_MAP[server_type]
    # Only providers with a servers table can be validated against the DB.
    if PROVIDER_SERVERS_MODEL_MAP.get(provider):
        server_types_json = get_provider_servers_types(provider)
        server_types = [t['name'] for t in server_types_json]
        if mapped_server_type not in server_types:
            abort(Response('', status=404))
    return mapped_server_type
def get_provider_servers_for_type(provider, server_type):
    """Return all of *provider*'s servers of the given (legacy) type as
    formatted dicts; 404s via the type mapping when the type is invalid."""
    servers = []
    mapped_server_type = get_mapped_server_type_for_provider(
        provider, server_type)
    # NOTE(gyee): currently we don't have DB tables for both Alibaba and
    # Oracle servers. In order to maintain compatibility with the
    # existing Pint server, we are returning an empty list.
    if not PROVIDER_SERVERS_MODEL_MAP.get(provider):
        return servers
    servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.filter(
        PROVIDER_SERVERS_MODEL_MAP[provider].type == mapped_server_type)
    exclude_attrs = PROVIDER_SERVERS_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(server, exclude_attrs=exclude_attrs)
            for server in servers]
def get_provider_servers_types(provider):
    """Return the distinct server types offered by *provider* as
    ``{'name': ...}`` dicts (internal update/region values)."""
    if PROVIDER_SERVERS_MODEL_MAP.get(provider) != None:
        servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.distinct(
            PROVIDER_SERVERS_MODEL_MAP[provider].type)
        return [{'name': server.type.value} for server in servers]
    else:
        # NOTE(gyee): currently we don't have DB tables for both Alibaba and
        # Oracle servers. In order to maintain compatibility with the
        # existing Pint server, we are returning the original server
        # types. In the future, if we do decide to create the tables,
        # then we can easily add them to PROVIDER_SERVERS_MODEL_MAP.
        return [{'name': 'smt'}, {'name': 'regionserver'}]
def get_provider_regions(provider):
    """Return the union of regions seen in *provider*'s servers and images
    tables, preserving first-seen order; Azure is special-cased because its
    regions come from the region-map table."""
    if provider == 'microsoft':
        return _get_all_azure_regions()
    servers = []
    images = []
    region_list = [] # Combination list
    if PROVIDER_SERVERS_MODEL_MAP.get(provider) != None:
        servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.with_entities(
            PROVIDER_SERVERS_MODEL_MAP[provider].region).distinct(
            PROVIDER_SERVERS_MODEL_MAP[provider].region)
    # Some image tables (e.g. global ones) have no region column at all.
    if hasattr(PROVIDER_IMAGES_MODEL_MAP[provider], 'region'):
        images = PROVIDER_IMAGES_MODEL_MAP[provider].query.with_entities(
            PROVIDER_IMAGES_MODEL_MAP[provider].region).distinct(
            PROVIDER_IMAGES_MODEL_MAP[provider].region)
    for server in servers:
        if server.region not in region_list:
            region_list.append(server.region)
    for image in images:
        if image.region not in region_list:
            region_list.append(image.region)
    return [{'name': r } for r in region_list]
def _get_all_azure_regions():
    """Return every known Azure region name, de-duplicated and sorted.

    Both the raw region names and their canonical names from the region
    map are included, since clients may query by either form.
    """
    # A set gives O(1) de-duplication instead of repeated list scans;
    # the result was already sorted, so ordering is unchanged.
    regions = set()
    for environment in MicrosoftRegionMapModel.query.all():
        regions.add(environment.region)
        regions.add(environment.canonicalname)
    return [{'name': r} for r in sorted(regions)]
def _get_azure_servers(region, server_type=None):
    """Return formatted Azure servers for *region* (given by either its raw
    or canonical name), optionally filtered by legacy *server_type*;
    aborts with 404 when the region is unknown."""
    # first lookup canonical name for the given region
    environment = MicrosoftRegionMapModel.query.filter(
        or_(MicrosoftRegionMapModel.region == region,
            MicrosoftRegionMapModel.canonicalname == region)).first()
    if not environment:
        abort(Response('', status=404))
    # then get all the regions with the canonical name
    environments = MicrosoftRegionMapModel.query.filter(
        MicrosoftRegionMapModel.canonicalname == environment.canonicalname)
    # get all the possible names for the region
    all_regions = []
    for environment in environments:
        if environment.region not in all_regions:
            all_regions.append(environment.region)
    # get all the servers for that region
    if server_type:
        mapped_server_type = get_mapped_server_type_for_provider(
            'microsoft', server_type)
        servers = MicrosoftServersModel.query.filter(
            MicrosoftServersModel.type == mapped_server_type,
            MicrosoftServersModel.region.in_(all_regions))
    else:
        servers = MicrosoftServersModel.query.filter(
            MicrosoftServersModel.region.in_(all_regions))
    try:
        exclude_attrs = PROVIDER_SERVERS_EXCLUDE_ATTRS.get('microsoft')
        return [get_formatted_dict(server, exclude_attrs=exclude_attrs)
                for server in servers]
    except DataError:
        # Malformed query input (bad enum/uuid etc.) is treated as not found.
        abort(Response('', status=404))
def _get_azure_images_for_region_state(region, state):
    """Return formatted Azure images for *region*, optionally filtered by
    *state* (``None`` means all states); aborts with 404 when the region
    is unknown. The requested region name is echoed back on each image."""
    # first lookup the environment for the given region
    environment = MicrosoftRegionMapModel.query.filter(
        or_(MicrosoftRegionMapModel.region == region,
            MicrosoftRegionMapModel.canonicalname == region)).first()
    if not environment:
        abort(Response('', status=404))
    # assume the environment is unique per region
    environment_name = environment.environment
    # now pull all the images that matches the environment and state
    if state is None:
        images = MicrosoftImagesModel.query.filter(
            MicrosoftImagesModel.environment == environment_name)
    else:
        images = MicrosoftImagesModel.query.filter(
            MicrosoftImagesModel.environment == environment_name,
            MicrosoftImagesModel.state == state)
    extra_attrs = {'region': region}
    exclude_attrs = PROVIDER_IMAGES_EXCLUDE_ATTRS.get('microsoft')
    try:
        return [get_formatted_dict(image, extra_attrs=extra_attrs,
                                   exclude_attrs=exclude_attrs)
                for image in images]
    except DataError:
        # Invalid state values surface as a DataError from the enum column.
        abort(Response('', status=404))
def get_provider_images_for_region_and_state(provider, region, state):
    """Return formatted images for *provider* limited to *region* and
    *state*; 404 when either is invalid. Azure is delegated to the
    region-map aware helper."""
    images = []
    if provider == 'microsoft':
        return _get_azure_images_for_region_state(region, state)
    region_names = []
    for each in get_provider_regions(provider):
        region_names.append(each['name'])
    if state in ImageState.__members__ and region in region_names:
        # Image tables without a region column are filtered by state only.
        if (hasattr(PROVIDER_IMAGES_MODEL_MAP[provider], 'region')):
            images = PROVIDER_IMAGES_MODEL_MAP[provider].query.filter(
                PROVIDER_IMAGES_MODEL_MAP[provider].region == region,
                PROVIDER_IMAGES_MODEL_MAP[provider].state == state)
        else:
            images = PROVIDER_IMAGES_MODEL_MAP[provider].query.filter(
                PROVIDER_IMAGES_MODEL_MAP[provider].state == state)
        exclude_attrs = PROVIDER_IMAGES_EXCLUDE_ATTRS.get(provider)
        return [get_formatted_dict(image, exclude_attrs=exclude_attrs)
                for image in images]
    else:
        abort(Response('', status=404))
def get_provider_images_for_state(provider, state):
    """Return all of *provider*'s images in lifecycle *state*; aborts
    with 404 when *state* is not a known image state."""
    if state not in ImageState.__members__:
        abort(Response('', status=404))
    model = PROVIDER_IMAGES_MODEL_MAP[provider]
    matches = model.query.filter(model.state == state)
    excluded = PROVIDER_IMAGES_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(image, exclude_attrs=excluded)
            for image in matches]
def get_provider_servers_for_region(provider, region):
    """Return formatted servers for *provider* in *region*; 404 on an
    unknown region. Azure is delegated to the region-map aware helper;
    providers without a servers table yield an empty list."""
    servers = []
    if provider == 'microsoft':
        return _get_azure_servers(region)
    region_names = []
    for each in get_provider_regions(provider):
        region_names.append(each['name'])
    if region in region_names:
        if PROVIDER_SERVERS_MODEL_MAP.get(provider) != None:
            servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.filter(
                PROVIDER_SERVERS_MODEL_MAP[provider].region == region)
    else:
        abort(Response('', status=404))
    exclude_attrs = PROVIDER_SERVERS_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(server, exclude_attrs=exclude_attrs)
            for server in servers]
def get_provider_servers_for_region_and_type(provider, region, server_type):
    """Return formatted servers for *provider* filtered by *region* and
    legacy *server_type*; 404 on an invalid type or region."""
    if provider == 'microsoft':
        return _get_azure_servers(region, server_type)
    servers = []
    # Validates the type (404s if unknown) and maps it to update/region.
    mapped_server_type = get_mapped_server_type_for_provider(
        provider, server_type)
    # NOTE(gyee): for Alibaba and Oracle where we don't have any servers,
    # we are returning an empty list to be backward compatible.
    if not PROVIDER_SERVERS_MODEL_MAP.get(provider):
        return servers
    region_names = []
    for each in get_provider_regions(provider):
        region_names.append(each['name'])
    if region in region_names:
        servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.filter(
            PROVIDER_SERVERS_MODEL_MAP[provider].region == region,
            PROVIDER_SERVERS_MODEL_MAP[provider].type == mapped_server_type)
        exclude_attrs = PROVIDER_SERVERS_EXCLUDE_ATTRS.get(provider)
        return [get_formatted_dict(server, exclude_attrs=exclude_attrs)
                for server in servers]
    else:
        abort(Response('', status=404))
def get_provider_images_for_region(provider, region):
    """Return formatted images for *provider* in *region* across all
    states; 404 on an unknown region. Azure is delegated to the
    region-map aware helper (state=None means all states)."""
    if provider == 'microsoft':
        return _get_azure_images_for_region_state(region, None)
    images = []
    region_names = []
    for each in get_provider_regions(provider):
        region_names.append(each['name'])
    if region in region_names:
        # Image tables without a region column contribute nothing here.
        if hasattr(PROVIDER_IMAGES_MODEL_MAP[provider], 'region'):
            images = PROVIDER_IMAGES_MODEL_MAP[provider].query.filter(
                PROVIDER_IMAGES_MODEL_MAP[provider].region == region)
    else:
        abort(Response('', status=404))
    exclude_attrs = PROVIDER_IMAGES_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(image, exclude_attrs=exclude_attrs)
            for image in images]
def get_provider_servers(provider):
    """Return every server record known for *provider*; providers
    without a servers table yield an empty list."""
    model = PROVIDER_SERVERS_MODEL_MAP.get(provider)
    if model is None:
        return []
    excluded = PROVIDER_SERVERS_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(server, exclude_attrs=excluded)
            for server in model.query.all()]
def get_provider_images(provider):
    """Return every image record known for *provider*."""
    excluded = PROVIDER_IMAGES_EXCLUDE_ATTRS.get(provider)
    all_images = PROVIDER_IMAGES_MODEL_MAP[provider].query.all()
    return [get_formatted_dict(image, exclude_attrs=excluded)
            for image in all_images]
def get_data_version_for_provider_category(provider, category):
    """Look up the data version for the ``<provider><category>`` table
    (e.g. ``amazonimages``); aborts with 404 when no row exists."""
    tablename = provider + category
    try:
        version = VersionsModel.query.filter(
            VersionsModel.tablename == tablename).one()
    except (NoResultFound, MultipleResultsFound):
        # NOTE(gyee): we should never run into MultipleResultsFound exception
        # or otherwise we have data corruption problem in the database.
        abort(Response('', status=404))
    return {'version': str(version.version)}
def assert_valid_provider(provider):
    """Abort the request with 404 unless *provider* (case-insensitive)
    is one of the supported providers."""
    if provider.lower() not in get_supported_providers():
        abort(Response('', status=404))
def assert_valid_category(category):
    """Abort the request with 400 unless *category* is 'images' or 'servers'."""
    if category not in SUPPORTED_CATEGORIES:
        abort(Response('', status=400))
def make_response(content_dict, collection_name, element_name):
    """Render *content_dict* as XML (for .xml request paths) or JSON.

    NOTE(review): this deliberately shadows the ``make_response`` imported
    from flask at the top of the file — confirm callers never need the
    flask original in this module.
    """
    if request.path.endswith('.xml'):
        return Response(
            json_to_xml(content_dict, collection_name, element_name),
            mimetype='application/xml;charset=utf-8')
    else:
        if collection_name:
            content = {collection_name: content_dict}
        else:
            content = content_dict
        return jsonify(**content)
@app.route('/v1/providers', methods=['GET'])
@app.route('/v1/providers.json', methods=['GET'])
@app.route('/v1/providers.xml', methods=['GET'])
def list_providers():
    """GET /v1/providers[.json|.xml] — list all supported providers."""
    providers = get_providers()
    return make_response(providers, 'providers', 'provider')
@app.route('/v1/<provider>/servers/types', methods=['GET'])
@app.route('/v1/<provider>/servers/types.json', methods=['GET'])
@app.route('/v1/<provider>/servers/types.xml', methods=['GET'])
def list_provider_servers_types(provider):
    """GET /v1/<provider>/servers/types — server types for a provider."""
    assert_valid_provider(provider)
    servers_types = get_provider_servers_types(provider)
    return make_response(servers_types, 'types', 'type')
@app.route('/v1/images/states', methods=['GET'])
@app.route('/v1/images/states.json', methods=['GET'])
@app.route('/v1/images/states.xml', methods=['GET'])
def list_images_states():
    """GET /v1/images/states — the possible image lifecycle state names."""
    states = []
    for attr in dir(ImageState):
        # Skip dunder/private attributes of the enum class.
        if attr[0] == '_':
            continue
        else:
            states.append({'name': attr})
    return make_response(states, 'states', 'state')
@app.route('/v1/<provider>/regions', methods=['GET'])
@app.route('/v1/<provider>/regions.json', methods=['GET'])
@app.route('/v1/<provider>/regions.xml', methods=['GET'])
def list_provider_regions(provider):
    """GET /v1/<provider>/regions — all regions for a provider."""
    assert_valid_provider(provider)
    regions = get_provider_regions(provider)
    return make_response(regions, 'regions', 'region')
@app.route('/v1/<provider>/<region>/servers/<server_type>', methods=['GET'])
@app.route('/v1/<provider>/<region>/servers/<server_type>.json',
           methods=['GET'])
@app.route('/v1/<provider>/<region>/servers/<server_type>.xml',
           methods=['GET'])
def list_servers_for_provider_region_and_type(provider, region, server_type):
    """GET /v1/<provider>/<region>/servers/<type> — servers by region+type."""
    assert_valid_provider(provider)
    servers = get_provider_servers_for_region_and_type(provider,
                                                       region, server_type)
    return make_response(servers, 'servers', 'server')
@app.route('/v1/<provider>/servers/<server_type>', methods=['GET'])
@app.route('/v1/<provider>/servers/<server_type>.json', methods=['GET'])
@app.route('/v1/<provider>/servers/<server_type>.xml', methods=['GET'])
def list_servers_for_provider_type(provider, server_type):
    """GET /v1/<provider>/servers/<type> — all servers of a given type."""
    assert_valid_provider(provider)
    servers = get_provider_servers_for_type(provider, server_type)
    return make_response(servers, 'servers', 'server')
@app.route('/v1/<provider>/<region>/images/<state>', methods=['GET'])
@app.route('/v1/<provider>/<region>/images/<state>.json', methods=['GET'])
@app.route('/v1/<provider>/<region>/images/<state>.xml', methods=['GET'])
def list_images_for_provider_region_and_state(provider, region, state):
    """GET /v1/<provider>/<region>/images/<state> — images by region+state."""
    assert_valid_provider(provider)
    images = get_provider_images_for_region_and_state(provider, region, state)
    return make_response(images, 'images', 'image')
@app.route('/v1/<provider>/images/<state>', methods=['GET'])
@app.route('/v1/<provider>/images/<state>.json', methods=['GET'])
@app.route('/v1/<provider>/images/<state>.xml', methods=['GET'])
def list_images_for_provider_state(provider, state):
    """GET /v1/<provider>/images/<state> — all images in a given state."""
    assert_valid_provider(provider)
    images = get_provider_images_for_state(provider, state)
    return make_response(images, 'images', 'image')
@app.route('/v1/<provider>/<region>/<category>', methods=['GET'])
@app.route('/v1/<provider>/<region>/<category>.json', methods=['GET'])
@app.route('/v1/<provider>/<region>/<category>.xml', methods=['GET'])
def list_provider_resource_for_category(provider, region, category):
    """GET /v1/<provider>/<region>/<images|servers> — resources by region."""
    assert_valid_provider(provider)
    assert_valid_category(category)
    # Dispatches to get_provider_images_for_region or
    # get_provider_servers_for_region based on the category name.
    resources = globals()['get_provider_%s_for_region' % (category)](
        provider, region)
    # category[:-1] strips the plural 's' for the XML element name.
    return make_response(resources, category, category[:-1])
@app.route('/v1/<provider>/<category>', methods=['GET'])
@app.route('/v1/<provider>/<category>.json', methods=['GET'])
@app.route('/v1/<provider>/<category>.xml', methods=['GET'])
def list_provider_resource(provider, category):
    """GET /v1/<provider>/<images|servers> — all resources of a category."""
    assert_valid_provider(provider)
    assert_valid_category(category)
    # Dispatches to get_provider_images or get_provider_servers.
    resources = globals()['get_provider_%s' % (category)](provider)
    return make_response(resources, category, category[:-1])
@app.route('/v1/<provider>/dataversion', methods=['GET'])
@app.route('/v1/<provider>/dataversion.json', methods=['GET'])
@app.route('/v1/<provider>/dataversion.xml', methods=['GET'])
def get_provider_category_data_version(provider):
    """GET /v1/<provider>/dataversion?category=... — table data version."""
    assert_valid_provider(provider)
    category = request.args.get('category')
    assert_valid_category(category)
    version = get_data_version_for_provider_category(provider, category)
    return make_response(version, None, None)
@app.route('/package-version', methods=['GET'])
def get_package_version():
    """GET /package-version — the running pint_server package version."""
    return make_response(
        {'package version': pint_server.__VERSION__}, None, None)
@app.route('/', methods=['GET'])
def redirect_to_public_cloud():
    """GET / — permanent (301) redirect to the SUSE public cloud page."""
    #return redirect('https://www.suse.com/solutions/public-cloud/')
    # A hand-built 301 response is returned instead of flask.redirect(),
    # presumably to force a permanent redirect status — confirm intent.
    headers = {
        'Location': 'https://www.suse.com/solutions/public-cloud/',
    }
    return Response('', status=301, headers=headers)
@app.route('/<path:path>')
def catch_all(path):
    """Any path not matched by the routes above returns 400 Bad Request."""
    abort(Response('', status=400))
@app.teardown_appcontext
def shutdown_session(exception=None):
    """Remove the scoped DB session when the app context tears down."""
    db_session.remove()
if __name__ == '__main__':
    # Run the Flask development server when executed directly.
    app.run()
| 37.391803 | 80 | 0.687536 |
import datetime
import re
from decimal import Decimal
from flask import (abort, Flask, jsonify, make_response, request, redirect,
Response)
from flask_cors import CORS
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import text, or_
from sqlalchemy.exc import DataError
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from xml.dom import minidom
import xml.etree.ElementTree as ET
import pint_server
from pint_server.database import init_db
from pint_server.models import (ImageState, AmazonImagesModel,
OracleImagesModel, AlibabaImagesModel,
MicrosoftImagesModel, GoogleImagesModel,
AmazonServersModel, MicrosoftServersModel,
GoogleServersModel, ServerType,
VersionsModel, MicrosoftRegionMapModel)
# Flask application and database session setup.
app = Flask(__name__)
db_session = init_db()
# Allow cross-origin requests from any origin on every route.
cors_config = {
    "origins": ["*"]
}
CORS(app, resources={
    r"/*": cors_config
})
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# Maps falsy values (None, '', 0, False) to the empty string for output.
null_to_empty = lambda s : s or ''
# Public server-type names -> internal DB values ('update'/'region').
REGIONSERVER_SMT_MAP = {
    'smt': 'update',
    'regionserver': 'region',
    'regionserver-sap': 'region',
    'regionserver-sles': 'region',
    'update': 'update',
    'region': 'region'
}
# Internal DB values -> legacy public names (used when formatting output).
REGIONSERVER_SMT_REVERSED_MAP = {
    'update': 'smt',
    'region': 'regionserver'
}
# Provider name -> SQLAlchemy model for that provider's images table.
PROVIDER_IMAGES_MODEL_MAP = {
    'amazon': AmazonImagesModel,
    'google': GoogleImagesModel,
    'microsoft': MicrosoftImagesModel,
    'alibaba': AlibabaImagesModel,
    'oracle': OracleImagesModel
}
# Provider name -> servers model; alibaba/oracle have no servers table.
PROVIDER_SERVERS_MODEL_MAP = {
    'amazon': AmazonServersModel,
    'google': GoogleServersModel,
    'microsoft': MicrosoftServersModel
}
# Attributes stripped from image rows before serialization, per provider.
PROVIDER_IMAGES_EXCLUDE_ATTRS = {
    'microsoft': ['id']
}
# Attributes stripped from server rows before serialization, per provider.
PROVIDER_SERVERS_EXCLUDE_ATTRS = {
    'amazon': ['id'],
    'google': ['id'],
    'microsoft': ['id']
}
# Resource categories accepted in /v1/... URLs.
SUPPORTED_CATEGORIES = ['images', 'servers']
def get_supported_providers():
    """Provider names derived from the versions table.

    Each row's tablename looks like '<provider>images' or
    '<provider>servers'; stripping that suffix and de-duplicating
    yields the set of known providers.
    """
    rows = VersionsModel.query.with_entities(VersionsModel.tablename)
    names = set()
    for row in rows:
        names.add(re.sub('(servers|images)', '', row.tablename))
    return list(names)
def get_providers():
    """Providers formatted as the REST payload: [{'name': ...}, ...]."""
    return [{'name': name} for name in get_supported_providers()]
def json_to_xml(json_obj, collection_name, element_name):
    """Render *json_obj* as pretty-printed XML.

    Three input shapes are supported:
      * collection_name given: json_obj is a list of flat dicts; each
        dict becomes an <element_name> child (entries as XML attributes)
        under a <collection_name> root.
      * only element_name given: json_obj is a single flat dict rendered
        as one element with its entries as attributes.
      * neither given: json_obj is a single {tag: text} pair rendered as
        <tag>text</tag>.
    """
    if collection_name:
        root = ET.Element(collection_name)
        # 'item' (previously named 'dict') avoids shadowing the builtin.
        for item in json_obj:
            ET.SubElement(root, element_name, item)
    elif element_name:
        root = ET.Element(element_name, json_obj)
    else:
        # Single key/value pair: the key is the tag, the value the text.
        tag = list(json_obj.keys())[0]
        root = ET.Element(tag)
        root.text = json_obj[tag]
    parsed = minidom.parseString(
        ET.tostring(root, encoding='utf8', method='xml'))
    return parsed.toprettyxml(indent='  ')
def get_formatted_dict(obj, extra_attrs=None, exclude_attrs=None):
    """Serialize a model row to a plain dict of JSON/XML-safe values.

    Decimal -> float, ImageState/ServerType enums -> legacy string form,
    dates -> 'YYYYMMDD', everything else through null_to_empty(). Private
    attributes, excluded attributes and the internal 'shape' column are
    dropped; *extra_attrs* are merged in last.
    """
    obj_dict = {}
    for attr in obj.__dict__.keys():
        # FIXME(gyee): the orignal Pint server does not return the "changeinfo"
        # or "urn" attribute if it's empty. So we'll need to do the same here.
        # IMHO, I consider that a bug in the original Pint server as we should
        # return all attributes regardless whether its empty or not.
        if attr.lower() in ['urn', 'changeinfo'] and not obj.__dict__[attr]:
            continue
        # NOTE: the "shape" attribute will be processed together with "type"
        # as it is internal only
        if attr.lower() == 'shape':
            continue
        if exclude_attrs and attr in exclude_attrs:
            continue
        elif attr[0] == '_':
            # Skip SQLAlchemy internals (e.g. _sa_instance_state).
            continue
        else:
            value = obj.__dict__[attr]
            if isinstance(value, Decimal):
                obj_dict[attr] = float(value)
            elif isinstance(value, ImageState):
                obj_dict[attr] = obj.state.value
            elif isinstance(value, ServerType):
                # NOTE(gyee): we need to reverse map the server type
                # to make it backward compatible
                if obj.__dict__['shape']:
                    # e.g. type 'region' + shape 'sap' -> 'regionserver-sap'
                    obj_dict[attr] = "%s-%s" % (
                        REGIONSERVER_SMT_REVERSED_MAP[obj.type.value],
                        obj.__dict__['shape'])
                else:
                    obj_dict[attr] = (
                        REGIONSERVER_SMT_REVERSED_MAP[obj.type.value])
            elif isinstance(value, datetime.date):
                obj_dict[attr] = value.strftime('%Y%m%d')
            else:
                # NOTE: this also maps 0/False to '' via null_to_empty.
                obj_dict[attr] = null_to_empty(value)
    if extra_attrs:
        obj_dict.update(extra_attrs)
    return obj_dict
def get_mapped_server_type_for_provider(provider, server_type):
    # Translate a public server-type name (smt/regionserver[-*]) to the
    # internal DB value ('update'/'region'); abort 404 on unknown names.
    if server_type not in REGIONSERVER_SMT_MAP:
        abort(Response('', status=404))
    mapped_server_type = REGIONSERVER_SMT_MAP[server_type]
    if PROVIDER_SERVERS_MODEL_MAP.get(provider):
        server_types_json = get_provider_servers_types(provider)
        server_types = [t['name'] for t in server_types_json]
        # 404 when this provider's servers table has no rows of the type.
        if mapped_server_type not in server_types:
            abort(Response('', status=404))
    return mapped_server_type
def get_provider_servers_for_type(provider, server_type):
    # All servers of a given public server type for *provider*.
    servers = []
    mapped_server_type = get_mapped_server_type_for_provider(
        provider, server_type)
    # NOTE(gyee): currently we don't have DB tables for both Alibaba and
    # Oracle servers, so providers without a table yield an empty list.
    if not PROVIDER_SERVERS_MODEL_MAP.get(provider):
        return servers
    servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.filter(
        PROVIDER_SERVERS_MODEL_MAP[provider].type == mapped_server_type)
    exclude_attrs = PROVIDER_SERVERS_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(server, exclude_attrs=exclude_attrs)
            for server in servers]
def get_provider_servers_types(provider):
    """Distinct server types for *provider*, as [{'name': ...}, ...].

    Providers without a servers table fall back to the static legacy
    list to stay compatible with the original Pint server.
    """
    # 'is not None' replaces the non-idiomatic '!= None' comparison
    # (identity, not equality, is the correct test for None — PEP 8).
    if PROVIDER_SERVERS_MODEL_MAP.get(provider) is not None:
        servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.distinct(
            PROVIDER_SERVERS_MODEL_MAP[provider].type)
        return [{'name': server.type.value} for server in servers]
    # Oracle servers. In order to maintain compatibility with the
    # existing Pint server, we are returning the original server
    # types. In the future, if we do decide to create the tables,
    # then we can easily add them to PROVIDER_SERVERS_MODEL_MAP.
    return [{'name': 'smt'}, {'name': 'regionserver'}]
def get_provider_regions(provider):
    """Distinct region names for *provider*, as [{'name': ...}, ...].

    Microsoft regions come from the dedicated region map table; for the
    other providers the union of server regions and image regions is
    returned, preserving first-seen order (servers first).
    """
    if provider == 'microsoft':
        return _get_all_azure_regions()
    servers = []
    images = []
    region_list = []  # ordered union of server and image regions
    # 'is not None' replaces the non-idiomatic '!= None' comparison.
    if PROVIDER_SERVERS_MODEL_MAP.get(provider) is not None:
        servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.with_entities(
            PROVIDER_SERVERS_MODEL_MAP[provider].region).distinct(
            PROVIDER_SERVERS_MODEL_MAP[provider].region)
    # Some providers (e.g. oracle) have no 'region' column on images.
    if hasattr(PROVIDER_IMAGES_MODEL_MAP[provider], 'region'):
        images = PROVIDER_IMAGES_MODEL_MAP[provider].query.with_entities(
            PROVIDER_IMAGES_MODEL_MAP[provider].region).distinct(
            PROVIDER_IMAGES_MODEL_MAP[provider].region)
    for server in servers:
        if server.region not in region_list:
            region_list.append(server.region)
    for image in images:
        if image.region not in region_list:
            region_list.append(image.region)
    return [{'name': r} for r in region_list]
def _get_all_azure_regions():
    """Every known Azure region name (aliases and canonical names),
    sorted, as [{'name': ...}, ...]."""
    names = set()
    for env in MicrosoftRegionMapModel.query.all():
        names.add(env.region)
        names.add(env.canonicalname)
    return [{'name': name} for name in sorted(names)]
def _get_azure_servers(region, server_type=None):
    """Azure servers for *region* (any alias), optionally one type only.

    Aborts 404 when the region is unknown or the type value is invalid.
    """
    # first lookup canonical name for the given region
    environment = MicrosoftRegionMapModel.query.filter(
        or_(MicrosoftRegionMapModel.region == region,
            MicrosoftRegionMapModel.canonicalname == region)).first()
    if not environment:
        abort(Response('', status=404))
    # then get all the regions with the canonical name
    environments = MicrosoftRegionMapModel.query.filter(
        MicrosoftRegionMapModel.canonicalname == environment.canonicalname)
    # get all the possible names for the region
    all_regions = []
    for environment in environments:
        if environment.region not in all_regions:
            all_regions.append(environment.region)
    # get all the servers for that region
    if server_type:
        mapped_server_type = get_mapped_server_type_for_provider(
            'microsoft', server_type)
        servers = MicrosoftServersModel.query.filter(
            MicrosoftServersModel.type == mapped_server_type,
            MicrosoftServersModel.region.in_(all_regions))
    else:
        servers = MicrosoftServersModel.query.filter(
            MicrosoftServersModel.region.in_(all_regions))
    try:
        exclude_attrs = PROVIDER_SERVERS_EXCLUDE_ATTRS.get('microsoft')
        return [get_formatted_dict(server, exclude_attrs=exclude_attrs)
                for server in servers]
    except DataError:
        # Raised while iterating the query, e.g. on a bad enum value.
        abort(Response('', status=404))
def _get_azure_images_for_region_state(region, state):
    """Azure images for *region* (any alias); *state* of None means all
    states. The requested region name is echoed back on every image."""
    # first lookup the environment for the given region
    environment = MicrosoftRegionMapModel.query.filter(
        or_(MicrosoftRegionMapModel.region == region,
            MicrosoftRegionMapModel.canonicalname == region)).first()
    if not environment:
        abort(Response('', status=404))
    # assume the environment is unique per region
    environment_name = environment.environment
    # now pull all the images that matches the environment and state
    if state is None:
        images = MicrosoftImagesModel.query.filter(
            MicrosoftImagesModel.environment == environment_name)
    else:
        images = MicrosoftImagesModel.query.filter(
            MicrosoftImagesModel.environment == environment_name,
            MicrosoftImagesModel.state == state)
    extra_attrs = {'region': region}
    exclude_attrs = PROVIDER_IMAGES_EXCLUDE_ATTRS.get('microsoft')
    try:
        return [get_formatted_dict(image, extra_attrs=extra_attrs,
                                   exclude_attrs=exclude_attrs)
                for image in images]
    except DataError:
        # Raised while iterating the query, e.g. on an invalid state.
        abort(Response('', status=404))
def get_provider_images_for_region_and_state(provider, region, state):
    # Images filtered by both region and state; 404 on unknown values.
    images = []
    if provider == 'microsoft':
        # Azure regions are resolved through the region-map table.
        return _get_azure_images_for_region_state(region, state)
    region_names = []
    for each in get_provider_regions(provider):
        region_names.append(each['name'])
    if state in ImageState.__members__ and region in region_names:
        # Providers without a region column are filtered by state only.
        if (hasattr(PROVIDER_IMAGES_MODEL_MAP[provider], 'region')):
            images = PROVIDER_IMAGES_MODEL_MAP[provider].query.filter(
                PROVIDER_IMAGES_MODEL_MAP[provider].region == region,
                PROVIDER_IMAGES_MODEL_MAP[provider].state == state)
        else:
            images = PROVIDER_IMAGES_MODEL_MAP[provider].query.filter(
                PROVIDER_IMAGES_MODEL_MAP[provider].state == state)
        exclude_attrs = PROVIDER_IMAGES_EXCLUDE_ATTRS.get(provider)
        return [get_formatted_dict(image, exclude_attrs=exclude_attrs)
                for image in images]
    else:
        abort(Response('', status=404))
def get_provider_images_for_state(provider, state):
    # Images filtered by state only; 404 on an unknown state name.
    if state in ImageState.__members__:
        images = PROVIDER_IMAGES_MODEL_MAP[provider].query.filter(
            PROVIDER_IMAGES_MODEL_MAP[provider].state == state)
    else:
        abort(Response('', status=404))
    exclude_attrs = PROVIDER_IMAGES_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(image, exclude_attrs=exclude_attrs)
            for image in images]
def get_provider_servers_for_region(provider, region):
    # All servers in a region; 404 when the region is unknown.
    servers = []
    if provider == 'microsoft':
        return _get_azure_servers(region)
    region_names = []
    for each in get_provider_regions(provider):
        region_names.append(each['name'])
    if region in region_names:
        if PROVIDER_SERVERS_MODEL_MAP.get(provider) != None:
            servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.filter(
                PROVIDER_SERVERS_MODEL_MAP[provider].region == region)
    else:
        abort(Response('', status=404))
    exclude_attrs = PROVIDER_SERVERS_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(server, exclude_attrs=exclude_attrs)
            for server in servers]
def get_provider_servers_for_region_and_type(provider, region, server_type):
    # Servers filtered by both region and public server type.
    if provider == 'microsoft':
        return _get_azure_servers(region, server_type)
    servers = []
    mapped_server_type = get_mapped_server_type_for_provider(
        provider, server_type)
    # NOTE(gyee): for Alibaba and Oracle where we don't have any servers,
    # an empty list is returned.
    if not PROVIDER_SERVERS_MODEL_MAP.get(provider):
        return servers
    region_names = []
    for each in get_provider_regions(provider):
        region_names.append(each['name'])
    if region in region_names:
        servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.filter(
            PROVIDER_SERVERS_MODEL_MAP[provider].region == region,
            PROVIDER_SERVERS_MODEL_MAP[provider].type == mapped_server_type)
        exclude_attrs = PROVIDER_SERVERS_EXCLUDE_ATTRS.get(provider)
        return [get_formatted_dict(server, exclude_attrs=exclude_attrs)
                for server in servers]
    else:
        abort(Response('', status=404))
def get_provider_images_for_region(provider, region):
    # All images in a region (any state); 404 when the region is unknown.
    if provider == 'microsoft':
        return _get_azure_images_for_region_state(region, None)
    images = []
    region_names = []
    for each in get_provider_regions(provider):
        region_names.append(each['name'])
    if region in region_names:
        # Providers without a region column return an empty list here.
        if hasattr(PROVIDER_IMAGES_MODEL_MAP[provider], 'region'):
            images = PROVIDER_IMAGES_MODEL_MAP[provider].query.filter(
                PROVIDER_IMAGES_MODEL_MAP[provider].region == region)
    else:
        abort(Response('', status=404))
    exclude_attrs = PROVIDER_IMAGES_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(image, exclude_attrs=exclude_attrs)
            for image in images]
def get_provider_servers(provider):
    # All servers for a provider; empty for providers with no table.
    servers = []
    if PROVIDER_SERVERS_MODEL_MAP.get(provider) != None:
        servers = PROVIDER_SERVERS_MODEL_MAP[provider].query.all()
    exclude_attrs = PROVIDER_SERVERS_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(server, exclude_attrs=exclude_attrs)
            for server in servers]
def get_provider_images(provider):
    # All images for a provider, serialized for the REST response.
    images = PROVIDER_IMAGES_MODEL_MAP[provider].query.all()
    exclude_attrs = PROVIDER_IMAGES_EXCLUDE_ATTRS.get(provider)
    return [get_formatted_dict(image, exclude_attrs=exclude_attrs)
            for image in images]
def get_data_version_for_provider_category(provider, category):
    # Versions are keyed by table name, e.g. 'amazonimages'.
    tablename = provider + category
    try:
        version = VersionsModel.query.filter(
            VersionsModel.tablename == tablename).one()
    except (NoResultFound, MultipleResultsFound):
        # No (or an ambiguous) version row means the pair is unknown.
        abort(Response('', status=404))
    return {'version': str(version.version)}
def assert_valid_provider(provider):
    """Abort with 404 unless *provider* (case-insensitive) is known."""
    if provider.lower() not in get_supported_providers():
        abort(Response('', status=404))
def assert_valid_category(category):
    """Abort with 400 unless *category* is a supported resource category."""
    if category not in SUPPORTED_CATEGORIES:
        abort(Response('', status=400))
def make_response(content_dict, collection_name, element_name):
    # Serialize to XML when the request path ends in '.xml'; otherwise
    # JSON, with the payload nested under collection_name when given.
    if request.path.endswith('.xml'):
        return Response(
            json_to_xml(content_dict, collection_name, element_name),
            mimetype='application/xml;charset=utf-8')
    else:
        if collection_name:
            content = {collection_name: content_dict}
        else:
            content = content_dict
        return jsonify(**content)
# REST endpoints. Each resource is exposed three times (bare, .json and
# .xml); make_response() picks the serialization from the request path.
@app.route('/v1/providers', methods=['GET'])
@app.route('/v1/providers.json', methods=['GET'])
@app.route('/v1/providers.xml', methods=['GET'])
def list_providers():
    providers = get_providers()
    return make_response(providers, 'providers', 'provider')
@app.route('/v1/<provider>/servers/types', methods=['GET'])
@app.route('/v1/<provider>/servers/types.json', methods=['GET'])
@app.route('/v1/<provider>/servers/types.xml', methods=['GET'])
def list_provider_servers_types(provider):
    assert_valid_provider(provider)
    servers_types = get_provider_servers_types(provider)
    return make_response(servers_types, 'types', 'type')
@app.route('/v1/images/states', methods=['GET'])
@app.route('/v1/images/states.json', methods=['GET'])
@app.route('/v1/images/states.xml', methods=['GET'])
def list_images_states():
    # The valid image states are the public members of the ImageState enum.
    states = []
    for attr in dir(ImageState):
        if attr[0] == '_':
            continue
        else:
            states.append({'name': attr})
    return make_response(states, 'states', 'state')
@app.route('/v1/<provider>/regions', methods=['GET'])
@app.route('/v1/<provider>/regions.json', methods=['GET'])
@app.route('/v1/<provider>/regions.xml', methods=['GET'])
def list_provider_regions(provider):
    assert_valid_provider(provider)
    regions = get_provider_regions(provider)
    return make_response(regions, 'regions', 'region')
@app.route('/v1/<provider>/<region>/servers/<server_type>', methods=['GET'])
@app.route('/v1/<provider>/<region>/servers/<server_type>.json',
           methods=['GET'])
@app.route('/v1/<provider>/<region>/servers/<server_type>.xml',
           methods=['GET'])
def list_servers_for_provider_region_and_type(provider, region, server_type):
    assert_valid_provider(provider)
    servers = get_provider_servers_for_region_and_type(provider,
                                                       region, server_type)
    return make_response(servers, 'servers', 'server')
@app.route('/v1/<provider>/servers/<server_type>', methods=['GET'])
@app.route('/v1/<provider>/servers/<server_type>.json', methods=['GET'])
@app.route('/v1/<provider>/servers/<server_type>.xml', methods=['GET'])
def list_servers_for_provider_type(provider, server_type):
    assert_valid_provider(provider)
    servers = get_provider_servers_for_type(provider, server_type)
    return make_response(servers, 'servers', 'server')
@app.route('/v1/<provider>/<region>/images/<state>', methods=['GET'])
@app.route('/v1/<provider>/<region>/images/<state>.json', methods=['GET'])
@app.route('/v1/<provider>/<region>/images/<state>.xml', methods=['GET'])
def list_images_for_provider_region_and_state(provider, region, state):
    assert_valid_provider(provider)
    images = get_provider_images_for_region_and_state(provider, region, state)
    return make_response(images, 'images', 'image')
@app.route('/v1/<provider>/images/<state>', methods=['GET'])
@app.route('/v1/<provider>/images/<state>.json', methods=['GET'])
@app.route('/v1/<provider>/images/<state>.xml', methods=['GET'])
def list_images_for_provider_state(provider, state):
    assert_valid_provider(provider)
    images = get_provider_images_for_state(provider, state)
    return make_response(images, 'images', 'image')
@app.route('/v1/<provider>/<region>/<category>', methods=['GET'])
@app.route('/v1/<provider>/<region>/<category>.json', methods=['GET'])
@app.route('/v1/<provider>/<region>/<category>.xml', methods=['GET'])
def list_provider_resource_for_category(provider, region, category):
    # Dispatches to get_provider_images_for_region() /
    # get_provider_servers_for_region() via the module globals().
    assert_valid_provider(provider)
    assert_valid_category(category)
    resources = globals()['get_provider_%s_for_region' % (category)](
        provider, region)
    return make_response(resources, category, category[:-1])
@app.route('/v1/<provider>/<category>', methods=['GET'])
@app.route('/v1/<provider>/<category>.json', methods=['GET'])
@app.route('/v1/<provider>/<category>.xml', methods=['GET'])
def list_provider_resource(provider, category):
    # Same dispatch pattern, without a region filter.
    assert_valid_provider(provider)
    assert_valid_category(category)
    resources = globals()['get_provider_%s' % (category)](provider)
    return make_response(resources, category, category[:-1])
@app.route('/v1/<provider>/dataversion', methods=['GET'])
@app.route('/v1/<provider>/dataversion.json', methods=['GET'])
@app.route('/v1/<provider>/dataversion.xml', methods=['GET'])
def get_provider_category_data_version(provider):
    # Data version for one provider/category pair; the category comes
    # from the query string (e.g. ?category=images).
    assert_valid_provider(provider)
    category = request.args.get('category')
    assert_valid_category(category)
    version = get_data_version_for_provider_category(provider, category)
    return make_response(version, None, None)
@app.route('/package-version', methods=['GET'])
def get_package_version():
    # Version of the pint_server package itself (not the data version).
    return make_response(
        {'package version': pint_server.__VERSION__}, None, None)
@app.route('/', methods=['GET'])
def redirect_to_public_cloud():
    # Hand-built 301 keeps the response body empty.
    headers = {
        'Location': 'https://www.suse.com/solutions/public-cloud/',
    }
    return Response('', status=301, headers=headers)
@app.route('/<path:path>')
def catch_all(path):
    # Any path not matched by an earlier route returns 400 with no body.
    abort(Response('', status=400))
@app.teardown_appcontext
def shutdown_session(exception=None):
    # Release the scoped SQLAlchemy session at the end of each request.
    db_session.remove()
if __name__ == '__main__':
    app.run()
| true | true |
f73d4ea1bd14fc90ef6ba2ed40a68da5d006d904 | 129 | py | Python | atest/resources/testlibs/ctrl_or_command.py | ishandutta2007/SeleniumLibrary | 6b2748122b2af219d8247dab4b6db00d3c49e60d | [
"ECL-2.0",
"Apache-2.0"
] | 4 | 2018-03-22T11:27:57.000Z | 2021-03-04T19:15:38.000Z | atest/resources/testlibs/ctrl_or_command.py | ishandutta2007/SeleniumLibrary | 6b2748122b2af219d8247dab4b6db00d3c49e60d | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2018-12-03T17:01:30.000Z | 2019-03-30T16:09:01.000Z | atest/resources/testlibs/ctrl_or_command.py | ishandutta2007/SeleniumLibrary | 6b2748122b2af219d8247dab4b6db00d3c49e60d | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2018-01-23T16:11:55.000Z | 2018-01-23T16:11:55.000Z | import platform
def ctrl_or_command_key():
    """Name of the platform modifier key: COMMAND on macOS, CONTROL elsewhere."""
    return 'COMMAND' if platform.system() == 'Darwin' else 'CONTROL'
| 16.125 | 37 | 0.651163 | import platform
def ctrl_or_command_key():
    """Modifier key for keyboard shortcuts on the current OS."""
    if platform.system() != 'Darwin':
        return 'CONTROL'
    return 'COMMAND'
| true | true |
f73d4ee3470719281777538e66e70fed17adec9b | 10,558 | py | Python | pipeline_plugins/tests/components/collections/sites/open/monitor_test/alarm_shield/test_v1_1.py | wkma/bk-sops | 8fb5609c0c4495c28d588fbafa9d9f5f2976929b | [
"Apache-2.0"
] | 2 | 2021-07-28T01:48:31.000Z | 2021-11-17T11:02:26.000Z | pipeline_plugins/tests/components/collections/sites/open/monitor_test/alarm_shield/test_v1_1.py | wkma/bk-sops | 8fb5609c0c4495c28d588fbafa9d9f5f2976929b | [
"Apache-2.0"
] | null | null | null | pipeline_plugins/tests/components/collections/sites/open/monitor_test/alarm_shield/test_v1_1.py | wkma/bk-sops | 8fb5609c0c4495c28d588fbafa9d9f5f2976929b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.test import TestCase
from mock import MagicMock
from pipeline.component_framework.test import (
ComponentTestMixin,
ComponentTestCase,
CallAssertion,
ExecuteAssertion,
Call,
Patcher,
)
from pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1 import MonitorAlarmShieldComponent
class MonitorAlarmShieldComponentTest(TestCase, ComponentTestMixin):
    """Component tests for the v1.1 monitor alarm-shield plugin."""
    def cases(self):
        # One failing and one succeeding add_shield scenario (see below).
        return [ALTER_BILL_FAIL_CASE, ALTER_BILL_SUCCESS_CASE]
    def component_cls(self):
        # Component under test.
        return MonitorAlarmShieldComponent
class MockClient(object):
    """Stand-in for the monitor API client.

    ``add_shield`` is a MagicMock that always returns the canned
    *add_shield_result* payload; calling the instance itself returns the
    instance, so it can replace a client factory at the patch target.
    """
    def __init__(self, add_shield_result=None):
        # Canned payload returned by every add_shield(...) call.
        self.add_shield = MagicMock(return_value=add_shield_result)
    def __call__(self, *args, **kwargs):
        # Mimic a factory: any call yields this same mock instance.
        return self
# Patch targets: module paths of the names the plugin resolves at runtime.
GET_CLIENT_BY_USER = "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1.BKMonitorClient"
GET_MODULE_ID_LIST_BY_NAME = (
    "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1" ".get_module_id_list_by_name"
)
GET_SET_LIST = "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1.get_set_list"
GET_LIST_BY_SELECTED_NAMES = (
    "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1" ".get_list_by_selected_names"
)
GET_SERVICE_TEMPLATE_LIST = (
    "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1" ".get_service_template_list"
)
GET_SERVICE_TEMPLATE_LIST_BY_NAMES = (
    "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1" ".get_service_template_list_by_names"
)
# Mock clients: one whose add_shield fails, one that succeeds with id "1".
CREATE_SHIELD_FAIL_CLIENT = MockClient(add_shield_result={"result": False, "message": "create shield fail"})
CREATE_SHIELD_SUCCESS_CLIENT = MockClient(add_shield_result={"result": True, "data": {"id": "1"}, "message": "success"})
# Shared plugin inputs: shield by node (sets/modules selected by name)
# for business 2, on two monitor metrics, with a fixed time window.
INPUT_DATA = {
    "bk_alarm_shield_info": {
        "bk_alarm_shield_scope": "node",
        "bk_alarm_shield_business": 2,
        "bk_alarm_shield_node": {
            "bk_set_method": "select",
            "bk_set_select": ["set_name1", "set_name2"],
            "bk_set_text": "",
            "bk_module_method": "select",
            "bk_module_select": ["module_name1", "module_name2", "module_name3"],
            "bk_module_text": "",
        },
        "bk_alarm_shield_IP": "",
    },
    "bk_alarm_shield_target": ["bk_monitor.system.load.load5", "bk_monitor.system.cpu_summary.usage"],
    "bk_alarm_shield_begin_time": "2020-09-28 11:18:58",
    "bk_alarm_shield_end_time": "2020-09-28 11:18:58",
    "bk_alarm_time_type": "0",
    "bk_alarm_shield_duration": "0",
}
def get_set_list(username, bk_biz_id, bk_supplier_account, kwargs=None):
    """Mocked CMDB lookup: four fixed sets regardless of the arguments."""
    names = ["set_name1", "set_name2", "set_name3", "set_name4"]
    return [
        {"bk_set_id": set_id, "bk_set_name": name}
        for set_id, name in enumerate(names, start=2)
    ]
def get_list_by_selected_names(set_names, set_list):
    """Mocked name filter: always the first two sets, ignoring arguments."""
    return [
        {"bk_set_id": 2, "bk_set_name": "set_name1"},
        {"bk_set_id": 3, "bk_set_name": "set_name2"},
    ]
# (name, service_category_id, id) rows shared by both template mocks.
_SERVICE_TEMPLATE_ROWS = [
    ("module_name1", 32, 51),
    ("module_name2", 32, 50),
    ("module_name3", 2, 47),
    ("module_name4", 2, 46),
    ("module_name5", 2, 45),
]
def get_service_template_list(username, bk_biz_id, bk_supplier_account):
    """Mocked CMDB service-template listing for business 2."""
    return [
        {"bk_biz_id": 2, "name": name, "service_category_id": category, "id": template_id}
        for name, category, template_id in _SERVICE_TEMPLATE_ROWS
    ]
def get_service_template_list_by_names(service_template_names, service_template_list):
    """Mocked name filter: returns the full template list, ignoring arguments."""
    return [
        {"bk_biz_id": 2, "name": name, "service_category_id": category, "id": template_id}
        for name, category, template_id in _SERVICE_TEMPLATE_ROWS
    ]
def get_module_id_list_by_name(bk_biz_id, username, set_list, service_template_list):
    """Mocked module-id resolution: five fixed ids, ignoring arguments."""
    return list(range(1, 6))
# Test case: add_shield fails -> the plugin surfaces the monitor API
# error (with the serialized request params) and produces no shield id.
ALTER_BILL_FAIL_CASE = ComponentTestCase(
    name="create shield fail case",
    inputs=INPUT_DATA,
    parent_data={"executor": "executor", "biz_cc_id": 2},
    execute_assertion=ExecuteAssertion(
        success=False,
        outputs={
            "shield_id": "",
            "message": '调用监控平台(Monitor)接口monitor.create_shield返回失败, params={"begin_time":"2020-09-28 11:18:58",'
            '"bk_biz_id":2,"category":"scope","cycle_config":{"begin_time":"","end_time":"","day_list":[],'
            '"week_list":[],"type":1},"description":"shield by bk_sops","dimension_config":'
            '{"scope_type":"node","target":[{"bk_obj_id":"module","bk_inst_id":1},{"bk_obj_id":"module",'
            '"bk_inst_id":2},{"bk_obj_id":"module","bk_inst_id":3},{"bk_obj_id":"module","bk_inst_id":4},'
            '{"bk_obj_id":"module","bk_inst_id":5}],"metric_id":["bk_monitor.system.load.load5",'
            '"bk_monitor.system.cpu_summary.usage"]},"end_time":"2020-09-28 11:18:58","notice_config":{},'
            '"shield_notice":false}, error=create shield fail',
        },
    ),
    schedule_assertion=None,
    execute_call_assertion=[
        # The client must be called exactly once with the full payload
        # built from INPUT_DATA (module ids from the mocked resolvers).
        CallAssertion(
            func=CREATE_SHIELD_FAIL_CLIENT.add_shield,
            calls=[
                Call(
                    **{
                        "begin_time": "2020-09-28 11:18:58",
                        "bk_biz_id": 2,
                        "category": "scope",
                        "cycle_config": {"begin_time": "", "end_time": "", "day_list": [], "week_list": [], "type": 1},
                        "description": "shield by bk_sops",
                        "dimension_config": {
                            "scope_type": "node",
                            "target": [
                                {"bk_obj_id": "module", "bk_inst_id": 1},
                                {"bk_obj_id": "module", "bk_inst_id": 2},
                                {"bk_obj_id": "module", "bk_inst_id": 3},
                                {"bk_obj_id": "module", "bk_inst_id": 4},
                                {"bk_obj_id": "module", "bk_inst_id": 5},
                            ],
                            "metric_id": ["bk_monitor.system.load.load5", "bk_monitor.system.cpu_summary.usage"],
                        },
                        "end_time": "2020-09-28 11:18:58",
                        "notice_config": {},
                        "shield_notice": False,
                    }
                )
            ],
        )
    ],
    patchers=[
        # Replace the monitor client and every CMDB helper with the mocks.
        Patcher(target=GET_CLIENT_BY_USER, return_value=CREATE_SHIELD_FAIL_CLIENT),
        Patcher(target=GET_SET_LIST, side_effect=get_set_list),
        Patcher(target=GET_LIST_BY_SELECTED_NAMES, side_effect=get_list_by_selected_names),
        Patcher(target=GET_SERVICE_TEMPLATE_LIST, side_effect=get_service_template_list),
        Patcher(target=GET_SERVICE_TEMPLATE_LIST_BY_NAMES, side_effect=get_service_template_list_by_names),
        Patcher(target=GET_MODULE_ID_LIST_BY_NAME, side_effect=get_module_id_list_by_name),
    ],
)
# Test case: add_shield succeeds -> the plugin outputs the new shield id.
ALTER_BILL_SUCCESS_CASE = ComponentTestCase(
    name="create shield success case",
    inputs=INPUT_DATA,
    parent_data={"executor": "executor", "biz_cc_id": 2},
    execute_assertion=ExecuteAssertion(success=True, outputs={"shield_id": "1", "message": "success"}),
    schedule_assertion=None,
    execute_call_assertion=[
        # Same expected payload as the fail case; only the client differs.
        CallAssertion(
            func=CREATE_SHIELD_SUCCESS_CLIENT.add_shield,
            calls=[
                Call(
                    **{
                        "begin_time": "2020-09-28 11:18:58",
                        "bk_biz_id": 2,
                        "category": "scope",
                        "cycle_config": {"begin_time": "", "end_time": "", "day_list": [], "week_list": [], "type": 1},
                        "description": "shield by bk_sops",
                        "dimension_config": {
                            "scope_type": "node",
                            "target": [
                                {"bk_obj_id": "module", "bk_inst_id": 1},
                                {"bk_obj_id": "module", "bk_inst_id": 2},
                                {"bk_obj_id": "module", "bk_inst_id": 3},
                                {"bk_obj_id": "module", "bk_inst_id": 4},
                                {"bk_obj_id": "module", "bk_inst_id": 5},
                            ],
                            "metric_id": ["bk_monitor.system.load.load5", "bk_monitor.system.cpu_summary.usage"],
                        },
                        "end_time": "2020-09-28 11:18:58",
                        "notice_config": {},
                        "shield_notice": False,
                    }
                )
            ],
        )
    ],
    patchers=[
        # Replace the monitor client and every CMDB helper with the mocks.
        Patcher(target=GET_CLIENT_BY_USER, return_value=CREATE_SHIELD_SUCCESS_CLIENT),
        Patcher(target=GET_SET_LIST, side_effect=get_set_list),
        Patcher(target=GET_LIST_BY_SELECTED_NAMES, side_effect=get_list_by_selected_names),
        Patcher(target=GET_SERVICE_TEMPLATE_LIST, side_effect=get_service_template_list),
        Patcher(target=GET_SERVICE_TEMPLATE_LIST_BY_NAMES, side_effect=get_service_template_list_by_names),
        Patcher(target=GET_MODULE_ID_LIST_BY_NAME, side_effect=get_module_id_list_by_name),
    ],
)
| 45.119658 | 120 | 0.61612 |
from django.test import TestCase
from mock import MagicMock
from pipeline.component_framework.test import (
ComponentTestMixin,
ComponentTestCase,
CallAssertion,
ExecuteAssertion,
Call,
Patcher,
)
from pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1 import MonitorAlarmShieldComponent
class MonitorAlarmShieldComponentTest(TestCase, ComponentTestMixin):
    """Component tests for the v1.1 monitor alarm-shield plugin."""
    def cases(self):
        # One failing and one succeeding add_shield scenario.
        return [ALTER_BILL_FAIL_CASE, ALTER_BILL_SUCCESS_CASE]
    def component_cls(self):
        # Component under test.
        return MonitorAlarmShieldComponent
class MockClient(object):
    """Stand-in for the monitor API client: add_shield always returns the
    canned payload, and calling the instance returns the instance."""
    def __init__(self, add_shield_result=None):
        # Canned payload returned by every add_shield(...) call.
        self.add_shield = MagicMock(return_value=add_shield_result)
    def __call__(self, *args, **kwargs):
        # Mimic a factory: any call yields this same mock instance.
        return self
# Patch targets: module paths of the names the plugin resolves at runtime.
GET_CLIENT_BY_USER = "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1.BKMonitorClient"
GET_MODULE_ID_LIST_BY_NAME = (
    "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1" ".get_module_id_list_by_name"
)
GET_SET_LIST = "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1.get_set_list"
GET_LIST_BY_SELECTED_NAMES = (
    "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1" ".get_list_by_selected_names"
)
GET_SERVICE_TEMPLATE_LIST = (
    "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1" ".get_service_template_list"
)
GET_SERVICE_TEMPLATE_LIST_BY_NAMES = (
    "pipeline_plugins.components.collections.sites.open.monitor.alarm_shield.v1_1" ".get_service_template_list_by_names"
)
# Mock clients: one whose add_shield fails, one that succeeds with id "1".
CREATE_SHIELD_FAIL_CLIENT = MockClient(add_shield_result={"result": False, "message": "create shield fail"})
CREATE_SHIELD_SUCCESS_CLIENT = MockClient(add_shield_result={"result": True, "data": {"id": "1"}, "message": "success"})
# Shared plugin inputs: shield by node (sets/modules selected by name)
# for business 2, on two monitor metrics, with a fixed time window.
INPUT_DATA = {
    "bk_alarm_shield_info": {
        "bk_alarm_shield_scope": "node",
        "bk_alarm_shield_business": 2,
        "bk_alarm_shield_node": {
            "bk_set_method": "select",
            "bk_set_select": ["set_name1", "set_name2"],
            "bk_set_text": "",
            "bk_module_method": "select",
            "bk_module_select": ["module_name1", "module_name2", "module_name3"],
            "bk_module_text": "",
        },
        "bk_alarm_shield_IP": "",
    },
    "bk_alarm_shield_target": ["bk_monitor.system.load.load5", "bk_monitor.system.cpu_summary.usage"],
    "bk_alarm_shield_begin_time": "2020-09-28 11:18:58",
    "bk_alarm_shield_end_time": "2020-09-28 11:18:58",
    "bk_alarm_time_type": "0",
    "bk_alarm_shield_duration": "0",
}
def get_set_list(username, bk_biz_id, bk_supplier_account, kwargs=None):
set_list = [
{"bk_set_id": 2, "bk_set_name": "set_name1"},
{"bk_set_id": 3, "bk_set_name": "set_name2"},
{"bk_set_id": 4, "bk_set_name": "set_name3"},
{"bk_set_id": 5, "bk_set_name": "set_name4"},
]
return set_list
def get_list_by_selected_names(set_names, set_list):
selected_names = [{"bk_set_id": 2, "bk_set_name": "set_name1"}, {"bk_set_id": 3, "bk_set_name": "set_name2"}]
return selected_names
def get_service_template_list(username, bk_biz_id, bk_supplier_account):
service_template_list = [
{"bk_biz_id": 2, "name": "module_name1", "service_category_id": 32, "id": 51},
{"bk_biz_id": 2, "name": "module_name2", "service_category_id": 32, "id": 50},
{"bk_biz_id": 2, "name": "module_name3", "service_category_id": 2, "id": 47},
{"bk_biz_id": 2, "name": "module_name4", "service_category_id": 2, "id": 46},
{"bk_biz_id": 2, "name": "module_name5", "service_category_id": 2, "id": 45},
]
return service_template_list
def get_service_template_list_by_names(service_template_names, service_template_list):
    """Mock filter: return every known service template, regardless of names."""
    templates = []
    for pos, (tpl_id, category) in enumerate([(51, 32), (50, 32), (47, 2), (46, 2), (45, 2)], 1):
        templates.append(
            {"bk_biz_id": 2, "name": "module_name%d" % pos, "service_category_id": category, "id": tpl_id}
        )
    return templates
def get_module_id_list_by_name(bk_biz_id, username, set_list, service_template_list):
    """Mock resolver: the selected sets/templates always map to modules 1..5."""
    return list(range(1, 6))
# Failure path: the monitor API rejects add_shield, so the component must fail
# with an empty shield_id and surface the API error message verbatim (the
# message embeds the exact JSON request params, so it doubles as a call check).
ALTER_BILL_FAIL_CASE = ComponentTestCase(
    name="create shield fail case",
    inputs=INPUT_DATA,
    parent_data={"executor": "executor", "biz_cc_id": 2},
    execute_assertion=ExecuteAssertion(
        success=False,
        outputs={
            "shield_id": "",
            "message": '调用监控平台(Monitor)接口monitor.create_shield返回失败, params={"begin_time":"2020-09-28 11:18:58",'
            '"bk_biz_id":2,"category":"scope","cycle_config":{"begin_time":"","end_time":"","day_list":[],'
            '"week_list":[],"type":1},"description":"shield by bk_sops","dimension_config":'
            '{"scope_type":"node","target":[{"bk_obj_id":"module","bk_inst_id":1},{"bk_obj_id":"module",'
            '"bk_inst_id":2},{"bk_obj_id":"module","bk_inst_id":3},{"bk_obj_id":"module","bk_inst_id":4},'
            '{"bk_obj_id":"module","bk_inst_id":5}],"metric_id":["bk_monitor.system.load.load5",'
            '"bk_monitor.system.cpu_summary.usage"]},"end_time":"2020-09-28 11:18:58","notice_config":{},'
            '"shield_notice":false}, error=create shield fail',
        },
    ),
    schedule_assertion=None,
    # Verify the mocked client received exactly one add_shield call with the
    # fully-resolved module ids from the mocked CMDB helpers above.
    execute_call_assertion=[
        CallAssertion(
            func=CREATE_SHIELD_FAIL_CLIENT.add_shield,
            calls=[
                Call(
                    **{
                        "begin_time": "2020-09-28 11:18:58",
                        "bk_biz_id": 2,
                        "category": "scope",
                        "cycle_config": {"begin_time": "", "end_time": "", "day_list": [], "week_list": [], "type": 1},
                        "description": "shield by bk_sops",
                        "dimension_config": {
                            "scope_type": "node",
                            "target": [
                                {"bk_obj_id": "module", "bk_inst_id": 1},
                                {"bk_obj_id": "module", "bk_inst_id": 2},
                                {"bk_obj_id": "module", "bk_inst_id": 3},
                                {"bk_obj_id": "module", "bk_inst_id": 4},
                                {"bk_obj_id": "module", "bk_inst_id": 5},
                            ],
                            "metric_id": ["bk_monitor.system.load.load5", "bk_monitor.system.cpu_summary.usage"],
                        },
                        "end_time": "2020-09-28 11:18:58",
                        "notice_config": {},
                        "shield_notice": False,
                    }
                )
            ],
        )
    ],
    # Patch the API client factory and all CMDB helper lookups with the mocks
    # defined above.
    patchers=[
        Patcher(target=GET_CLIENT_BY_USER, return_value=CREATE_SHIELD_FAIL_CLIENT),
        Patcher(target=GET_SET_LIST, side_effect=get_set_list),
        Patcher(target=GET_LIST_BY_SELECTED_NAMES, side_effect=get_list_by_selected_names),
        Patcher(target=GET_SERVICE_TEMPLATE_LIST, side_effect=get_service_template_list),
        Patcher(target=GET_SERVICE_TEMPLATE_LIST_BY_NAMES, side_effect=get_service_template_list_by_names),
        Patcher(target=GET_MODULE_ID_LIST_BY_NAME, side_effect=get_module_id_list_by_name),
    ],
)
# Success path: add_shield succeeds, the component outputs the new shield id
# and the API message; the expected request payload is identical to the
# failure case.
ALTER_BILL_SUCCESS_CASE = ComponentTestCase(
    name="create shield success case",
    inputs=INPUT_DATA,
    parent_data={"executor": "executor", "biz_cc_id": 2},
    execute_assertion=ExecuteAssertion(success=True, outputs={"shield_id": "1", "message": "success"}),
    schedule_assertion=None,
    execute_call_assertion=[
        CallAssertion(
            func=CREATE_SHIELD_SUCCESS_CLIENT.add_shield,
            calls=[
                Call(
                    **{
                        "begin_time": "2020-09-28 11:18:58",
                        "bk_biz_id": 2,
                        "category": "scope",
                        "cycle_config": {"begin_time": "", "end_time": "", "day_list": [], "week_list": [], "type": 1},
                        "description": "shield by bk_sops",
                        "dimension_config": {
                            "scope_type": "node",
                            "target": [
                                {"bk_obj_id": "module", "bk_inst_id": 1},
                                {"bk_obj_id": "module", "bk_inst_id": 2},
                                {"bk_obj_id": "module", "bk_inst_id": 3},
                                {"bk_obj_id": "module", "bk_inst_id": 4},
                                {"bk_obj_id": "module", "bk_inst_id": 5},
                            ],
                            "metric_id": ["bk_monitor.system.load.load5", "bk_monitor.system.cpu_summary.usage"],
                        },
                        "end_time": "2020-09-28 11:18:58",
                        "notice_config": {},
                        "shield_notice": False,
                    }
                )
            ],
        )
    ],
    # Same patch set as the failure case, but with the succeeding mock client.
    patchers=[
        Patcher(target=GET_CLIENT_BY_USER, return_value=CREATE_SHIELD_SUCCESS_CLIENT),
        Patcher(target=GET_SET_LIST, side_effect=get_set_list),
        Patcher(target=GET_LIST_BY_SELECTED_NAMES, side_effect=get_list_by_selected_names),
        Patcher(target=GET_SERVICE_TEMPLATE_LIST, side_effect=get_service_template_list),
        Patcher(target=GET_SERVICE_TEMPLATE_LIST_BY_NAMES, side_effect=get_service_template_list_by_names),
        Patcher(target=GET_MODULE_ID_LIST_BY_NAME, side_effect=get_module_id_list_by_name),
    ],
)
| true | true |
f73d5082685750c6314f78e71043c8357cb4f17b | 4,912 | py | Python | env/lib/python3.7/genericpath.py | JacobMiske/nuclear-database-APIs | bc9fb6afb9aa0d98dde5d744d8f22b2791597e78 | [
"MIT"
] | null | null | null | env/lib/python3.7/genericpath.py | JacobMiske/nuclear-database-APIs | bc9fb6afb9aa0d98dde5d744d8f22b2791597e78 | [
"MIT"
] | null | null | null | env/lib/python3.7/genericpath.py | JacobMiske/nuclear-database-APIs | bc9fb6afb9aa0d98dde5d744d8f22b2791597e78 | [
"MIT"
] | 1 | 2020-05-01T20:23:35.000Z | 2020-05-01T20:23:35.000Z | """
Path operations common to more than one OS
Do not use directly. The OS specific modules import the appropriate
functions from this module themselves.
"""
import os
import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdir', 'isfile', 'samefile', 'sameopenfile',
'samestat']
# Does a path exist?
# This is false for dangling symbolic links on systems that support them.
def exists(path):
    """Return True if *path* exists; broken symbolic links count as missing."""
    try:
        os.stat(path)
        return True
    except OSError:
        return False
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path on systems that support symlinks
def isfile(path):
    """Return True if *path* names a regular file (symlinks are followed)."""
    try:
        mode = os.stat(path).st_mode
    except OSError:
        return False
    return stat.S_ISREG(mode)
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir()
# can be true for the same path on systems that support symlinks
def isdir(s):
    """Return True if *s* refers to an existing directory (symlinks followed)."""
    try:
        mode = os.stat(s).st_mode
    except OSError:
        return False
    return stat.S_ISDIR(mode)
def getsize(filename):
    """Return the size of *filename* in bytes, as reported by os.stat()."""
    st = os.stat(filename)
    return st.st_size
def getmtime(filename):
    """Return the last-modification timestamp of *filename* via os.stat()."""
    st = os.stat(filename)
    return st.st_mtime
def getatime(filename):
    """Return the last-access timestamp of *filename* via os.stat()."""
    st = os.stat(filename)
    return st.st_atime
def getctime(filename):
    """Return the metadata-change timestamp of *filename* via os.stat()."""
    st = os.stat(filename)
    return st.st_ctime
# Return the longest prefix of all list elements.
def commonprefix(m):
    """Return the longest common leading component of the pathnames in *m*.

    Works character-wise on strings/bytes and element-wise on lists/tuples
    of path parts; an empty input yields ''.
    """
    if not m:
        return ''
    # Lists/tuples of parts are compared as-is; everything else goes through
    # os.fspath so path-like objects are accepted.
    if not isinstance(m[0], (list, tuple)):
        m = tuple(map(os.fspath, m))
    # The lexicographic min and max differ from each other at least as early
    # as any other pair, so comparing just those two suffices.
    lo, hi = min(m), max(m)
    for i, item in enumerate(lo):
        if item != hi[i]:
            return lo[:i]
    return lo
# Are two stat buffers (obtained from stat, fstat or lstat)
# describing the same file?
def samestat(s1, s2):
    """Return True when two stat results name the same file (device + inode)."""
    return (s1.st_ino, s1.st_dev) == (s2.st_ino, s2.st_dev)
# Are two filenames really pointing to the same file?
def samefile(f1, f2):
    """Return True if both pathnames refer to the same file or directory.

    Identity is judged by device and inode numbers; any os.stat() failure
    on either pathname propagates to the caller.
    """
    return samestat(os.stat(f1), os.stat(f2))
# Are two open files really referencing the same file?
# (Not necessarily the same file descriptor!)
def sameopenfile(fp1, fp2):
    """Return True if two open file descriptors reference the same file.

    The descriptors need not be equal; comparison is by device and inode.
    """
    return samestat(os.fstat(fp1), os.fstat(fp2))
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
# Generic implementation of splitext, to be parametrized with
# the separators
def _splitext(p, sep, altsep, extsep):
"""Split the extension from a pathname.
Extension is everything from the last dot to the end, ignoring
leading dots. Returns "(root, ext)"; ext may be empty."""
# NOTE: This code must work for text and bytes strings.
sepIndex = p.rfind(sep)
if altsep:
altsepIndex = p.rfind(altsep)
sepIndex = max(sepIndex, altsepIndex)
dotIndex = p.rfind(extsep)
if dotIndex > sepIndex:
# skip all leading dots
filenameIndex = sepIndex + 1
while filenameIndex < dotIndex:
if p[filenameIndex:filenameIndex+1] != extsep:
return p[:dotIndex], p[dotIndex:]
filenameIndex += 1
return p, p[:0]
def _check_arg_types(funcname, *args):
hasstr = hasbytes = False
for s in args:
if isinstance(s, str):
hasstr = True
elif isinstance(s, bytes):
hasbytes = True
else:
raise TypeError('%s() argument must be str or bytes, not %r' %
(funcname, s.__class__.__name__)) from None
if hasstr and hasbytes:
raise TypeError("Can't mix strings and bytes in path components") from None
| 31.487179 | 83 | 0.660423 | import os
import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdir', 'isfile', 'samefile', 'sameopenfile',
'samestat']
def exists(path):
try:
os.stat(path)
except OSError:
return False
return True
def isfile(path):
try:
st = os.stat(path)
except OSError:
return False
return stat.S_ISREG(st.st_mode)
def isdir(s):
try:
st = os.stat(s)
except OSError:
return False
return stat.S_ISDIR(st.st_mode)
def getsize(filename):
return os.stat(filename).st_size
def getmtime(filename):
return os.stat(filename).st_mtime
def getatime(filename):
return os.stat(filename).st_atime
def getctime(filename):
return os.stat(filename).st_ctime
def commonprefix(m):
if not m: return ''
if not isinstance(m[0], (list, tuple)):
m = tuple(map(os.fspath, m))
s1 = min(m)
s2 = max(m)
for i, c in enumerate(s1):
if c != s2[i]:
return s1[:i]
return s1
# Are two stat buffers (obtained from stat, fstat or lstat)
# describing the same file?
def samestat(s1, s2):
return (s1.st_ino == s2.st_ino and
s1.st_dev == s2.st_dev)
# Are two filenames really pointing to the same file?
def samefile(f1, f2):
s1 = os.stat(f1)
s2 = os.stat(f2)
return samestat(s1, s2)
# Are two open files really referencing the same file?
# (Not necessarily the same file descriptor!)
def sameopenfile(fp1, fp2):
s1 = os.fstat(fp1)
s2 = os.fstat(fp2)
return samestat(s1, s2)
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
# Generic implementation of splitext, to be parametrized with
# the separators
def _splitext(p, sep, altsep, extsep):
# NOTE: This code must work for text and bytes strings.
sepIndex = p.rfind(sep)
if altsep:
altsepIndex = p.rfind(altsep)
sepIndex = max(sepIndex, altsepIndex)
dotIndex = p.rfind(extsep)
if dotIndex > sepIndex:
# skip all leading dots
filenameIndex = sepIndex + 1
while filenameIndex < dotIndex:
if p[filenameIndex:filenameIndex+1] != extsep:
return p[:dotIndex], p[dotIndex:]
filenameIndex += 1
return p, p[:0]
def _check_arg_types(funcname, *args):
hasstr = hasbytes = False
for s in args:
if isinstance(s, str):
hasstr = True
elif isinstance(s, bytes):
hasbytes = True
else:
raise TypeError('%s() argument must be str or bytes, not %r' %
(funcname, s.__class__.__name__)) from None
if hasstr and hasbytes:
raise TypeError("Can't mix strings and bytes in path components") from None
| true | true |
f73d50c0410ebc1fc9af26d613df3949dfbb14a3 | 10,672 | py | Python | tests/client/app_search/test_app_search.py | pmusa/enterprise-search-python | 31a64285532373b5e709718a15ea24746b1f3e6e | [
"Apache-2.0"
] | null | null | null | tests/client/app_search/test_app_search.py | pmusa/enterprise-search-python | 31a64285532373b5e709718a15ea24746b1f3e6e | [
"Apache-2.0"
] | 1 | 2021-02-24T07:13:31.000Z | 2021-02-24T07:13:31.000Z | tests/client/app_search/test_app_search.py | pmusa/enterprise-search-python | 31a64285532373b5e709718a15ea24746b1f3e6e | [
"Apache-2.0"
] | null | null | null | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import jwt
import pytest
from elastic_enterprise_search import AppSearch, UnauthorizedError
@pytest.fixture()
def app_search():
    # Client for a local App Search instance; the private API key matches the
    # recorded VCR cassettes replayed by the tests below.
    yield AppSearch(
        "http://localhost:3002", http_auth="private-k3ra4bqu12vgnhe3wibdw69f"
    )
@pytest.mark.vcr()
def test_list_engines(app_search):
    """Listing engines returns the single recorded demo engine."""
    resp = app_search.list_engines()
    assert resp.status == 200
    assert resp == {
        "meta": {
            "page": {"current": 1, "total_pages": 1, "total_results": 1, "size": 25}
        },
        "results": [
            {"name": "national-parks-demo", "type": "default", "language": None}
        ],
    }
@pytest.mark.vcr()
def test_list_documents(app_search):
    """Document listing is paginated: page 3 of size 2 yields two parks."""
    resp = app_search.list_documents(
        engine_name="national-parks-demo", page_size=2, current_page=3
    )
    assert resp.status == 200
    assert resp == {
        "meta": {
            "page": {"current": 3, "total_pages": 30, "total_results": 59, "size": 2}
        },
        "results": [
            {
                "nps_link": "https://www.nps.gov/zion/index.htm",
                "title": "Zion",
                "date_established": "1919-11-19T06:00:00+00:00",
                "world_heritage_site": "false",
                "states": ["Utah"],
                "description": "Located at the junction of the Colorado Plateau, Great Basin, and Mojave Desert, this park contains sandstone features such as mesas, rock towers, and canyons, including the Virgin River Narrows. The various sandstone formations and the forks of the Virgin River create a wilderness divided into four ecosystems: desert, riparian, woodland, and coniferous forest.",
                "visitors": 4295127.0,
                "id": "park_zion",
                "location": "37.3,-113.05",
                "square_km": 595.8,
                "acres": 147237.02,
            },
            {
                "nps_link": "https://www.nps.gov/yell/index.htm",
                "title": "Yellowstone",
                "date_established": "1872-03-01T06:00:00+00:00",
                "world_heritage_site": "true",
                "states": ["Wyoming", "Montana", "Idaho"],
                "description": "Situated on the Yellowstone Caldera, the park has an expansive network of geothermal areas including boiling mud pots, vividly colored hot springs such as Grand Prismatic Spring, and regularly erupting geysers, the best-known being Old Faithful. The yellow-hued Grand Canyon of the Yellowstone River contains several high waterfalls, while four mountain ranges traverse the park. More than 60 mammal species including gray wolves, grizzly bears, black bears, lynxes, bison, and elk, make this park one of the best wildlife viewing spots in the country.",
                "visitors": 4257177.0,
                "id": "park_yellowstone",
                "location": "44.6,-110.5",
                "square_km": 8983.2,
                "acres": 2219790.71,
            },
        ],
    }
@pytest.mark.vcr()
def test_delete_documents(app_search):
    """Bulk delete reports a per-document deleted flag."""
    resp = app_search.delete_documents(
        engine_name="national-parks-demo",
        body=[
            "park_yellowstone",
            "park_zion",
        ],
    )
    assert resp.status == 200
    assert resp == [
        {"id": "park_yellowstone", "deleted": True},
        {"id": "park_zion", "deleted": True},
    ]
@pytest.mark.vcr()
def test_index_documents(app_search):
    """Bulk indexing returns one empty-errors entry per accepted document."""
    resp = app_search.index_documents(
        engine_name="national-parks-demo",
        body=[
            {
                "nps_link": "https://www.nps.gov/zion/index.htm",
                "title": "Zion",
                "date_established": "1919-11-19T06:00:00+00:00",
                "world_heritage_site": "false",
                "states": ["Utah"],
                "description": "Located at the junction of the Colorado Plateau, Great Basin, and Mojave Desert, this park contains sandstone features such as mesas, rock towers, and canyons, including the Virgin River Narrows. The various sandstone formations and the forks of the Virgin River create a wilderness divided into four ecosystems: desert, riparian, woodland, and coniferous forest.",
                "visitors": 4295127.0,
                "id": "park_zion",
                "location": "37.3,-113.05",
                "square_km": 595.8,
                "acres": 147237.02,
            },
            {
                "nps_link": "https://www.nps.gov/yell/index.htm",
                "title": "Yellowstone",
                "date_established": "1872-03-01T06:00:00+00:00",
                "world_heritage_site": "true",
                "states": ["Wyoming", "Montana", "Idaho"],
                "description": "Situated on the Yellowstone Caldera, the park has an expansive network of geothermal areas including boiling mud pots, vividly colored hot springs such as Grand Prismatic Spring, and regularly erupting geysers, the best-known being Old Faithful. The yellow-hued Grand Canyon of the Yellowstone River contains several high waterfalls, while four mountain ranges traverse the park. More than 60 mammal species including gray wolves, grizzly bears, black bears, lynxes, bison, and elk, make this park one of the best wildlife viewing spots in the country.",
                "visitors": 4257177.0,
                "id": "park_yellowstone",
                "location": "44.6,-110.5",
                "square_km": 8983.2,
                "acres": 2219790.71,
            },
        ],
    )
    assert resp.status == 200
    assert resp == [
        {"id": "park_zion", "errors": []},
        {"id": "park_yellowstone", "errors": []},
    ]
@pytest.mark.vcr()
def test_search(app_search):
    """Search wraps each field in {"raw": ...} and adds scoring metadata."""
    resp = app_search.search(
        engine_name="national-parks-demo", body={"query": "tree", "page": {"size": 2}}
    )
    assert resp.status == 200
    assert resp == {
        "meta": {
            "alerts": [],
            "warnings": [],
            "page": {"current": 1, "total_pages": 12, "total_results": 23, "size": 2},
            "engine": {"name": "national-parks-demo", "type": "default"},
            "request_id": "4999a4ef-b750-4bef-aea2-87f54d9c87b3",
        },
        "results": [
            {
                "nps_link": {"raw": "https://www.nps.gov/grsm/index.htm"},
                "title": {"raw": "Great Smoky Mountains"},
                "date_established": {"raw": "1934-06-15T05:00:00+00:00"},
                "world_heritage_site": {"raw": "true"},
                "states": {"raw": ["Tennessee", "North Carolina"]},
                "description": {
                    "raw": "The Great Smoky Mountains, part of the Appalachian Mountains, span a wide range of elevations, making them home to over 400 vertebrate species, 100 tree species, and 5000 plant species. Hiking is the park's main attraction, with over 800 miles (1,300 km) of trails, including 70 miles (110 km) of the Appalachian Trail. Other activities include fishing, horseback riding, and touring nearly 80 historic structures."
                },
                "visitors": {"raw": 11312786.0},
                "_meta": {
                    "id": "park_great-smoky-mountains",
                    "engine": "national-parks-demo",
                    "score": 16969184.0,
                },
                "id": {"raw": "park_great-smoky-mountains"},
                "location": {"raw": "35.68,-83.53"},
                "square_km": {"raw": 2114.2},
                "acres": {"raw": 522426.88},
            },
            {
                "nps_link": {"raw": "https://www.nps.gov/yose/index.htm"},
                "title": {"raw": "Yosemite"},
                "date_established": {"raw": "1890-10-01T05:00:00+00:00"},
                "world_heritage_site": {"raw": "true"},
                "states": {"raw": ["California"]},
                "description": {
                    "raw": "Yosemite features sheer granite cliffs, exceptionally tall waterfalls, and old-growth forests at a unique intersection of geology and hydrology. Half Dome and El Capitan rise from the park's centerpiece, the glacier-carved Yosemite Valley, and from its vertical walls drop Yosemite Falls, one of North America's tallest waterfalls at 2,425 feet (739 m) high. Three giant sequoia groves, along with a pristine wilderness in the heart of the Sierra Nevada, are home to a wide variety of rare plant and animal species."
                },
                "visitors": {"raw": 5028868.0},
                "_meta": {
                    "id": "park_yosemite",
                    "engine": "national-parks-demo",
                    "score": 7543302.0,
                },
                "id": {"raw": "park_yosemite"},
                "location": {"raw": "37.83,-119.5"},
                "square_km": {"raw": 3082.7},
                "acres": {"raw": 761747.5},
            },
        ],
    }
@pytest.mark.vcr()
def test_not_authorized(app_search):
    """Missing credentials raise UnauthorizedError unless 401 is ignored."""
    app_search.http_auth = None
    with pytest.raises(UnauthorizedError) as e:
        app_search.list_engines()
    assert e.value.status == 401
    assert e.value.message == {"error": "You need to sign in before continuing."}
    assert e.value.errors == ()
    # ignore_status suppresses the exception and hands back the raw response.
    resp = app_search.list_engines(ignore_status=401)
    assert resp.status == 401
    assert resp == {"error": "You need to sign in before continuing."}
def test_create_signed_search_key():
    """Signed search keys are HS256 JWTs carrying the restricted options."""
    private_key = "private-"
    signed_key = AppSearch.create_signed_search_key(
        api_key=private_key,
        api_key_name="api-key-name",
        search_fields={"first_name": {}},
        filters={"status": "available"},
        facets=None,
    )
    assert isinstance(signed_key, str)
    # Decoding with the same secret must recover exactly the restrictions.
    assert jwt.decode(signed_key, private_key, algorithms="HS256") == {
        "api_key_name": "api-key-name",
        "facets": None,
        "filters": {"status": "available"},
        "search_fields": {"first_name": {}},
    }
| 46 | 586 | 0.585551 |
import jwt
import pytest
from elastic_enterprise_search import AppSearch, UnauthorizedError
@pytest.fixture()
def app_search():
yield AppSearch(
"http://localhost:3002", http_auth="private-k3ra4bqu12vgnhe3wibdw69f"
)
@pytest.mark.vcr()
def test_list_engines(app_search):
resp = app_search.list_engines()
assert resp.status == 200
assert resp == {
"meta": {
"page": {"current": 1, "total_pages": 1, "total_results": 1, "size": 25}
},
"results": [
{"name": "national-parks-demo", "type": "default", "language": None}
],
}
@pytest.mark.vcr()
def test_list_documents(app_search):
resp = app_search.list_documents(
engine_name="national-parks-demo", page_size=2, current_page=3
)
assert resp.status == 200
assert resp == {
"meta": {
"page": {"current": 3, "total_pages": 30, "total_results": 59, "size": 2}
},
"results": [
{
"nps_link": "https://www.nps.gov/zion/index.htm",
"title": "Zion",
"date_established": "1919-11-19T06:00:00+00:00",
"world_heritage_site": "false",
"states": ["Utah"],
"description": "Located at the junction of the Colorado Plateau, Great Basin, and Mojave Desert, this park contains sandstone features such as mesas, rock towers, and canyons, including the Virgin River Narrows. The various sandstone formations and the forks of the Virgin River create a wilderness divided into four ecosystems: desert, riparian, woodland, and coniferous forest.",
"visitors": 4295127.0,
"id": "park_zion",
"location": "37.3,-113.05",
"square_km": 595.8,
"acres": 147237.02,
},
{
"nps_link": "https://www.nps.gov/yell/index.htm",
"title": "Yellowstone",
"date_established": "1872-03-01T06:00:00+00:00",
"world_heritage_site": "true",
"states": ["Wyoming", "Montana", "Idaho"],
"description": "Situated on the Yellowstone Caldera, the park has an expansive network of geothermal areas including boiling mud pots, vividly colored hot springs such as Grand Prismatic Spring, and regularly erupting geysers, the best-known being Old Faithful. The yellow-hued Grand Canyon of the Yellowstone River contains several high waterfalls, while four mountain ranges traverse the park. More than 60 mammal species including gray wolves, grizzly bears, black bears, lynxes, bison, and elk, make this park one of the best wildlife viewing spots in the country.",
"visitors": 4257177.0,
"id": "park_yellowstone",
"location": "44.6,-110.5",
"square_km": 8983.2,
"acres": 2219790.71,
},
],
}
@pytest.mark.vcr()
def test_delete_documents(app_search):
resp = app_search.delete_documents(
engine_name="national-parks-demo",
body=[
"park_yellowstone",
"park_zion",
],
)
assert resp.status == 200
assert resp == [
{"id": "park_yellowstone", "deleted": True},
{"id": "park_zion", "deleted": True},
]
@pytest.mark.vcr()
def test_index_documents(app_search):
resp = app_search.index_documents(
engine_name="national-parks-demo",
body=[
{
"nps_link": "https://www.nps.gov/zion/index.htm",
"title": "Zion",
"date_established": "1919-11-19T06:00:00+00:00",
"world_heritage_site": "false",
"states": ["Utah"],
"description": "Located at the junction of the Colorado Plateau, Great Basin, and Mojave Desert, this park contains sandstone features such as mesas, rock towers, and canyons, including the Virgin River Narrows. The various sandstone formations and the forks of the Virgin River create a wilderness divided into four ecosystems: desert, riparian, woodland, and coniferous forest.",
"visitors": 4295127.0,
"id": "park_zion",
"location": "37.3,-113.05",
"square_km": 595.8,
"acres": 147237.02,
},
{
"nps_link": "https://www.nps.gov/yell/index.htm",
"title": "Yellowstone",
"date_established": "1872-03-01T06:00:00+00:00",
"world_heritage_site": "true",
"states": ["Wyoming", "Montana", "Idaho"],
"description": "Situated on the Yellowstone Caldera, the park has an expansive network of geothermal areas including boiling mud pots, vividly colored hot springs such as Grand Prismatic Spring, and regularly erupting geysers, the best-known being Old Faithful. The yellow-hued Grand Canyon of the Yellowstone River contains several high waterfalls, while four mountain ranges traverse the park. More than 60 mammal species including gray wolves, grizzly bears, black bears, lynxes, bison, and elk, make this park one of the best wildlife viewing spots in the country.",
"visitors": 4257177.0,
"id": "park_yellowstone",
"location": "44.6,-110.5",
"square_km": 8983.2,
"acres": 2219790.71,
},
],
)
assert resp.status == 200
assert resp == [
{"id": "park_zion", "errors": []},
{"id": "park_yellowstone", "errors": []},
]
@pytest.mark.vcr()
def test_search(app_search):
resp = app_search.search(
engine_name="national-parks-demo", body={"query": "tree", "page": {"size": 2}}
)
assert resp.status == 200
assert resp == {
"meta": {
"alerts": [],
"warnings": [],
"page": {"current": 1, "total_pages": 12, "total_results": 23, "size": 2},
"engine": {"name": "national-parks-demo", "type": "default"},
"request_id": "4999a4ef-b750-4bef-aea2-87f54d9c87b3",
},
"results": [
{
"nps_link": {"raw": "https://www.nps.gov/grsm/index.htm"},
"title": {"raw": "Great Smoky Mountains"},
"date_established": {"raw": "1934-06-15T05:00:00+00:00"},
"world_heritage_site": {"raw": "true"},
"states": {"raw": ["Tennessee", "North Carolina"]},
"description": {
"raw": "The Great Smoky Mountains, part of the Appalachian Mountains, span a wide range of elevations, making them home to over 400 vertebrate species, 100 tree species, and 5000 plant species. Hiking is the park's main attraction, with over 800 miles (1,300 km) of trails, including 70 miles (110 km) of the Appalachian Trail. Other activities include fishing, horseback riding, and touring nearly 80 historic structures."
},
"visitors": {"raw": 11312786.0},
"_meta": {
"id": "park_great-smoky-mountains",
"engine": "national-parks-demo",
"score": 16969184.0,
},
"id": {"raw": "park_great-smoky-mountains"},
"location": {"raw": "35.68,-83.53"},
"square_km": {"raw": 2114.2},
"acres": {"raw": 522426.88},
},
{
"nps_link": {"raw": "https://www.nps.gov/yose/index.htm"},
"title": {"raw": "Yosemite"},
"date_established": {"raw": "1890-10-01T05:00:00+00:00"},
"world_heritage_site": {"raw": "true"},
"states": {"raw": ["California"]},
"description": {
"raw": "Yosemite features sheer granite cliffs, exceptionally tall waterfalls, and old-growth forests at a unique intersection of geology and hydrology. Half Dome and El Capitan rise from the park's centerpiece, the glacier-carved Yosemite Valley, and from its vertical walls drop Yosemite Falls, one of North America's tallest waterfalls at 2,425 feet (739 m) high. Three giant sequoia groves, along with a pristine wilderness in the heart of the Sierra Nevada, are home to a wide variety of rare plant and animal species."
},
"visitors": {"raw": 5028868.0},
"_meta": {
"id": "park_yosemite",
"engine": "national-parks-demo",
"score": 7543302.0,
},
"id": {"raw": "park_yosemite"},
"location": {"raw": "37.83,-119.5"},
"square_km": {"raw": 3082.7},
"acres": {"raw": 761747.5},
},
],
}
@pytest.mark.vcr()
def test_not_authorized(app_search):
app_search.http_auth = None
with pytest.raises(UnauthorizedError) as e:
app_search.list_engines()
assert e.value.status == 401
assert e.value.message == {"error": "You need to sign in before continuing."}
assert e.value.errors == ()
resp = app_search.list_engines(ignore_status=401)
assert resp.status == 401
assert resp == {"error": "You need to sign in before continuing."}
def test_create_signed_search_key():
private_key = "private-"
signed_key = AppSearch.create_signed_search_key(
api_key=private_key,
api_key_name="api-key-name",
search_fields={"first_name": {}},
filters={"status": "available"},
facets=None,
)
assert isinstance(signed_key, str)
assert jwt.decode(signed_key, private_key, algorithms="HS256") == {
"api_key_name": "api-key-name",
"facets": None,
"filters": {"status": "available"},
"search_fields": {"first_name": {}},
}
| true | true |
f73d51a4605556876ee30be66356a98ed6903bd2 | 1,180 | py | Python | trik/Treading.py | m1raynee/trikset.py-typehint | 2176229fb11628f369eb7a090b89c4b34985a7fd | [
"MIT"
] | 1 | 2022-01-23T21:23:15.000Z | 2022-01-23T21:23:15.000Z | trik/Treading.py | m1raynee/trikset-py-typehint | 2176229fb11628f369eb7a090b89c4b34985a7fd | [
"MIT"
] | null | null | null | trik/Treading.py | m1raynee/trikset-py-typehint | 2176229fb11628f369eb7a090b89c4b34985a7fd | [
"MIT"
] | null | null | null | from typing import Callable
def joinThread(treadId: str) -> None:
    """Wait for the given thread to finish.

    Parameters
    ----------
    treadId: :class:`str`
        Thread id
    """
    raise NotImplementedError
def killThread(treadId: str) -> None:
    """Terminate execution of the given thread.

    Parameters
    ----------
    treadId: :class:`str`
        Thread id
    """
    raise NotImplementedError
def receiveMessage(wait: bool) -> str:
    """Fetch a received message.

    Parameters
    ----------
    wait: :class:`bool`
        If ``True``, block until a message arrives.
    """
    raise NotImplementedError
def sendMessage(treadId: str, message: str) -> None:
    """Send a message to the given thread.

    Parameters
    ----------
    treadId: :class:`str`
        Thread id
    message: :class:`str`
        Message text
    """
    raise NotImplementedError
def startThread(newThreadId: str, functionName: Callable):
    """Run the given function in a separate thread.

    Parameters
    ----------
    newThreadId: :class:`str`
        Id of the new thread
    functionName: Callable
        Function to execute
    """
| 20 | 76 | 0.616102 | from typing import Callable
def joinThread(treadId: str) -> None:
raise NotImplementedError
def killThread(treadId: str) -> None:
raise NotImplementedError
def receiveMessage(wait: bool) -> str:
raise NotImplementedError
def sendMessage(treadId: str, message: str) -> None:
raise NotImplementedError
def startThread(newThreadId: str, functionName: Callable):
| true | true |
f73d531b542c0931ce8dda1c1db069885ad1b953 | 701 | py | Python | test.py | bochainwu/self_learning2021 | a80580a4bb82e280f06b093f19d93ff354f7cd17 | [
"MIT"
] | 5 | 2021-07-17T03:34:19.000Z | 2021-11-16T11:28:24.000Z | test.py | bochainwu/self_learning2021 | a80580a4bb82e280f06b093f19d93ff354f7cd17 | [
"MIT"
] | 1 | 2021-07-18T06:33:30.000Z | 2021-07-19T14:11:14.000Z | test.py | bochainwu/self_learning2021 | a80580a4bb82e280f06b093f19d93ff354f7cd17 | [
"MIT"
] | null | null | null | '''
a = [[1 for j in range(5)],
[2 for j in range(5)],
[3 for j in range(5)],
[4 for j in range(5)],
[5 for j in range(5)],
[6 for j in range(5)]]
for i in range(6):
for j in range(5):
print(a[i][j], end='')
print()
print()
x, y = 0, 0
while True:
if x == 6:
print()
x = 0
y += 1
print(a[x][y] ,end='')
x += 1
if x==6 and y==4:
break
'''
'''
a = 1
a = b
print(b)
for i in range(5):
if a == 1:
print('1')
pass
elif a == 1:
print('2')
else:
print('x')
'''
'''
a, b,c,d = 0, 0 ,0 ,1
for i in range(4):
c, d, a, b = b, c, d, a
print(a,b,c,d)
''' | 14.914894 | 30 | 0.388017 | true | true | |
f73d535f83d4563314565b1875e6c700ddb87bfd | 5,749 | py | Python | pygimli/physics/traveltime/raplot.py | baender/gimli | eb9a2204669cf11209b9577472f61ac70217a191 | [
"Apache-2.0"
] | 1 | 2022-03-27T18:37:08.000Z | 2022-03-27T18:37:08.000Z | pygimli/physics/traveltime/raplot.py | baender/gimli | eb9a2204669cf11209b9577472f61ac70217a191 | [
"Apache-2.0"
] | null | null | null | pygimli/physics/traveltime/raplot.py | baender/gimli | eb9a2204669cf11209b9577472f61ac70217a191 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""""WRITEME"""
import matplotlib.pyplot as plt
import numpy as np
import pygimli as pg
from pygimli.viewer.mpl import createColorBar # , updateColorBar
from .ratools import shotReceiverDistances
def drawTravelTimeData(ax, data, t=None):
    """Draw first-arrival traveltime curves (one line per shot) into mpl axes.

    Parameters
    ----------
    ax : mpl.Axes
        Target axes; also used to convert pixel sizes into data units.
    data : pg.DataContainer
        Must contain sensor indices 's' (shot) and 'g' (geophone) and
        traveltimes 't'; sensors are assumed numbered internally [0..n).
    t : iterable, optional
        Traveltimes to plot instead of data('t').
    """
    x = pg.x(data.sensorPositions())
    # z = pg.z(data.sensorPositions())
    shots = pg.unique(pg.sort(data('s')))
    geoph = pg.unique(pg.sort(data('g')))
    # sensor indices may be 1-based; shift to 0-based for array indexing
    startOffsetIDX = 0
    if min(min(shots), min(geoph)) == 1:
        startOffsetIDX = 1
    tShow = data('t')
    if t is not None:
        tShow = t
    # y axis is inverted (traveltime grows downward)
    ax.set_xlim([min(x), max(x)])
    ax.set_ylim([max(tShow), -0.002])
    ax.figure.show()
    # one traveltime curve per shot, over its geophones
    for shot in shots:
        gIdx = pg.find(data('s') == shot)
        sensorIdx = [int(i__ - startOffsetIDX) for i__ in data('g')[gIdx]]
        ax.plot(x[sensorIdx], tShow[gIdx], 'x-')
    # size of one screen pixel expressed in data coordinates, used to place
    # the shot/geophone markers at a fixed pixel offset above the curves
    yPixel = ax.transData.inverted().transform_point((1, 1))[1] - \
        ax.transData.inverted().transform_point((0, 0))[1]
    xPixel = ax.transData.inverted().transform_point((1, 1))[0] - \
        ax.transData.inverted().transform_point((0, 0))[0]
    # draw shot points
    ax.plot(x[[int(i__ - startOffsetIDX) for i__ in shots]],
            np.zeros(len(shots)) + 8. * yPixel, 'gv', markersize=8)
    # draw geophone points
    ax.plot(x[[int(i__ - startOffsetIDX) for i__ in geoph]],
            np.zeros(len(geoph)) + 3. * yPixel, 'r^', markersize=8)
    ax.grid()
    ax.set_ylim([max(tShow), +16. * yPixel])
    ax.set_xlim([min(x) - 5. * xPixel, max(x) + 5. * xPixel])
    ax.set_xlabel('x-Coordinate [m]')
    ax.set_ylabel('Traveltime [ms]')
def plotFirstPicks(ax, data, tt=None, plotva=False, marker='x-'):
    """Deprecated alias for :func:`drawFirstPicks` (drawFOO naming convention)."""
    pg.deprecated("use drawFirstPicks")
    return drawFirstPicks(ax, data, tt=tt, plotva=plotva, marker=marker)
def drawFirstPicks(ax, data, tt=None, plotva=False, marker='x-'):
    """Draw first-arrival traveltimes as one polyline per shot position.

    If *plotva* is set, apparent velocities |gx-sx|/t are drawn instead of
    raw traveltimes. The y axis is inverted afterwards.
    """
    sensor_x = pg.x(data)
    geo_x = np.array([sensor_x[int(g)] for g in data("g")])
    shot_x = np.array([sensor_x[int(s)] for s in data("s")])
    times = np.array(data("t")) if tt is None else tt
    if plotva:
        times = np.absolute(geo_x - shot_x) / times
    palette = plt.cm.tab10(np.arange(10))
    for idx, shot in enumerate(np.unique(shot_x)):
        col = palette[idx % 10]
        mask = shot_x == shot
        t_shot = times[mask]
        g_shot = geo_x[mask]
        order = g_shot.argsort()
        ax.plot(g_shot[order], t_shot[order], marker, color=col)
        # square marker at y=0 marks the shot location itself
        ax.plot(shot, 0., 's', color=col, markersize=8)
    ax.grid(True)
    ax.set_ylabel("Apparent velocity (m/s)" if plotva else "Traveltime (s)")
    ax.set_xlabel("x (m)")
    ax.invert_yaxis()
def _getOffset(data, full=False):
    """Deprecated: forwards to :func:`shotReceiverDistances`."""
    pg.deprecated('use shotReceiverDistances')  # 190429 ??
    return shotReceiverDistances(data, full)
def showVA(data, usePos=True, ax=None, **kwargs):
    """Show apparent velocity as image plot.

    Parameters
    ----------
    data : pg.DataContainer()
        Datacontainer with 's' and 'g' sensor indices and 't' traveltimes.
    usePos : bool [True]
        Use sensor positions for axes tick labels.
    ax : mpl.Axes, optional
        Axes to draw into; a new one is created if None.

    Returns
    -------
    gci, cBar
        The image artist and the created colorbar.
    """
    ax, _ = pg.show(ax=ax)
    gci = drawVA(ax, data=data, usePos=usePos, **kwargs)
    cBar = createColorBar(gci, **kwargs)
    return gci, cBar
def drawVA(ax, data, vals=None, usePos=True, pseudosection=False, **kwargs):
    """Draw apparent velocities as a matrix image into *ax*.

    Parameters
    ----------
    ax : mpl.Axes
        Axes to draw into.
    data : pg.DataContainer()
        Datacontainer with 's' and 'g' sensor indices and 't' traveltimes.
    vals : iterable | str, optional
        Traveltimes (or the name of a data field holding them); if None the
        container must provide 't' values.
    usePos : bool [True]
        Use sensor positions for axes tick labels.
    pseudosection : bool [False]
        Show in pseudosection style (midpoint vs. offset) instead of
        shot vs. geophone position.

    Returns
    -------
    gci
        The created image artist.
    """
    if isinstance(vals, str):
        vals = data(vals)
    if vals is None:
        vals = data('t')

    px = pg.x(data)
    gx = np.asarray([px[g] for g in data.id("g")])
    sx = np.asarray([px[s] for s in data.id("s")])
    offset = shotReceiverDistances(data, full=True)

    # apparent velocity = offset / traveltime; zero times would divide by zero
    if min(vals) < 1e-10:
        print(vals)
        pg.error('zero traveltimes found.')
    va = offset / vals

    if pseudosection:
        midpoint = (gx + sx) / 2
        # BUGFIX: keyword was misspelled 'queeze' which raised a TypeError
        gci = pg.viewer.mpl.dataview.drawVecMatrix(ax, midpoint, offset, va,
                                                   squeeze=True,
                                                   label=pg.unit('as'))
    else:
        gci = pg.viewer.mpl.dataview.drawVecMatrix(ax, gx, sx, va,
                                                   squeeze=True,
                                                   label=pg.unit('as'))

    if usePos:
        # label every 50th sensor tick with its x position
        xt = np.arange(0, data.sensorCount(), 50)
        ax.set_xticks(xt)
        ax.set_xticklabels([str(int(px[xti])) for xti in xt])
        ax.set_yticks(xt)
        ax.set_yticklabels([str(int(px[xti])) for xti in xt])

    return gci
def plotLines(ax, line_filename, step=1):
    """Read x-z pairs from *line_filename* and draw them as black lines.

    Parameters
    ----------
    ax : mpl.Axes
        Axes to draw into.
    line_filename : str
        Text file with one "x z" pair per line (readable by np.loadtxt).
    step : int [1]
        If 1, draw all points as one continuous line. If greater than 1,
        draw separate segments of *step* consecutive points each
        (previously only step==2 was handled; other values silently drew
        nothing).
    """
    # atleast_2d guards against loadtxt collapsing a single-row file to 1-D
    xz = np.atleast_2d(np.loadtxt(line_filename))
    n_points = xz.shape[0]
    if step == 1:
        ax.plot(xz[:, 0], xz[:, 1], 'k-')
    else:
        # each group of `step` consecutive points becomes its own segment
        for i in range(0, n_points, step):
            ax.plot(xz[i:i + step, 0], xz[i:i + step, 1], 'k-')
| 29.182741 | 76 | 0.564968 |
import matplotlib.pyplot as plt
import numpy as np
import pygimli as pg
from pygimli.viewer.mpl import createColorBar
from .ratools import shotReceiverDistances
def drawTravelTimeData(ax, data, t=None):
x = pg.x(data.sensorPositions())
shots = pg.unique(pg.sort(data('s')))
geoph = pg.unique(pg.sort(data('g')))
startOffsetIDX = 0
if min(min(shots), min(geoph)) == 1:
startOffsetIDX = 1
tShow = data('t')
if t is not None:
tShow = t
ax.set_xlim([min(x), max(x)])
ax.set_ylim([max(tShow), -0.002])
ax.figure.show()
for shot in shots:
gIdx = pg.find(data('s') == shot)
sensorIdx = [int(i__ - startOffsetIDX) for i__ in data('g')[gIdx]]
ax.plot(x[sensorIdx], tShow[gIdx], 'x-')
yPixel = ax.transData.inverted().transform_point((1, 1))[1] - \
ax.transData.inverted().transform_point((0, 0))[1]
xPixel = ax.transData.inverted().transform_point((1, 1))[0] - \
ax.transData.inverted().transform_point((0, 0))[0]
ax.plot(x[[int(i__ - startOffsetIDX) for i__ in shots]],
np.zeros(len(shots)) + 8. * yPixel, 'gv', markersize=8)
ax.plot(x[[int(i__ - startOffsetIDX) for i__ in geoph]],
np.zeros(len(geoph)) + 3. * yPixel, 'r^', markersize=8)
ax.grid()
ax.set_ylim([max(tShow), +16. * yPixel])
ax.set_xlim([min(x) - 5. * xPixel, max(x) + 5. * xPixel])
ax.set_xlabel('x-Coordinate [m]')
ax.set_ylabel('Traveltime [ms]')
def plotFirstPicks(ax, data, tt=None, plotva=False, marker='x-'):
pg.deprecated("use drawFirstPicks")
return drawFirstPicks(ax=ax, data=data, tt=tt, plotva=plotva,
marker=marker)
def drawFirstPicks(ax, data, tt=None, plotva=False, marker='x-'):
px = pg.x(data)
gx = np.array([px[int(g)] for g in data("g")])
sx = np.array([px[int(s)] for s in data("s")])
if tt is None:
tt = np.array(data("t"))
if plotva:
tt = np.absolute(gx - sx) / tt
uns = np.unique(sx)
cols = plt.cm.tab10(np.arange(10))
for i, si in enumerate(uns):
ti = tt[sx == si]
gi = gx[sx == si]
ii = gi.argsort()
ax.plot(gi[ii], ti[ii], marker, color=cols[i % 10])
ax.plot(si, 0., 's', color=cols[i % 10], markersize=8)
ax.grid(True)
if plotva:
ax.set_ylabel("Apparent velocity (m/s)")
else:
ax.set_ylabel("Traveltime (s)")
ax.set_xlabel("x (m)")
ax.invert_yaxis()
def _getOffset(data, full=False):
pg.deprecated('use shotReceiverDistances')
return shotReceiverDistances(data, full)
def showVA(data, usePos=True, ax=None, **kwargs):
ax, _ = pg.show(ax=ax)
gci = drawVA(ax, data=data, usePos=usePos, **kwargs)
cBar = createColorBar(gci, **kwargs)
return gci, cBar
def drawVA(ax, data, vals=None, usePos=True, pseudosection=False, **kwargs):
    """Draw apparent velocities (offset/traveltime) as a matrix image into *ax*.

    *vals* may be an iterable of traveltimes or a data-field name; if None the
    container's 't' values are used. With *pseudosection* the matrix is drawn
    as midpoint vs. offset, otherwise as geophone vs. shot position.
    Returns the created image artist.
    """
    if isinstance(vals, str):
        vals = data(vals)
    if vals is None:
        vals = data('t')
    px = pg.x(data)
    gx = np.asarray([px[g] for g in data.id("g")])
    sx = np.asarray([px[s] for s in data.id("s")])
    offset = shotReceiverDistances(data, full=True)
    # zero traveltimes would divide by zero below
    if min(vals) < 1e-10:
        print(vals)
        pg.error('zero traveltimes found.')
    va = offset / vals
    if pseudosection:
        midpoint = (gx + sx) / 2
        # BUGFIX: keyword was misspelled 'queeze' which raised a TypeError
        gci = pg.viewer.mpl.dataview.drawVecMatrix(ax, midpoint, offset, va,
                                                   squeeze=True,
                                                   label=pg.unit('as'))
    else:
        gci = pg.viewer.mpl.dataview.drawVecMatrix(ax, gx, sx, va,
                                                   squeeze=True,
                                                   label=pg.unit('as'))
    if usePos:
        # label every 50th sensor tick with its x position
        xt = np.arange(0, data.sensorCount(), 50)
        ax.set_xticks(xt)
        ax.set_xticklabels([str(int(px[xti])) for xti in xt])
        ax.set_yticks(xt)
        ax.set_yticklabels([str(int(px[xti])) for xti in xt])
    return gci
def plotLines(ax, line_filename, step=1):
    """Read x-z pairs from *line_filename* and draw them as black lines.

    With step == 1 all points form one continuous line; with step > 1 each
    group of *step* consecutive points is drawn as its own segment
    (previously only step==2 worked; other values silently drew nothing).
    """
    # atleast_2d guards against loadtxt collapsing a single-row file to 1-D
    xz = np.atleast_2d(np.loadtxt(line_filename))
    n_points = xz.shape[0]
    if step == 1:
        ax.plot(xz[:, 0], xz[:, 1], 'k-')
    else:
        for i in range(0, n_points, step):
            ax.plot(xz[i:i + step, 0], xz[i:i + step, 1], 'k-')
| true | true |
f73d538b9d224bd77e936dae668753c1e8e793a8 | 1,658 | py | Python | elasticapm/version.py | bmilescu/apm-agent-python | d4b5378941f5b82406862bbc03f91ebe64559ba9 | [
"BSD-3-Clause"
] | null | null | null | elasticapm/version.py | bmilescu/apm-agent-python | d4b5378941f5b82406862bbc03f91ebe64559ba9 | [
"BSD-3-Clause"
] | null | null | null | elasticapm/version.py | bmilescu/apm-agent-python | d4b5378941f5b82406862bbc03f91ebe64559ba9 | [
"BSD-3-Clause"
] | null | null | null | # BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__version__ = (5, 7, 0)
VERSION = ".".join(map(str, __version__))
| 50.242424 | 81 | 0.767189 |
__version__ = (5, 7, 0)
VERSION = ".".join(map(str, __version__))
| true | true |
f73d5437748b81ea8becf68dd199e9ea54911c3d | 1,121 | py | Python | sfaira/data/utils_scripts/create_meta_and_cache.py | johnmous/sfaira | c50240a74530e614ab7681bf9c63b04cb815b361 | [
"BSD-3-Clause"
] | 110 | 2020-09-08T07:47:15.000Z | 2022-03-29T03:33:56.000Z | sfaira/data/utils_scripts/create_meta_and_cache.py | johnmous/sfaira | c50240a74530e614ab7681bf9c63b04cb815b361 | [
"BSD-3-Clause"
] | 405 | 2020-09-15T15:05:46.000Z | 2022-03-16T14:44:23.000Z | sfaira/data/utils_scripts/create_meta_and_cache.py | johnmous/sfaira | c50240a74530e614ab7681bf9c63b04cb815b361 | [
"BSD-3-Clause"
] | 20 | 2021-03-30T15:30:14.000Z | 2022-03-07T12:52:58.000Z | import os
import sfaira
import sys
# CLI script: writes dataset meta data + caches for every sfaira dataset.
# Usage: create_meta_and_cache.py <data_path> <meta_path> <cache_path> <processes>
# Set global variables.
print("sys.argv", sys.argv)
data_path = str(sys.argv[1])
path_meta = str(sys.argv[2])
path_cache = str(sys.argv[3])
# NOTE(review): `processes` is parsed but not used below — presumably consumed
# by a parallel variant of this script; confirm before removing.
processes = int(str(sys.argv[4]))
ds = sfaira.data.dataloaders.Universe(
    data_path=data_path, meta_path=path_meta, cache_path=path_cache
)
# Write meta data, cache and test load from cache:
for x in ds.dataset_groups:
    for k, v in x.datasets.items():
        print(f"SCRIPT: loading {x} {k}")
        try:
            # Initial load and cache writing:
            # Only run this if data set was not already cached to speed up resumed jobs.
            if not os.path.exists(v.cache_fn):
                v.load(load_raw=False, allow_caching=True)
            # Write meta data, cache.
            v.write_meta(fn_meta=None, dir_out=path_meta)
            # Test load from cache.
            v.load(load_raw=False, allow_caching=False)
            v.clear()
        except ValueError as e:
            # Do not abort upon ValueErrors, such as from cell type map bugs.
            print(f"SCRIPT WARNING: TO-FIX: ValueError in {k}: {e}")
| 33.969697 | 88 | 0.633363 | import os
import sfaira
import sys
print("sys.argv", sys.argv)
data_path = str(sys.argv[1])
path_meta = str(sys.argv[2])
path_cache = str(sys.argv[3])
processes = int(str(sys.argv[4]))
ds = sfaira.data.dataloaders.Universe(
data_path=data_path, meta_path=path_meta, cache_path=path_cache
)
for x in ds.dataset_groups:
for k, v in x.datasets.items():
print(f"SCRIPT: loading {x} {k}")
try:
if not os.path.exists(v.cache_fn):
v.load(load_raw=False, allow_caching=True)
v.write_meta(fn_meta=None, dir_out=path_meta)
v.load(load_raw=False, allow_caching=False)
v.clear()
except ValueError as e:
print(f"SCRIPT WARNING: TO-FIX: ValueError in {k}: {e}")
| true | true |
f73d552ecc282813a207cf0ea275a12fdeb1d22a | 55,567 | py | Python | tensorlog/testxcomp.py | saraswat/TensorLog | c56cebfa33b5123d5340a7b429e333da09d223d8 | [
"Apache-2.0"
] | 108 | 2016-05-24T16:49:56.000Z | 2022-02-02T19:06:14.000Z | tensorlog/testxcomp.py | saraswat/TensorLog | c56cebfa33b5123d5340a7b429e333da09d223d8 | [
"Apache-2.0"
] | 12 | 2016-09-07T18:04:38.000Z | 2020-12-07T01:18:08.000Z | tensorlog/testxcomp.py | saraswat/TensorLog | c56cebfa33b5123d5340a7b429e333da09d223d8 | [
"Apache-2.0"
] | 22 | 2016-06-17T18:59:18.000Z | 2020-05-28T02:13:59.000Z | # (C) William W. Cohen and Carnegie Mellon University, 2017
import logging
import numpy as np
import os
import unittest
import sys
import collections
import tempfile
from tensorlog import xctargets
if xctargets.tf:
import tensorflow as tf
from tensorlog import tensorflowxcomp
else:
tensorflowxcomp=None
if xctargets.theano:
import theano
from tensorlog import theanoxcomp
else:
theanoxcomp=None
from tensorlog import bpcompiler
from tensorlog import comline
from tensorlog import dataset
from tensorlog import declare
from tensorlog import matrixdb
from tensorlog import learn
from tensorlog import mutil
from tensorlog import parser
from tensorlog import program
from tensorlog import simple
from tensorlog import testtensorlog
from tensorlog import funs
from tensorlog import ops
from tensorlog import learnxcomp as learnxc
from tensorlog.expt import Expt
# Quiet TF logging during tests.
if xctargets.tf:
  tf.logging.set_verbosity(tf.logging.WARN)

# Registry of cross-compiler classes to exercise, plus the matching
# gradient-descent learner class for each backend that is installed.
TESTED_COMPILERS = []
TESTED_LEARNERS = {}
if xctargets.theano:
  for c in [
    theanoxcomp.DenseMatDenseMsgCrossCompiler,
    theanoxcomp.SparseMatDenseMsgCrossCompiler
    ]:
    TESTED_COMPILERS.append(c)
    TESTED_LEARNERS[c]=theanoxcomp.FixedRateGDLearner
if xctargets.tf:
  for c in [
    tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
    tensorflowxcomp.SparseMatDenseMsgCrossCompiler,
    ]:
    TESTED_COMPILERS.append(c)
    TESTED_LEARNERS[c]=tensorflowxcomp.FixedRateGDLearner
# If set, also re-run the original (non-cross-compiled) inference checks.
RUN_OLD_INFERENCE_TESTS = False
# If set, TF compilers are constructed with a summary file for TensorBoard.
SAVE_SUMMARIES = False
def close_cross_compiler(xc):
  """Release a cross-compiler; for TensorFlow backends also reset the default graph."""
  xc.close()
  uses_tf_graph = xctargets.tf and isinstance(xc, tensorflowxcomp.TensorFlowCrossCompiler)
  if uses_tf_graph:
    tf.reset_default_graph()
class TestXCSmallProofs(testtensorlog.TestSmallProofs):
  """Re-runs the small-proof inference tests through every registered
  cross-compiler, checking that the compiled inference and proof-count
  functions agree (on maximal elements) with the expected results."""

  def test_if(self):
    self.xcomp_check(['p(X,Y):-spouse(X,Y).'], 'p(i,o)', 'william', {'susan':1.0})

  def test_failure(self):
    # a query with no proofs should put all mass on the NULL entity
    self.xcomp_check(['p(X,Y):-spouse(X,Y).'], 'p(i,o)', 'lottie', {matrixdb.NULL_ENTITY_NAME:1.0})

  def test_reverse_if(self):
    self.xcomp_check(['p(X,Y):-sister(Y,X).'], 'p(i,o)', 'rachel', {'william':1.0})

  def test_or(self):
    self.xcomp_check(['p(X,Y):-spouse(X,Y).', 'p(X,Y):-sister(X,Y).'], 'p(i,o)', 'william',
                     {'susan':1.0, 'rachel':1.0, 'lottie':1.0, 'sarah':1.0})

  def test_chain(self):
    self.xcomp_check(['p(X,Z):-spouse(X,Y),sister(Y,Z).'], 'p(i,o)', 'susan',
                     {'rachel':1.0, 'lottie':1.0, 'sarah':1.0})
    self.xcomp_check(['p(X,Z):-sister(X,Y),child(Y,Z).'], 'p(i,o)', 'william',
                     {'charlotte':1.0, 'lucas':1.0, 'poppy':1.0, 'caroline':1.0, 'elizabeth':1.0})

  def test_mid(self):
    self.xcomp_check(['p(X,Y):-sister(X,Y),child(Y,Z).'], 'p(i,o)', 'william',
                     {'sarah': 1.0, 'rachel': 2.0, 'lottie': 2.0})

  def test_nest(self):
    self.xcomp_check(['s(X,Y):-spouse(X,Y).','t(X,Z):-spouse(X,Y),s(Y,Z).'], 't(i,o)', 'susan', {'susan': 1.0})

  def test_back1(self):
    # fails for tensorflowxcomp
    self.xcomp_check(['p(X,Y):-spouse(X,Y),sister(X,Z).'], 'p(i,o)', 'william', {'susan': 3.0})

  def test_back2(self):
    self.xcomp_check(['p(X,Y):-spouse(X,Y),sister(X,Z1),sister(X,Z2).'],'p(i,o)','william',{'susan': 9.0})

  def test_rec1(self):
    # recursion depth bounds the number of proofs counted
    program.DEFAULT_MAXDEPTH=4
    self.xcomp_check(['p(X,Y):-spouse(X,Y).','p(X,Y):-p(Y,X).'], 'p(i,o)','william',{'susan': 5.0})
    program.DEFAULT_MAXDEPTH=10
    self.xcomp_check(['p(X,Y):-spouse(X,Y).','p(X,Y):-p(Y,X).'], 'p(i,o)','william',{'susan': 11.0})

  def test_const_output(self):
    self.xcomp_check(['sis(X,W):-assign(W,william),child(X,Y).'], 'sis(i,o)', 'sarah', {'william': 1.0})
    self.xcomp_check(['sis(X,W):-assign(W,william),child(X,Y).'], 'sis(i,o)', 'lottie', {'william': 2.0})

  def test_const_chain1(self):
    self.xcomp_check(['p(X,S) :- assign(S,susan),sister(X,Y),child(Y,Z).'],'p(i,o)','william',{'susan': 5.0})

  def test_const_chain2(self):
    self.xcomp_check(['p(X,Pos) :- assign(Pos,pos),child(X,Y),young(Y).'],'p(i,o)','sarah',{'pos':1.0})
    self.xcomp_check(['p(X,Pos) :- assign(Pos,pos),child(X,Y),young(Y).'],'p(i,o)','lottie',{'pos':2.0})

  def test_alt_chain(self):
    self.xcomp_check(['p(X,W) :- spouse(X,W),sister(X,Y),child(Y,Z).'],'p(i,o)','william',{'susan': 5.0})
    pass

  def test_proppr1(self):
    w = 7*self.db.onehot('r1')+3*self.db.onehot('r2')
    self.proppr_xcomp_check(w,['p(X,Y):-sister(X,Y) {r1}.','p(X,Y):-spouse(X,Y) {r2}.'],'p(i,o)',
                            'william', {'sarah': 7.0, 'rachel': 7.0, 'lottie': 7.0, 'susan': 3.0})

  def test_proppr2(self):
    w = 3*self.db.onehot('r2')
    self.proppr_xcomp_check(w,['p(X,Y):-spouse(Y,X) {r2}.'],'p(i,o)',
                            'susan', {'william': 3.0})

  def test_reuse1(self):
    self.xcomp_check(['p(X,Y) :- r(X,Z),r(Z,Y).', 'r(X,Y):-spouse(X,Y).'], 'p(i,o)', 'william',
                     {'william':1.0})

  def _removeZeros(self, sdict):
    # NOTE: disabled via the early return below; the normalization code
    # after it is currently dead.
    if True: return sdict
    e = sdict[None]
    ret = dict([ (k,v-e) for (k,v) in list(sdict.items()) if v != e])
    z = sum(ret.values())
    for k in ret: ret[k] = ret[k]/z
    return ret

  def xcomp_check(self,ruleStrings,mode_string,input_symbol,expected_result_dict,compare=False):
    """Check a vanilla (non-ProPPR) program against all cross-compilers."""
    self._xcomp_check('vanilla',None,ruleStrings,mode_string,input_symbol,expected_result_dict,compare)

  def proppr_xcomp_check(self,weightVec,ruleStrings,mode_string,input_symbol,expected_result_dict):
    """Check a ProPPR program with the given rule-feature weights."""
    self._xcomp_check('proppr',weightVec,ruleStrings,mode_string,input_symbol,expected_result_dict)

  def _xcomp_check(self,progType,weightVec,ruleStrings,mode_string,input_symbol,expected_result_dict,compare=False):
    """Compile the rules, then for every registered cross-compiler run
    inference and proof-counting and compare to the expected dict."""
    # run the base class check to see that the inference is correct
    if RUN_OLD_INFERENCE_TESTS:
      if progType=='proppr':
        self.proppr_inference_check(weightVec,ruleStrings,mode_string,input_symbol,expected_result_dict)
      else:
        self.inference_check(ruleStrings,mode_string,input_symbol,expected_result_dict)
    # setup the next round of tests by compiling a tensorlog
    # Program - this code is lifted from the testtensorlog
    # inference routines
    print('xcomp inference for mode',mode_string,'on input',input_symbol)
    testtensorlog.softmax_normalize(expected_result_dict)
    rules = parser.RuleCollection()
    for r in ruleStrings:
      rules.add(parser.Parser().parseRule(r))
    if progType=='proppr':
      prog = program.ProPPRProgram(db=self.db,rules=rules,weights=weightVec)
    else:
      prog = program.Program(db=self.db,rules=rules)
    for compilerClass in TESTED_COMPILERS:
      #cross-compile the function
      xc = compilerClass(prog)
      # evaluate the function and get the output y
      #xc.show()
      print('== performing eval with',compilerClass,'==')
      inferenceFun = xc.inferenceFunction(mode_string)
      y = inferenceFun(prog.db.onehot(input_symbol))
      # print 'input',xc.getInputName(mode_string),'args,fun
      # =',xc.inference(mode_string) theano output will a be (probably
      # dense) message, so just compare and check that the maximal
      # elements from these two dicts are the same
      actual_result_dict = self.db.rowAsSymbolDict(y)
      self.check_maxes_in_dicts(actual_result_dict, expected_result_dict)
      # check it's normalized
      l1_error = abs(sum(actual_result_dict.values()) - 1.0)
      #print 'l1_error',l1_error,'actual_result_dict',actual_result_dict,'expected_result_dict',expected_result_dict
      self.assertTrue( l1_error < 0.0001)
      # also test proofCountFun
      proofCountFun = xc.proofCountFunction(mode_string)
      pc = proofCountFun(prog.db.onehot(input_symbol))
      # theano output will a be (probably dense) message, so
      # just compare that maximal elements from these two dicts
      # are the same
      pc_result_dict = self.db.rowAsSymbolDict(pc)
      if len(pc_result_dict)>0:
        self.check_maxes_in_dicts(pc_result_dict, expected_result_dict)
      print('== eval checks passed ==')
      close_cross_compiler(xc)

  def check_maxes_in_dicts(self,actual,expected):
    """Assert that the sets of maximal-weight keys in both dicts coincide."""
    def maximalElements(d):
      m = max(d.values())
      return set(k for k in d if d[k]==m)
    actualMaxes = maximalElements(actual)
    expectedMaxes = maximalElements(expected)
    print('actual',actualMaxes,'expected',expectedMaxes)
    for a in actualMaxes:
      self.assertTrue(a in expectedMaxes)
    for a in expectedMaxes:
      self.assertTrue(a in actualMaxes)
class TestXCGrad(testtensorlog.TestGrad):
  """Gradient-direction tests: for each cross-compiler, compute the data-loss
  gradient on small examples and check that each fact's update has the
  expected sign (gradients are sign-flipped relative to native TensorLog)."""

  def setUp(self):
    self.db = matrixdb.MatrixDB.loadFile(os.path.join(testtensorlog.TEST_DATA_DIR,'fam.cfacts'))

  def test_if(self):
    rules = ['p(X,Y):-sister(X,Y).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'sister(william,rachel)': -1,'sister(william,lottie)': +1})

  def test_if2(self):
    # same as test_if but with each example duplicated
    rules = ['p(X,Y):-sister(X,Y).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah']), ('william',['rachel','sarah'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie']), ('william',['lottie'])],
                     {'sister(william,rachel)': -1,'sister(william,lottie)': +1})

  def test_reverse_if(self):
    rules = ['p(X,Y):-parent(Y,X).']
    mode = 'p(i,o)'
    params = [('parent',2)]
    self.xgrad_check(rules, mode, params,
                     [('lottie',['charlotte'])],
                     {'parent(charlotte,lottie)': +1,'parent(lucas,lottie)': -1})

  def test_chain1(self):
    rules = ['p(X,Z):-sister(X,Y),child(Y,Z).']
    mode = 'p(i,o)'
    self.xgrad_check(rules,mode,
                     [('sister',2)],
                     [('william',['caroline','elizabeth'])],
                     {'sister(william,rachel)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules,mode,
                     [('child',2)],
                     [('william',['caroline','elizabeth'])],
                     {'child(rachel,elizabeth)': +1,'child(lottie,lucas)': -1})
    self.xgrad_check(rules,mode,
                     [('child',2),('sister',2)],
                     [('william',['caroline','elizabeth'])],
                     {'child(rachel,elizabeth)': +1,'child(lottie,lucas)': -1, 'sister(william,rachel)': +1,'sister(william,lottie)': -1})

  def test_chain2(self):
    rules = ['p(X,Z):-spouse(X,Y),sister(Y,Z).']
    mode = 'p(i,o)'
    self.xgrad_check(rules,mode,
                     [('sister',2)],
                     [('susan',['rachel'])],
                     {'sister(william,rachel)': +1,'sister(william,lottie)': -1})

  def test_call1(self):
    # one level of predicate call indirection
    rules = ['q(X,Y):-sister(X,Y).','p(Z,W):-q(Z,W).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'sister(william,rachel)': -1,'sister(william,lottie)': +1})

  def test_call2(self):
    # two levels of predicate call indirection
    rules = ['q(X,Y):-sister(X,Y).','p(Z,W):-r(Z,W).','r(Z,W):-q(Z,W).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'sister(william,rachel)': -1,'sister(william,lottie)': +1})

  def test_split(self):
    rules = ['p(X,Y):-sister(X,Y),child(Y,Z),young(Z).']
    mode = 'p(i,o)'
    params = [('child',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'child(lottie,lucas)': +1,'child(lottie,charlotte)': +1,'child(sarah,poppy)': -1})
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'sister(william,lottie)': +1,'sister(william,sarah)': -1})

  def test_or(self):
    rules = ['p(X,Y):-child(X,Y).', 'p(X,Y):-sister(X,Y).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['charlie','rachel'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': -1,'sister(william,lottie)': -1})
    params = [('child',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['charlie','rachel'])],
                     {'child(william,charlie)': +1,'child(william,josh)': -1})
    params = [('child',2),('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['charlie','rachel'])],
                     {'child(william,charlie)': +1,'child(william,josh)': -1,'sister(william,rachel)': +1,'sister(william,sarah)': -1})

  def test_weighted_vec(self):
    rules = ['p(X,Y):-sister(X,Y),assign(R,r1),feat(R).','p(X,Y):-child(X,Y),assign(R,r2),feat(R).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','charlie'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': -1})
    params = [('child',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','charlie'])],
                     {'child(william,charlie)': +1,'child(william,josh)': -1})
    params = [('feat',1)]
    self.xgrad_check(rules, mode, params,
                     [('william',['josh','charlie'])],
                     {'feat(r1)': -1,'feat(r2)': +1})
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah','lottie'])],
                     {'feat(r1)': +1,'feat(r2)': -1})

  def learnxc_check(self,rule_strings,mode_string,params,xyPairs,expected):
    """Check gradient directions using the XLearner wrapper API."""
    print("XLearner loss/grad eval")
    rules = testtensorlog.rules_from_strings(rule_strings)
    prog = program.Program(db=self.db,rules=rules)
    mode = declare.ModeDeclaration(mode_string)
    prog.db.clearParameterMarkings()
    for (functor,arity) in params:
      prog.db.markAsParameter(functor,arity)
    # TODO: not working yet for mini-batches so check each example
    # individually
    for x,ys in xyPairs:
      data = testtensorlog.DataBuffer(self.db)
      data.add_data_symbols(x,ys)
      for compilerClass in TESTED_COMPILERS:
        xc = compilerClass(prog)
        print('learner check for compiler',xc.__class__)
        learner = learnxc.XLearner(prog,xc)
        paramsWithUpdates = learner.crossEntropyGrad(mode,data.get_x(),data.get_y())
        updates_with_string_keys = {}
        for (functor,arity),up in paramsWithUpdates:
          print('testxcomp update for',functor,arity,'is',up)
          upDict = prog.db.matrixAsPredicateFacts(functor,arity,up)
          print('upDict',upDict)
          for fact,grad_of_fact in list(upDict.items()):
            # need to flip for cross-compilers
            updates_with_string_keys[str(fact)] = -grad_of_fact
        self.check_directions(updates_with_string_keys,expected)

  def xgrad_check(self,rule_strings,mode_string,params,xyPairs,expected):
    """Check gradient directions via the compiler's dataLossGradFunction,
    then repeat the check through the XLearner path (learnxc_check)."""
    print("direct loss/grad eval")
    rules = testtensorlog.rules_from_strings(rule_strings)
    prog = program.Program(db=self.db,rules=rules)
    prog.db.clearParameterMarkings()
    for (functor,arity) in params:
      prog.db.markAsParameter(functor,arity)
    for x,ys in xyPairs:
      data = testtensorlog.DataBuffer(self.db)
      data.add_data_symbols(x,ys)
      for compilerClass in TESTED_COMPILERS:
        xc = compilerClass(prog)
        print('grad check for compiler',xc.__class__)
        gradFun = xc.dataLossGradFunction(mode_string)
        updates_with_string_keys = {}
        paramsWithUpdates = gradFun(data.get_x(),data.get_y())
        for (functor,arity),up in paramsWithUpdates:
          upDict = prog.db.matrixAsPredicateFacts(functor,arity,up)
          for fact,grad_of_fact in list(upDict.items()):
            # need to flip for cross-compilers
            updates_with_string_keys[str(fact)] = -grad_of_fact
        self.check_directions(updates_with_string_keys,expected)
    self.learnxc_check(rule_strings,mode_string,params,xyPairs,expected)
    close_cross_compiler(xc)
class TestXCProPPR(testtensorlog.TestProPPR):
  def setUp(self):
    # reuse the native ProPPR fixture (builds self.prog, self.X, self.Y,
    # self.labeledData, self.mode, self.numExamples)
    super(TestXCProPPR,self).setUp()
  def debug(self):
    # convenience hook for interactive debugging; just exposes the fixture
    return self
  def evalxc(self,xc,input):
    """Run cross-compiled predict/io inference on one input row and return
    the sparse prediction with near-zero entries removed."""
    inferenceFun = xc.inferenceFunction('predict/io')
    print(inferenceFun)
    rawPred = inferenceFun(input)
    # trim small numbers to zero
    pred = mutil.mapData(lambda d:np.clip((d - 1e-5),0.00,9999.99), rawPred)
    pred.eliminate_zeros()
    return pred
  def testNativeRow(self):
    # with untrained weights, each single-row prediction should be uniform
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      for i in range(self.numExamples):
        pred = self.evalxc(xc, self.X.getrow(i))
        d = self.prog.db.rowAsSymbolDict(pred)
        uniform = {'pos':0.5,'neg':0.5}
        self.check_dicts(d,uniform)
      close_cross_compiler(xc)
  def testNativeMatrix(self):
    # with untrained weights, batch (matrix) prediction should be uniform too
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      xc.ensureCompiled(self.mode,inputs=None)
      pred = self.prog.eval(self.mode,[self.X])
      d0 = self.prog.db.matrixAsSymbolDict(pred)
      for i,d in list(d0.items()):
        uniform = {'pos':0.5,'neg':0.5,}
        self.check_dicts(d,uniform)
      close_cross_compiler(xc)
  def testGradVector(self):
    """Per-example gradients from the cross-compiler must be nonzero exactly
    where the native learner's are, and point in the opposite direction."""
    data = testtensorlog.DataBuffer(self.prog.db)
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    learner = learn.OnePredFixedRateGDLearner(self.prog)
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      self.prog.db.markAsParameter('weighted',1)
      #xc.compile(self.mode)
      gradFun = xc.dataLossGradFunction('predict/io')
      for i in range(X.shape[0]):
        print("example",i)
        updates = learner.crossEntropyGrad(declare.ModeDeclaration('predict(i,o)'),X[i],Y[i])
        w0 = updates[('weighted',1)].sum(axis=0)
        print(w0)
        updates = gradFun(X[i],Y[i])
        paramKey,w = updates[0]
        print(w)
        # w is different from the w in the corresponding testtensorlog test,
        # which is a crossEntropy gradient for each example, but it should have
        # opposite directions
        nrow,ncol = w.shape
        # NOTE: the inner loop shadows the example index `i`; harmless here
        # because the outer `for` reassigns `i` on each iteration
        for i in range(nrow):
          for j in range(ncol):
            self.assertTrue((w[i,j]==0) == (w0[i,j]==0))
            self.assertTrue(w[i,j] * w0[i,j] <= 0)
  def testGradMatrix(self):
    """Batch gradient from the cross-compiler must match the native learner's
    gradient in sparsity pattern and have the opposite sign entrywise."""
    data = testtensorlog.DataBuffer(self.prog.db)
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    learner = learn.OnePredFixedRateGDLearner(self.prog)
    updates = learner.crossEntropyGrad(declare.ModeDeclaration('predict(i,o)'),X,Y)
    w0 = updates[('weighted',1)].sum(axis=0)
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      self.prog.db.markAsParameter('weighted',1)
      #xc.compile(self.mode)
      gradFun = xc.dataLossGradFunction('predict/io')
      updates = gradFun(X,Y)
      paramKey,w = updates[0]
      # w is different from the w in the corresponding testtensorlog test,
      # which is a crossEntropy gradient for each example, but it should have
      # opposite directions
      nrow,ncol = w.shape
      for i in range(nrow):
        for j in range(ncol):
          self.assertTrue((w[i,j]==0) == (w0[i,j]==0),"i=%d,j=%d,w=%g,w0=%g"%(i,j,w[i,j],w0[i,j]))
          self.assertTrue(w[i,j] * w0[i,j] <= 0.0,"i=%d,j=%d,w=%g,w0=%g"%(i,j,w[i,j],w0[i,j]))
      close_cross_compiler(xc)
  def testMultiLearn1(self):
    # intentionally disabled: overrides the base-class multi-predicate
    # learning test, which is not supported by the cross-compilers
    pass
  def testLearn(self):
    """End-to-end learning: train the 'weighted' parameter for each compiler
    and check that loss drops, accuracy reaches 1.0, and the learned weights
    separate *_pos from *_neg features after export back to the DB."""
    mode = declare.ModeDeclaration('predict(i,o)')
    modestr = 'predict/io'
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    for compilerClass in TESTED_COMPILERS:
      # reset weights so each compiler starts from the same untrained state
      self.prog.setRuleWeights()
      self.prog.setFeatureWeights()
      if SAVE_SUMMARIES:
        xc = compilerClass(self.prog,compilerClass.__name__+".summary")
      else:
        xc = compilerClass(self.prog)
      self.prog.db.markAsParameter('weighted',1)
      v = self.prog.db.getParameter('weighted',1)
      d = self.prog.db.rowAsSymbolDict(v)
      # sanity check a couple of values
      self.assertTrue(d['little_pos'] == d['little_neg'])
      self.assertTrue(d['big_pos'] == d['big_neg'])
      # optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
      learner = TESTED_LEARNERS[compilerClass](self.prog,xc=xc,rate=0.1,epochs=20)
      lossFun = xc.dataLossFunction('predict/io')
      loss0 = lossFun(X,Y)
      print('initial train data loss',loss0)
      TX,TY = testtensorlog.matrixAsTrainingData(self.labeledData,'test',2)
      loss1 = lossFun(TX,TY)
      print('initial test data loss',loss1)
      P = learner.predict('predict/io',X)
      #acc0 = xc.accuracy('predict/io',X,Y)
      acc0 = learner.accuracy(Y,P)
      print('initial train accuracy',acc0)
      TP = learner.predict('predict/io',TX)
      #acc1 = xc.accuracy('predict/io',TX,TY)
      acc1 = learner.accuracy(TY,TP)
      print('initial test accuracy',acc1)
      print('params to optimize',xc.prog.getParamList())
      print('vars to optimize',xc.getParamVariables('predict/io'))
      # xc.optimizeDataLoss('predict/io', optimizer, X, Y, epochs=20)
      learner.trainMode('predict/io',X,Y)
      loss2 = lossFun(X,Y)
      print('final train data loss',loss2)
      loss3 = lossFun(TX,TY)
      print('final test data loss',loss3)
      P2 = learner.predict('predict/io',X)
      #acc2 = xc.accuracy('predict/io',X,Y)
      acc2 = learner.accuracy(Y,P2)
      print('final train accuracy',acc2)
      TP2 = learner.predict('predict/io',TX)
      #acc3 = xc.accuracy('predict/io',TX,TY)
      acc3 = learner.accuracy(TY,TP2)
      print('final test accuracy',acc3)
      # copy learned parameters back into the database and re-check them
      xc.exportAllLearnedParams()
      v = self.prog.db.getParameter('weighted',1)
      d = self.prog.db.rowAsSymbolDict(v)
      # sanity check a couple of values
      self.assertTrue(d['little_pos'] > d['little_neg'])
      self.assertTrue(d['big_pos'] < d['big_neg'])
      close_cross_compiler(xc)
      self.assertTrue(acc2>=acc0)
      self.assertTrue(acc3>=acc1)
      self.assertTrue(loss2<loss0)
      self.assertTrue(loss2<loss1)
      self.assertTrue(acc2>=0.9)
      self.assertTrue(acc2==1.0)
  def testDatasetPredict(self):
    """Check predict() and datasetPredict() produce outputs whose shape
    matches the input for every tested compiler/learner pair.

    Returns the last compiler's artifacts (xc, learner, X, Y, P) so an
    interactive caller can poke at them; unittest ignores the value.
    """
    mode = declare.ModeDeclaration('predict(i,o)')
    modestr = 'predict/io'
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    for compilerClass in TESTED_COMPILERS:
      self.prog.setRuleWeights()
      self.prog.setFeatureWeights()
      if SAVE_SUMMARIES:
        xc = compilerClass(self.prog,compilerClass.__name__+".summary")
      else:
        xc = compilerClass(self.prog)
      self.prog.db.markAsParameter('weighted',1)
      learner = TESTED_LEARNERS[compilerClass](self.prog,xc=xc,rate=0.1,epochs=20)
      P = learner.predict(mode,X)
      print("X",X.shape)
      print("P",P.shape)
      self.assertTrue(X.shape==P.shape)
      P = learner.datasetPredict(dataset.Dataset({mode:X},{mode:Y}))
      print("X",X.shape)
      print("P",P.getX(mode).shape)
      self.assertTrue(X.shape==P.getX(mode).shape)
    return xc,learner,X,Y,P
  def testExptScaffold(self):
    """Smoke test: run the generic Expt scaffold once per tested compiler."""
    mode = declare.ModeDeclaration('predict(i,o)')
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    TX,TY = testtensorlog.matrixAsTrainingData(self.labeledData,'test',2)
    self.prog.setAllWeights()
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      learner = TESTED_LEARNERS[compilerClass](self.prog,xc=xc,rate=0.1,epochs=20)
      Expt({'prog':self.prog,
            'trainData':dataset.Dataset({mode:X},{mode:Y}),
            'testData':dataset.Dataset({mode:TX},{mode:TY}),
            'targetMode':mode,
            'learner':learner
      }).run()
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testExpt(self):
    """Smoke test: runExpt on both tensorflow compiler variants."""
    mode = declare.ModeDeclaration('predict(i,o)')
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    TX,TY = testtensorlog.matrixAsTrainingData(self.labeledData,'test',2)
    for compilerClass in [tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
                          tensorflowxcomp.SparseMatDenseMsgCrossCompiler]:
      xc = compilerClass(self.prog)
      xc.runExpt(
          prog=self.prog,
          trainData=dataset.Dataset({mode:X},{mode:Y}),
          testData=dataset.Dataset({mode:TX},{mode:TY}),
          targetMode=mode)
      close_cross_compiler(xc)
class TestXCOpGen(unittest.TestCase):
  """Tests for possibleOps(): enumerating the operators applicable to an
  expression, with and without DB typing."""
  # TODO tests for other xcompilers?
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testTCToyTypes(self):
    """With typing on, possibleOps is filtered by the argument's type."""
    matrixdb.conf.ignore_types = False
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"))
    trainData = tlog.load_small_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    mode = list(trainData.keys())[0]
    docs,labels = trainData[mode]
    xc = tlog.get_cross_compiler()
    # from a 'doc' there is exactly one typed op, producing 'word's
    ops = xc.possibleOps(docs,'doc')
    print('doc ops',ops)
    self.assertTrue(len(ops)==1)
    (words,wordType) = ops[0]
    self.assertTrue(wordType=='word')
    ops = xc.possibleOps(words,'word')
    self.assertTrue(len(ops)==3)
    pairs = None
    for (expr,exprType) in ops:
      if exprType=='labelWordPair':
        pairs = expr
        break
    self.assertTrue(pairs is not None)
    ops = xc.possibleOps(pairs,'labelWordPair')
    self.assertTrue(len(ops)==2)
    for (expr,exprType) in ops:
      self.assertTrue(exprType=='word')
    close_cross_compiler(xc)
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testTCToyIgnoringTypes(self):
    """With typing off, every binary predicate yields two (untyped) ops."""
    matrixdb.conf.ignore_types = True
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"))
    trainData = tlog.load_small_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    mode = list(trainData.keys())[0]
    docs,labels = trainData[mode]
    xc = tlog.get_cross_compiler()
    ops = xc.possibleOps(docs)
    binary_predicates = [functor for (functor,arity) in tlog.db.matEncoding if arity==2]
    self.assertTrue(len(ops) == len(binary_predicates)*2)
    for x in ops:
      # ops should just be tensors
      self.assertFalse(isinstance(x,tuple))
    close_cross_compiler(xc)
class TestXCExpt(unittest.TestCase):
  """End-to-end experiments on the text-categorization toy data, checking
  matrix shapes, pprint output, and assorted cross-compiler APIs, both with
  and without DB typing and with/without the Expt scaffold."""
  def testTCToyTypes_wscaffold(self):
    """Typed DB, driven through the Expt scaffold for every tested compiler."""
    matrixdb.conf.ignore_types = False
    optdict,args = comline.parseCommandLine(
        ["--db", os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
         "--prog", os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
         "--trainData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"),
         "--testData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"),
         "--proppr"])
    optdict['prog'].setAllWeights()
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(optdict['prog'])
      learner = TESTED_LEARNERS[compilerClass](optdict['prog'],xc)
      Expt({
          'prog':optdict['prog'],
          'trainData':optdict['trainData'],
          'testData':optdict['testData'],
          'learner':learner,
          'targetMode':declare.asMode("predict/io")
      }).run()
      pbDoc = xc.db.onehot('pb','doc')
      # typed column counts: each matrix is as wide as its range type
      self.checkXC(xc,'predict/io',pbDoc,{'negPair':115,'posPair':115,'hasWord':59,'weighted':115,'label':5})
      # some checks on the output of pprint
      lines = xc.pprint('predict/io')
      self.assertTrue(lines[0].find("SoftMaxFunction") >= 0)
      self.assertTrue(lines[1].find("SumFunction") >= 0)
      self.assertEqual(len(lines), 16)
      # some checks on misc xcomp API
      self.assertEqual(xc.inferenceOutputType('predict/io'),'label')
      pbId = xc.asSymbolId('pb',typeName='doc')
      pbSym = xc.asSymbol(pbId,typeName='doc')
      self.assertEqual(pbSym,'pb')
      self.assertEqual(xc.asSymbolId('this does not appear in the data',typeName='doc'), -1)
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testTCToyTypes(self):
    """Typed DB, driven through xc.runExpt directly (TF backends only)."""
    matrixdb.conf.ignore_types = False
    optdict,args = comline.parseCommandLine(
        ["--db", os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
         "--prog", os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
         "--trainData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"),
         "--testData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"),
         "--proppr"])
    for compilerClass in [tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
                          tensorflowxcomp.SparseMatDenseMsgCrossCompiler]:
      xc = compilerClass(optdict['prog'])
      xc.runExpt(
          prog=optdict['prog'],
          trainData=optdict['trainData'],
          testData=optdict['testData'],
          targetMode=declare.asMode("predict/io"))
      # check trainability: a TF variable is trainable iff the DB relation
      # it came from was marked as a parameter
      for (functor,arity) in xc.db.matEncoding:
        v = xc.parameterFromDBToVariable(functor,arity)
        if v is not None:
          vIsTrainable = (v in tf.trainable_variables())
          vIsParameter = ((functor,arity) in xc.db.paramSet)
          self.assertEqual(vIsTrainable,vIsParameter)
      pbDoc = xc.db.onehot('pb','doc')
      self.checkXC(xc,'predict/io',pbDoc,{'negPair':115,'posPair':115,'hasWord':59,'weighted':115,'label':5})
      # some checks on the output of pprint
      lines = xc.pprint('predict/io')
      self.assertTrue(lines[0].find("SoftMaxFunction") >= 0)
      self.assertTrue(lines[1].find("SumFunction") >= 0)
      self.assertEqual(len(lines), 16)
      # some checks on misc xcomp API
      self.assertEqual(xc.inferenceOutputType('predict/io'),'label')
      pbId = xc.asSymbolId('pb',typeName='doc')
      pbSym = xc.asSymbol(pbId,typeName='doc')
      self.assertEqual(pbSym,'pb')
      self.assertEqual(xc.asSymbolId('this does not appear in the data',typeName='doc'), -1)
      close_cross_compiler(xc)
  def testTCToyIgnoringTypes_wscaffold(self):
    """Untyped DB via the Expt scaffold: every matrix is 191 columns wide."""
    matrixdb.conf.ignore_types = True
    optdict,args = comline.parseCommandLine(
        ["--db", os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
         "--prog", os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
         "--trainData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"),
         "--testData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"),
         "--proppr"])
    optdict['prog'].setAllWeights()
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(optdict['prog'])
      learner = TESTED_LEARNERS[compilerClass](optdict['prog'],xc)
      Expt({
          'prog':optdict['prog'],
          'trainData':optdict['trainData'],
          'testData':optdict['testData'],
          'learner':learner,
          'targetMode':declare.asMode("predict/io")
      }).run()
      pbDoc = xc.db.onehot('pb')
      self.checkXC(xc,'predict/io',pbDoc,collections.defaultdict(lambda:191))
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testTCToyIgnoringTypes(self):
    """Untyped DB via xc.runExpt (TF backends only)."""
    matrixdb.conf.ignore_types = True
    optdict,args = comline.parseCommandLine(
        ["--db", os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
         "--prog", os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
         "--trainData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"),
         "--testData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"),
         "--proppr"])
    for compilerClass in [tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
                          tensorflowxcomp.SparseMatDenseMsgCrossCompiler]:
      xc = compilerClass(optdict['prog'])
      xc.runExpt(
          prog=optdict['prog'],
          trainData=optdict['trainData'],
          testData=optdict['testData'],
          targetMode=declare.asMode("predict/io"))
      pbDoc = xc.db.onehot('pb')
      self.checkXC(xc,'predict/io',pbDoc,collections.defaultdict(lambda:191))
      close_cross_compiler(xc)
  def checkXC(self,xc,mode,rawInput,expectedCols):
    """Assert every DB matrix, and the inference output for rawInput, has
    the column count given by expectedCols (keyed by functor / 'label')."""
    print('matrixdb.conf.ignore_types',matrixdb.conf.ignore_types)
    db = xc.db
    for (functor,arity),mat in list(db.matEncoding.items()):
      print(functor,arity,'shape',mat.shape)
      r,c = mat.shape
      self.assertEqual(c,expectedCols[functor])
    inferenceFun = xc.inferenceFunction(mode)
    y = inferenceFun(rawInput)
    r,c = y.shape
    self.assertEqual(c,expectedCols['label'])
class TestMultiModeXC(unittest.TestCase):
  """Train and evaluate a program with several target modes (matchtoy data).

  Bug fix: both tests previously asserted ``self.dset.modesToLearn() > 1``.
  ``modesToLearn()`` returns an iterable of modes (it is iterated below),
  and ordering a sequence against an int raises TypeError on Python 3; the
  intended check is that more than one mode is present, so we compare the
  count instead.
  """
  def setUp(self):
    # Build a small multi-mode program + dataset from the matchtoy files.
    self.db = matrixdb.MatrixDB.loadFile(
        os.path.join(testtensorlog.TEST_DATA_DIR,'matchtoy.cfacts'))
    self.prog = program.ProPPRProgram.loadRules(
        os.path.join(testtensorlog.TEST_DATA_DIR,"matchtoy.ppr"),db=self.db)
    self.dset = dataset.Dataset.loadExamples(
        self.db, os.path.join(testtensorlog.TEST_DATA_DIR,'matchtoy-train.exam'),proppr=False)
    self.prog.setAllWeights()
  def testInScaffold(self):
    """Run the Expt scaffold over every tested compiler on all modes."""
    print(TESTED_COMPILERS)
    # fixed: compare the number of modes, not the sequence itself
    self.assertTrue(len(list(self.dset.modesToLearn())) > 1)
    self.prog.setAllWeights()
    for compilerClass in TESTED_COMPILERS:
      print(compilerClass)
      xc = compilerClass(self.prog)
      # compile everything
      for mode in self.dset.modesToLearn():
        xc.ensureCompiled(mode)
      learner = TESTED_LEARNERS[compilerClass](self.prog,xc)
      testAcc,testXent = Expt({
          'prog':self.prog,
          'trainData':self.dset,
          'testData':self.dset,
          'learner':learner,
          'savedTestPredictions':'TestMultiModeXC.testInScaffold.%s.solutions.txt'%compilerClass.__name__
      }).run()
      print(testAcc)
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testIt(self):
    """Hand-rolled TF training loop over all modes (TF backends only)."""
    # fixed: compare the number of modes, not the sequence itself
    self.assertTrue(len(list(self.dset.modesToLearn())) > 1)
    for compilerClass in [tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
                          tensorflowxcomp.SparseMatDenseMsgCrossCompiler]:
      xc = compilerClass(self.prog)
      # compile everything
      for mode in self.dset.modesToLearn():
        xc.ensureCompiled(mode,inputs=None)
      # check the variables
      optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
      session = tf.Session()
      session.run(tf.global_variables_initializer())
      # set up for training: one minimize op per mode, restricted to that
      # mode's parameter variables
      trainStep = {}
      for mode in self.dset.modesToLearn():
        (dataLossArgs,dataLossExpr) = xc.dataLoss(mode)
        trainStep[mode] = optimizer.minimize(dataLossExpr, var_list=xc.getParamVariables(mode))
      # train
      for i in range(2): #epochs
        for mode in self.dset.modesToLearn():
          X = self.dset.getX(mode)
          Y = self.dset.getY(mode)
          fd = xc.getFeedDict(mode,X,Y,wrapped=False)
          session.run(trainStep[mode],feed_dict=fd)
      # test
      for mode in self.dset.modesToLearn():
        X = self.dset.getX(mode)
        Y = self.dset.getY(mode)
        Y_ = xc.inferenceFunction(mode)(X)
        acc = xc.accuracy(mode,X,Y)
        print('mode',mode,'acc',acc)
      session.close()
      close_cross_compiler(xc)
class TestMatParams(unittest.TestCase):
  """Learning a matrix-valued (arity-2) parameter with the simple API.

  Fix: testMToyMatParam builds a TF graph (tf.placeholder, tf.Session, ...)
  but had no TensorFlow guard, unlike the sibling TF-only tests; when TF is
  absent this module still imports (tf is undefined), so the test would
  error instead of skipping. Added the standard skipUnless decorator.
  """
  def setUp(self):
    # scratch directory for any serialized artifacts
    self.cacheDir = tempfile.mkdtemp()
  def cacheFile(self,fileName):
    """Return a path inside this test's scratch directory."""
    return os.path.join(self.cacheDir,fileName)
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testMToyMatParam(self):
    """Train with 'dabbrev'/2 marked as a parameter and export the result."""
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"matchtoy.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"matchtoy.ppr"))
    trainData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"matchtoy-train.exam"))
    tlog.db.markAsParameter('dabbrev',2)
    factDict = tlog.db.matrixAsPredicateFacts('dabbrev',2,tlog.db.matEncoding[('dabbrev',2)])
    print('before learning',len(factDict),'dabbrevs')
    self.assertTrue(len(factDict)==5)
    for f in sorted(factDict.keys()):
      print('>',str(f),factDict[f])
    # expt pipeline
    mode = list(trainData.keys())[0]
    TX,TY = trainData[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=TY.shape, name='tensorlog/trueY')
    loss = tlog.loss(mode)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_step = optimizer.minimize(loss)
    train_batch_fd = {tlog.input_placeholder_name(mode):TX, tlog.target_output_placeholder_name(mode):TY}
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    for i in range(5):
      print('epoch',i+1)
      session.run(train_step, feed_dict=train_batch_fd)
    tlog.set_all_db_params_to_learned_values(session)
    # params = {'prog':prog,'trainData':trainData, 'testData':testData}
    # result = expt.Expt(params).run()
    # factDict = db.matrixAsPredicateFacts('dabbrev',2,db.matEncoding[('dabbrev',2)])
    # print 'after learning',len(factDict),'dabbrevs'
    # for f in sorted(factDict.keys()):
    #   print '>',str(f),factDict[f]
    # self.assertTrue(len(factDict)>5)
@unittest.skipUnless(xctargets.tf,"Tensorflow not available")
class TestSimple(unittest.TestCase):
  """Tests for the simple.Compiler / simple.Builder convenience layer."""
  def testEmptyRules(self):
    """Constructing a Compiler with a DB but no rules must not raise."""
    # should not throw an error
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"))
  def testIncrementalDBLoad(self):
    """Declare a schema with Builder, then load the DB from several
    .cfacts files and check the resulting matrix nnz counts."""
    b = simple.Builder()
    predict,label,hasWord,posPair,negPair = b.predicates("predict,label,hasWord,posPair,negPair")
    doc_t,label_t,word_t,labelWordPair_t = b.types("doc_t,label_t,word_t,labelWordPair_t")
    b.schema += predict(doc_t,label_t) & label(label_t)
    b.schema += hasWord(doc_t,word_t) & posPair(word_t,labelWordPair_t) & negPair(word_t,labelWordPair_t)
    for basename in "textcattoy_corpus.cfacts textcattoy_labels.cfacts textcattoy_pairs.cfacts".split(" "):
      b.db += os.path.join(testtensorlog.TEST_DATA_DIR, basename)
    tlog = simple.Compiler(db=b.db)
    for (functor,arity,nnz) in [('hasWord',2,99),('label',1,2),('negPair',2,56)]:
      m = tlog.db.matEncoding[(functor,arity)]
      self.assertTrue(m.nnz == nnz)
  def testBatch(self):
    """Full-batch training, then a round-trip DB serialization check: the
    learned parameters must survive serialize_db + reload in a new graph."""
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"))
    trainData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    testData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"))
    mode = list(trainData.keys())[0]
    TX,TY = trainData[mode]
    UX,UY = testData[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    loss = tlog.loss(mode)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_step = optimizer.minimize(loss)
    train_batch_fd = {tlog.input_placeholder_name(mode):TX, tlog.target_output_placeholder_name(mode):TY}
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc0 = session.run(accuracy, feed_dict=test_batch_fd)
    print('initial accuracy',acc0)
    self.assertTrue(acc0<0.6)
    for i in range(10):
      print('epoch',i+1)
      session.run(train_step, feed_dict=train_batch_fd)
    acc1 = session.run(accuracy, feed_dict=test_batch_fd)
    print('final accuracy',acc1)
    self.assertTrue(acc1>=0.9)
    # test a round-trip serialization
    # saves the db
    cacheDir = tempfile.mkdtemp()
    db_file = os.path.join(cacheDir,'simple.db')
    tlog.set_all_db_params_to_learned_values(session)
    tlog.serialize_db(db_file)
    # load everything into a new graph and don't reset the learned params
    new_graph = tf.Graph()
    with new_graph.as_default():
      tlog2 = simple.Compiler(
          db=db_file,
          prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
          autoset_db_params=False)
      # reconstruct the accuracy measure
      inference2 = tlog2.inference(mode)
      trueY2 = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY2')
      correct2 = tf.equal(tf.argmax(trueY2,1), tf.argmax(inference2,1))
      accuracy2 = tf.reduce_mean(tf.cast(correct2, tf.float32))
      # eval accuracy in a new session
      session2 = tf.Session()
      session2.run(tf.global_variables_initializer())
      test_batch_fd2 = {tlog2.input_placeholder_name(mode):UX, trueY2.name:UY}
      acc3 = session2.run(accuracy2, feed_dict=test_batch_fd2)
      print('accuracy after round-trip serialization',acc3)
      self.assertTrue(acc3>=0.9)
    session.close()
  def testMinibatch(self):
    """Minibatch training via tlog.minibatches (see runTextCatLearner)."""
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"))
    self.runTextCatLearner(tlog)
  def runTextCatLearner(self,tlog):
    """Shared driver: train the textcat program with minibatches of 2 and
    assert test accuracy goes from <0.6 to >=0.9."""
    trainData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    testData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"))
    mode = list(trainData.keys())[0]
    UX,UY = testData[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    loss = tlog.loss(mode)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_step = optimizer.minimize(loss)
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc0 = session.run(accuracy, feed_dict=test_batch_fd)
    print('initial accuracy',acc0)
    self.assertTrue(acc0<0.6)
    for i in range(10):
      print('epoch',i+1, end=' ')
      for mode,(TX,TY) in tlog.minibatches(trainData,batch_size=2):
        print('.', end=' ')
        train_minibatch_fd = {tlog.input_placeholder_name(mode):TX, tlog.target_output_placeholder_name(mode):TY}
        session.run(train_step, feed_dict=train_minibatch_fd)
      print('epoch',i+1,'finished')
    acc1 = session.run(accuracy, feed_dict=test_batch_fd)
    print('final accuracy',acc1)
    self.assertTrue(acc1>=0.9)
    session.close()
  def testBuilder1(self):
    """Builder rule syntax: plain rules, rule-id features, and general
    feature annotations all pretty-print as expected."""
    b = simple.Builder()
    X,Y,Z = b.variables("X Y Z")
    aunt,parent,sister,wife = b.predicates("aunt parent sister wife")
    uncle = b.predicate("uncle")
    b += aunt(X,Y) <= uncle(X,Z) & wife(Z,Y)
    b += aunt(X,Y) <= parent(X,Z) & sister(Z,Y)
    r1 = b.rule_id("ruleid_t","r1")
    r2 = b.rule_id("ruleid_t","r2")
    b += aunt(X,Y) <= uncle(X,Z) & wife(Z,Y) // r1
    b += aunt(X,Y) <= parent(X,Z) & sister(Z,Y) // r2
    feature,description = b.predicates("feature description")
    weight = b.predicate("weight")
    F = b.variable("F")
    D = b.variable("D")
    b += aunt(X,Y) <= uncle(X,Z) & wife(Z,Y) // (weight(F) | description(X,D) & feature(X,F))
    b.rules.listing()
    rs = b.rules.rulesFor(parser.Goal('aunt',[X,Y]))
    self.assertEqual(str(rs[0]), "aunt(X,Y) :- uncle(X,Z), wife(Z,Y).")
    self.assertEqual(str(rs[1]), "aunt(X,Y) :- parent(X,Z), sister(Z,Y).")
    self.assertEqual(str(rs[2]), "aunt(X,Y) :- uncle(X,Z), wife(Z,Y) {weight(R1) : assign(R1,r1,ruleid_t)}.")
    self.assertEqual(str(rs[3]), "aunt(X,Y) :- parent(X,Z), sister(Z,Y) {weight(R2) : assign(R2,r2,ruleid_t)}.")
    self.assertEqual(str(rs[4]), "aunt(X,Y) :- uncle(X,Z), wife(Z,Y) {weight(F) : description(X,D),feature(X,F)}.")
  def testBuilder2(self):
    """Builder-constructed textcat rules train successfully (untyped DB)."""
    b = simple.Builder()
    predict,assign,weighted,hasWord,posPair,negPair = b.predicates("predict assign weighted hasWord posPair negPair")
    X,Pos,Neg,F,W = b.variables("X Pos Neg F W")
    b += predict(X,Pos) <= assign(Pos,'pos','label') // (weighted(F) | hasWord(X,W) & posPair(W,F))
    b += predict(X,Neg) <= assign(Neg,'neg','label') // (weighted(F) | hasWord(X,W) & negPair(W,F))
    dbSpec = os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts")
    self.runTextCatLearner(simple.Compiler(db=dbSpec,prog=b.rules))
  def testBuilder3(self):
    """Builder-declared schema applied to the untyped fact file."""
    b = simple.Builder()
    predict,assign,weighted,hasWord,posPair,negPair,label = b.predicates("predict assign weighted hasWord posPair negPair label")
    doc_t,label_t,word_t,labelWordPair_t = b.types("doc_t label_t word_t labelWordPair_t")
    b.schema += predict(doc_t,label_t)
    b.schema += hasWord(doc_t,word_t)
    b.schema += posPair(word_t,labelWordPair_t)
    b.schema += negPair(word_t,labelWordPair_t)
    b.schema += label(label_t)
    X,Pos,Neg,F,W = b.variables("X Pos Neg F W")
    b.rules += predict(X,Pos) <= assign(Pos,'pos','label_t') // (weighted(F) | hasWord(X,W) & posPair(W,F))
    b.rules += predict(X,Neg) <= assign(Neg,'neg','label_t') // (weighted(F) | hasWord(X,W) & negPair(W,F))
    # use the untyped version of the facts to make sure the schema works
    b.db = os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy.cfacts")
    self.runTextCatLearner(simple.Compiler(db=b.db, prog=b.rules))
@unittest.skipUnless(xctargets.tf,"Tensorflow not available")
class TestReparameterizationAndTypedLoading(unittest.TestCase):
  """Regression test: typed fact loading plus parameter export should
  round-trip the stored values through the cross-compiler unchanged.

  Fix: this class instantiates tensorflowxcomp and tf.Session directly but
  had no TensorFlow guard (unlike TestSimple); with TF absent the module
  sets tensorflowxcomp=None, so the test would crash rather than skip.
  """
  def testBugWasFixed(self):
    """Export after init must preserve the 0.1/0.9 values of r/1."""
    # use the untyped version of the facts to make sure the schema works
    db = matrixdb.MatrixDB()
    db.addLines(["# :- r(lo_or_hi_t)\n",
                 "\t".join("r low 0.1".split()) + "\n",
                 "\t".join("r hi 0.9".split()) + "\n"])
    db.markAsParameter('r',1)
    prog = program.Program(db=db)
    typeName = db.schema.getArgType("r",1,0)
    idLow = db.schema.getId(typeName,"low")
    idHi = db.schema.getId(typeName,"hi")
    db_r = db.matEncoding[('r',1)]
    self.approxEqual(db_r[0,idLow], 0.1)
    self.approxEqual(db_r[0,idHi], 0.9)
    xc = tensorflowxcomp.SparseMatDenseMsgCrossCompiler(prog)
    v_r = xc._vector(declare.asMode("r(i)"))
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    # write the (possibly reparameterized) variable back into the DB;
    # values must come back unchanged
    xc.exportAllLearnedParams()
    print('exported to xc',db.matEncoding[('r',1)])
    db_r = db.matEncoding[('r',1)]
    self.approxEqual(db_r[0,idLow], 0.1)
    self.approxEqual(db_r[0,idHi], 0.9)
  def approxEqual(self,a,b):
    """Assert |a - b| < 1e-4, coercing a to a Python float first."""
    self.assertTrue(abs(float(a)-b) < 0.0001)
class TestPlugins(unittest.TestCase):
  """Tests for user-defined-predicate (udp) plugins: plain callables
  registered via program.Plugins and spliced into compiled rules."""
  def test_identity_io(self):
    """Identity plugin in i,o position behaves like a pass-through goal."""
    ruleStrings = ['predict(X,Y) :- assign(Pos,pos,label),udp1(Pos,Y) {weighted(F): hasWord(X,W),posPair(W,F)}.',
                   'predict(X,Y) :- assign(Neg,neg,label),udp1(Neg,Y) {weighted(F): hasWord(X,W),negPair(W,F)}.']
    plugins = program.Plugins()
    plugins.define('udp1/io', lambda x:x, lambda inputType:'label')
    self.check_learning_with_udp(ruleStrings,plugins)
  def test_identity_oi(self):
    """Identity plugin with reversed (o,i) argument order."""
    ruleStrings = ['predict(X,Y) :- assign(Pos,pos,label),udp2(Y,Pos) {weighted(F): hasWord(X,W),posPair(W,F)}.',
                   'predict(X,Y) :- assign(Neg,neg,label),udp2(Y,Neg) {weighted(F): hasWord(X,W),negPair(W,F)}.']
    plugins = program.Plugins()
    plugins.define('udp2/oi', lambda x:x, lambda inputType:'label')
    self.check_learning_with_udp(ruleStrings,plugins)
  def test_double_io1(self):
    """Plugin that doubles its input, used in the rule body."""
    ruleStrings = ['predict(X,Y) :- assign(Pos,pos,label),udp3(Pos,Y) {weighted(F): hasWord(X,W),posPair(W,F)}.',
                   'predict(X,Y) :- assign(Neg,neg,label),udp3(Neg,Y) {weighted(F): hasWord(X,W),negPair(W,F)}.']
    plugins = program.Plugins()
    plugins.define('udp3/io', lambda x:2*x, lambda inputType:'label')
    self.check_learning_with_udp(ruleStrings,plugins)
  def test_double_io2(self):
    """Doubling plugin inside the feature-generator part of the rules."""
    ruleStrings = ['predict(X,Pos) :- assign(Pos,pos,label) {weighted(F): hasWord(X,W),double(W,W2),posPair(W2,F)}.',
                   'predict(X,Neg) :- assign(Neg,neg,label) {weighted(F2): hasWord(X,W),negPair(W,F),double(F,F2)}.']
    plugins = program.Plugins()
    plugins.define('double/io', lambda x:2*x, lambda inputType:inputType)
    self.check_learning_with_udp(ruleStrings,plugins)
  def test_kw_i(self):
    """Arity-1 plugins producing fixed keyword vectors (no learning)."""
    ruleStrings = ['predict(X,Pos) :- assign(Pos,pos,label),hasWord(X,W),poskw(W).',
                   'predict(X,Neg) :- assign(Neg,neg,label),hasWord(X,W),negkw(W).']
    plugins = program.Plugins()
    db = matrixdb.MatrixDB.loadFile(os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"))
    poskw_v = (db.onehot('little','word') + db.onehot('red','word')).todense()
    negkw_v = (db.onehot('big','word') + db.onehot('job','word') + db.onehot('huge','word')).todense()
    plugins.define('poskw/i', lambda:poskw_v, lambda:'word')
    plugins.define('negkw/i', lambda:negkw_v, lambda:'word')
    self.check_udp(ruleStrings,plugins)
  def check_udp(self,ruleStrings,plugins):
    """Compile the rules with plugins and just evaluate accuracy (no training)."""
    db = matrixdb.MatrixDB.loadFile(os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"))
    rules = testtensorlog.rules_from_strings(ruleStrings)
    prog = program.ProPPRProgram(rules=rules,db=db,plugins=plugins)
    mode = declare.asMode("predict/io")
    prog.compile(mode)
    fun = prog.function[(mode,0)]
    print("\n".join(fun.pprint()))
    tlog = simple.Compiler(db=db, prog=prog)
    testData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"))
    mode = list(testData.keys())[0]
    UX,UY = testData[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc1 = session.run(accuracy, feed_dict=test_batch_fd)
    print('final accuracy',acc1)
    session.close()
  # TOFIX needs some work to pass
  # - you can't do polytree BP with multiple inputs
  # - so there's not a simple fix
  # - probably do this: (1) treat inputs to leftmost userDef as outputs (2) run message-passing for those outputs
  #   (3) add the user def operator (4) repeat .... (5) when there are no more plugins
  def notest_isect_iio(self):
    """Disabled (see TOFIX above): multi-input plugin via polytree BP."""
    bpcompiler.conf.trace = True
    ruleStrings = ['predict(X,Y) :- hasWord(X,W),posPair(W,P1),negPair(W,P2),isect(P1,P2,Y).']
    plugins = program.Plugins()
    plugins.define('isect/iio', lambda x1,x2:x1*x2, lambda t1,t2:t1)
    self.assertTrue(plugins.isDefined(declare.asMode('isect/iio')))
    self.check_learning_with_udp(ruleStrings,plugins)
  # NOTE(review): no 'test_' prefix, so unittest never collects this —
  # presumably disabled on purpose like notest_isect_iio; confirm
  def argmax(self):
    """Disabled: softmax-as-argmax plugin on the olympics toy data."""
    bpcompiler.conf.trace = True
    ruleStrings = ['predict(X,Y):-olympics(X,Z),nations(Z),argmax(Z,Y).']
    plugins = program.Plugins()
    plugins.define('argmax/io',lambda x1:tf.nn.softmax(x1), lambda t1:t1)
    db = matrixdb.MatrixDB.loadFile(os.path.join(testtensorlog.TEST_DATA_DIR,'argmax.cfacts'))
    rules = testtensorlog.rules_from_strings(ruleStrings)
    prog = program.ProPPRProgram(rules=rules,db=db,plugins=plugins)
    prog.setAllWeights()
    mode = declare.asMode("predict/io")
    prog.compile(mode)
    fun = prog.function[(mode,0)]
    print("\n".join(fun.pprint()))
    tlog = simple.Compiler(db=db, prog=prog)
    data = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"argmax.exam"))
    mode = list(data.keys())[0]
    UX,UY = data[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc0 = session.run(accuracy, feed_dict=test_batch_fd)
    print('initial accuracy',acc0)
    self.assertTrue(acc0>0.9)
    session.close()
    # acc0 = session.run(inference, feed_dict=test_batch_fd)
    # print "inference results:"
    # print acc0
    # print np.argmax(acc0,1)
    # print "trueY:"
    # print UY
    # print np.argmax(UY,1)
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def check_learning_with_udp(self,ruleStrings,plugins,dbfile=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts")):
    """Shared driver: compile rules+plugins, train 10 epochs full-batch,
    and assert test accuracy goes from <0.6 to >=0.9."""
    db = matrixdb.MatrixDB.loadFile(dbfile)
    rules = testtensorlog.rules_from_strings(ruleStrings)
    prog = program.ProPPRProgram(rules=rules,db=db,plugins=plugins)
    prog.setAllWeights()
    mode = declare.asMode("predict/io")
    prog.compile(mode)
    fun = prog.function[(mode,0)]
    print("\n".join(fun.pprint()))
    tlog = simple.Compiler(db=db, prog=prog)
    trainData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    testData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"))
    mode = list(trainData.keys())[0]
    TX,TY = trainData[mode]
    UX,UY = testData[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    loss = tlog.loss(mode)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_step = optimizer.minimize(loss)
    train_batch_fd = {tlog.input_placeholder_name(mode):TX, tlog.target_output_placeholder_name(mode):TY}
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc0 = session.run(accuracy, feed_dict=test_batch_fd)
    print('initial accuracy',acc0)
    self.assertTrue(acc0<0.6)
    for i in range(10):
      print('epoch',i+1)
      session.run(train_step, feed_dict=train_batch_fd)
    acc1 = session.run(accuracy, feed_dict=test_batch_fd)
    print('final accuracy',acc1)
    self.assertTrue(acc1>=0.9)
    session.close()
if __name__ == "__main__":
  logging.basicConfig(level=logging.INFO)
  # default is to test on everything adding command line arguments
  # 'tensorflow' 'theano' 'sparse' 'dense' filters the list (so
  # 'testxcomp.py tensorflow sparse' will run just
  # tensorflowxcomp.SparseMatDenseMsgCrossCompiler)
  if 'theano' in sys.argv[1:]:
    TESTED_COMPILERS = [c for c in TESTED_COMPILERS if c.__module__.endswith("theanoxcomp")]
  if 'tensorflow' in sys.argv[1:]:
    TESTED_COMPILERS = [c for c in TESTED_COMPILERS if c.__module__.endswith("tensorflowxcomp")]
  if 'dense' in sys.argv[1:]:
    TESTED_COMPILERS = [c for c in TESTED_COMPILERS if c.__name__.startswith("Dense")]
  if 'sparse' in sys.argv[1:]:
    TESTED_COMPILERS = [c for c in TESTED_COMPILERS if c.__name__.startswith("Sparse")]
  # strip the filter words so unittest.main() doesn't treat them as test names
  sys.argv = [a for a in sys.argv if a not in "theano tensorflow dense sparse".split()]
  print('TESTED_COMPILERS',TESTED_COMPILERS)
  unittest.main()
| 43.175602 | 138 | 0.655857 |
import logging
import numpy as np
import os
import unittest
import sys
import collections
import tempfile
from tensorlog import xctargets
if xctargets.tf:
import tensorflow as tf
from tensorlog import tensorflowxcomp
else:
tensorflowxcomp=None
if xctargets.theano:
import theano
from tensorlog import theanoxcomp
else:
theanoxcomp=None
from tensorlog import bpcompiler
from tensorlog import comline
from tensorlog import dataset
from tensorlog import declare
from tensorlog import matrixdb
from tensorlog import learn
from tensorlog import mutil
from tensorlog import parser
from tensorlog import program
from tensorlog import simple
from tensorlog import testtensorlog
from tensorlog import funs
from tensorlog import ops
from tensorlog import learnxcomp as learnxc
from tensorlog.expt import Expt
if xctargets.tf:
  tf.logging.set_verbosity(tf.logging.WARN)
# Compiler classes exercised by the parameterized tests, and the matching
# learner class for each backend, populated only for installed backends.
TESTED_COMPILERS = []
TESTED_LEARNERS = {}
if xctargets.theano:
  for c in [
      theanoxcomp.DenseMatDenseMsgCrossCompiler,
      theanoxcomp.SparseMatDenseMsgCrossCompiler
      ]:
    TESTED_COMPILERS.append(c)
    TESTED_LEARNERS[c]=theanoxcomp.FixedRateGDLearner
if xctargets.tf:
  for c in [
      tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
      tensorflowxcomp.SparseMatDenseMsgCrossCompiler,
      ]:
    TESTED_COMPILERS.append(c)
    TESTED_LEARNERS[c]=tensorflowxcomp.FixedRateGDLearner
RUN_OLD_INFERENCE_TESTS = False
SAVE_SUMMARIES = False
def close_cross_compiler(xc):
  """Dispose of a cross-compiler; for TF backends also reset the default graph."""
  xc.close()
  needs_graph_reset = xctargets.tf and isinstance(
      xc, tensorflowxcomp.TensorFlowCrossCompiler)
  if needs_graph_reset:
    tf.reset_default_graph()
class TestXCSmallProofs(testtensorlog.TestSmallProofs):
  """Re-runs the small-proof inference tests through every cross-compiler.

  Each test_* method states a tiny rule set, a query mode, an input symbol,
  and the expected symbol->weight result dict; the *_check helpers compile
  the program with each class in TESTED_COMPILERS and compare outputs.
  """

  def test_if(self):
    self.xcomp_check(['p(X,Y):-spouse(X,Y).'], 'p(i,o)', 'william', {'susan':1.0})
  def test_failure(self):
    # a query with no proofs should put all mass on the NULL entity
    self.xcomp_check(['p(X,Y):-spouse(X,Y).'], 'p(i,o)', 'lottie', {matrixdb.NULL_ENTITY_NAME:1.0})
  def test_reverse_if(self):
    self.xcomp_check(['p(X,Y):-sister(Y,X).'], 'p(i,o)', 'rachel', {'william':1.0})
  def test_or(self):
    self.xcomp_check(['p(X,Y):-spouse(X,Y).', 'p(X,Y):-sister(X,Y).'], 'p(i,o)', 'william',
                     {'susan':1.0, 'rachel':1.0, 'lottie':1.0, 'sarah':1.0})
  def test_chain(self):
    self.xcomp_check(['p(X,Z):-spouse(X,Y),sister(Y,Z).'], 'p(i,o)', 'susan',
                     {'rachel':1.0, 'lottie':1.0, 'sarah':1.0})
    self.xcomp_check(['p(X,Z):-sister(X,Y),child(Y,Z).'], 'p(i,o)', 'william',
                     {'charlotte':1.0, 'lucas':1.0, 'poppy':1.0, 'caroline':1.0, 'elizabeth':1.0})
  def test_mid(self):
    self.xcomp_check(['p(X,Y):-sister(X,Y),child(Y,Z).'], 'p(i,o)', 'william',
                     {'sarah': 1.0, 'rachel': 2.0, 'lottie': 2.0})
  def test_nest(self):
    self.xcomp_check(['s(X,Y):-spouse(X,Y).','t(X,Z):-spouse(X,Y),s(Y,Z).'], 't(i,o)', 'susan', {'susan': 1.0})
  def test_back1(self):
    self.xcomp_check(['p(X,Y):-spouse(X,Y),sister(X,Z).'], 'p(i,o)', 'william', {'susan': 3.0})
  def test_back2(self):
    self.xcomp_check(['p(X,Y):-spouse(X,Y),sister(X,Z1),sister(X,Z2).'],'p(i,o)','william',{'susan': 9.0})
  def test_rec1(self):
    # recursion depth affects proof counts, so check at two max depths
    program.DEFAULT_MAXDEPTH=4
    self.xcomp_check(['p(X,Y):-spouse(X,Y).','p(X,Y):-p(Y,X).'], 'p(i,o)','william',{'susan': 5.0})
    program.DEFAULT_MAXDEPTH=10
    self.xcomp_check(['p(X,Y):-spouse(X,Y).','p(X,Y):-p(Y,X).'], 'p(i,o)','william',{'susan': 11.0})
  def test_const_output(self):
    self.xcomp_check(['sis(X,W):-assign(W,william),child(X,Y).'], 'sis(i,o)', 'sarah', {'william': 1.0})
    self.xcomp_check(['sis(X,W):-assign(W,william),child(X,Y).'], 'sis(i,o)', 'lottie', {'william': 2.0})
  def test_const_chain1(self):
    self.xcomp_check(['p(X,S) :- assign(S,susan),sister(X,Y),child(Y,Z).'],'p(i,o)','william',{'susan': 5.0})
  def test_const_chain2(self):
    self.xcomp_check(['p(X,Pos) :- assign(Pos,pos),child(X,Y),young(Y).'],'p(i,o)','sarah',{'pos':1.0})
    self.xcomp_check(['p(X,Pos) :- assign(Pos,pos),child(X,Y),young(Y).'],'p(i,o)','lottie',{'pos':2.0})
  def test_alt_chain(self):
    self.xcomp_check(['p(X,W) :- spouse(X,W),sister(X,Y),child(Y,Z).'],'p(i,o)','william',{'susan': 5.0})
    pass
  def test_proppr1(self):
    # weighted-rule (ProPPR) variant: rule features r1/r2 carry weights
    w = 7*self.db.onehot('r1')+3*self.db.onehot('r2')
    self.proppr_xcomp_check(w,['p(X,Y):-sister(X,Y) {r1}.','p(X,Y):-spouse(X,Y) {r2}.'],'p(i,o)',
                            'william', {'sarah': 7.0, 'rachel': 7.0, 'lottie': 7.0, 'susan': 3.0})
  def test_proppr2(self):
    w = 3*self.db.onehot('r2')
    self.proppr_xcomp_check(w,['p(X,Y):-spouse(Y,X) {r2}.'],'p(i,o)',
                            'susan', {'william': 3.0})
  def test_reuse1(self):
    self.xcomp_check(['p(X,Y) :- r(X,Z),r(Z,Y).', 'r(X,Y):-spouse(X,Y).'], 'p(i,o)', 'william',
                     {'william':1.0})

  def _removeZeros(self, sdict):
    # NOTE(review): currently disabled by the 'if True' guard — the
    # normalization code below is dead; kept presumably for debugging.
    if True: return sdict
    e = sdict[None]
    ret = dict([ (k,v-e) for (k,v) in list(sdict.items()) if v != e])
    z = sum(ret.values())
    for k in ret: ret[k] = ret[k]/z
    return ret

  def xcomp_check(self,ruleStrings,mode_string,input_symbol,expected_result_dict,compare=False):
    """Check an unweighted ('vanilla') program against every compiler."""
    self._xcomp_check('vanilla',None,ruleStrings,mode_string,input_symbol,expected_result_dict,compare)
  def proppr_xcomp_check(self,weightVec,ruleStrings,mode_string,input_symbol,expected_result_dict):
    """Check a weighted (ProPPR) program against every compiler."""
    self._xcomp_check('proppr',weightVec,ruleStrings,mode_string,input_symbol,expected_result_dict)
  def _xcomp_check(self,progType,weightVec,ruleStrings,mode_string,input_symbol,expected_result_dict,compare=False):
    """Compile the rules and verify inference (and proof counts) for each
    compiler class in TESTED_COMPILERS."""
    # run the traditional tensorlog inference tests as a baseline, if enabled
    if RUN_OLD_INFERENCE_TESTS:
      if progType=='proppr':
        self.proppr_inference_check(weightVec,ruleStrings,mode_string,input_symbol,expected_result_dict)
      else:
        self.inference_check(ruleStrings,mode_string,input_symbol,expected_result_dict)
    print('xcomp inference for mode',mode_string,'on input',input_symbol)
    # inference output is softmax'd, so normalize the expected dict to match
    testtensorlog.softmax_normalize(expected_result_dict)
    rules = parser.RuleCollection()
    for r in ruleStrings:
      rules.add(parser.Parser().parseRule(r))
    if progType=='proppr':
      prog = program.ProPPRProgram(db=self.db,rules=rules,weights=weightVec)
    else:
      prog = program.Program(db=self.db,rules=rules)
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(prog)
      print('== performing eval with',compilerClass,'==')
      inferenceFun = xc.inferenceFunction(mode_string)
      y = inferenceFun(prog.db.onehot(input_symbol))
      # theano output may be a dense message, so compare via maximal elements
      actual_result_dict = self.db.rowAsSymbolDict(y)
      self.check_maxes_in_dicts(actual_result_dict, expected_result_dict)
      # the softmax'd output should sum to (approximately) one
      l1_error = abs(sum(actual_result_dict.values()) - 1.0)
      #print 'l1_error',l1_error,'actual_result_dict',actual_result_dict,'expected_result_dict',expected_result_dict
      self.assertTrue( l1_error < 0.0001)
      # also test proofCountFun
      proofCountFun = xc.proofCountFunction(mode_string)
      pc = proofCountFun(prog.db.onehot(input_symbol))
      # theano output will be a (probably dense) message, so
      # just compare that maximal elements from these two dicts
      # are the same
      pc_result_dict = self.db.rowAsSymbolDict(pc)
      if len(pc_result_dict)>0:
        self.check_maxes_in_dicts(pc_result_dict, expected_result_dict)
      print('== eval checks passed ==')
      close_cross_compiler(xc)

  def check_maxes_in_dicts(self,actual,expected):
    """Assert that 'actual' and 'expected' have the same set of
    maximal-valued keys (values themselves may differ after softmax)."""
    def maximalElements(d):
      m = max(d.values())
      return set(k for k in d if d[k]==m)
    actualMaxes = maximalElements(actual)
    expectedMaxes = maximalElements(expected)
    print('actual',actualMaxes,'expected',expectedMaxes)
    for a in actualMaxes:
      self.assertTrue(a in expectedMaxes)
    for a in expectedMaxes:
      self.assertTrue(a in actualMaxes)
class TestXCGrad(testtensorlog.TestGrad):
  """Re-runs the gradient tests through every cross-compiler.

  Each test declares rules, a mode, the parameter predicates to
  differentiate with respect to, (x, [y..]) example pairs, and the expected
  *sign* of the gradient for selected facts (+1 / -1).
  """

  def setUp(self):
    self.db = matrixdb.MatrixDB.loadFile(os.path.join(testtensorlog.TEST_DATA_DIR,'fam.cfacts'))

  def test_if(self):
    rules = ['p(X,Y):-sister(X,Y).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'sister(william,rachel)': -1,'sister(william,lottie)': +1})

  def test_if2(self):
    # same as test_if but with duplicated examples in the batch
    rules = ['p(X,Y):-sister(X,Y).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah']), ('william',['rachel','sarah'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie']), ('william',['lottie'])],
                     {'sister(william,rachel)': -1,'sister(william,lottie)': +1})

  def test_reverse_if(self):
    rules = ['p(X,Y):-parent(Y,X).']
    mode = 'p(i,o)'
    params = [('parent',2)]
    self.xgrad_check(rules, mode, params,
                     [('lottie',['charlotte'])],
                     {'parent(charlotte,lottie)': +1,'parent(lucas,lottie)': -1})

  def test_chain1(self):
    # gradients through a two-predicate chain, w.r.t. each predicate alone
    # and then both together
    rules = ['p(X,Z):-sister(X,Y),child(Y,Z).']
    mode = 'p(i,o)'
    self.xgrad_check(rules,mode,
                     [('sister',2)],
                     [('william',['caroline','elizabeth'])],
                     {'sister(william,rachel)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules,mode,
                     [('child',2)],
                     [('william',['caroline','elizabeth'])],
                     {'child(rachel,elizabeth)': +1,'child(lottie,lucas)': -1})
    self.xgrad_check(rules,mode,
                     [('child',2),('sister',2)],
                     [('william',['caroline','elizabeth'])],
                     {'child(rachel,elizabeth)': +1,'child(lottie,lucas)': -1, 'sister(william,rachel)': +1,'sister(william,lottie)': -1})

  def test_chain2(self):
    rules = ['p(X,Z):-spouse(X,Y),sister(Y,Z).']
    mode = 'p(i,o)'
    self.xgrad_check(rules,mode,
                     [('sister',2)],
                     [('susan',['rachel'])],
                     {'sister(william,rachel)': +1,'sister(william,lottie)': -1})

  def test_call1(self):
    # gradient should flow through one level of predicate invocation
    rules = ['q(X,Y):-sister(X,Y).','p(Z,W):-q(Z,W).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'sister(william,rachel)': -1,'sister(william,lottie)': +1})

  def test_call2(self):
    # ... and through two levels of invocation
    rules = ['q(X,Y):-sister(X,Y).','p(Z,W):-r(Z,W).','r(Z,W):-q(Z,W).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': +1,'sister(william,lottie)': -1})
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'sister(william,rachel)': -1,'sister(william,lottie)': +1})

  def test_split(self):
    rules = ['p(X,Y):-sister(X,Y),child(Y,Z),young(Z).']
    mode = 'p(i,o)'
    params = [('child',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'child(lottie,lucas)': +1,'child(lottie,charlotte)': +1,'child(sarah,poppy)': -1})
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['lottie'])],
                     {'sister(william,lottie)': +1,'sister(william,sarah)': -1})

  def test_or(self):
    rules = ['p(X,Y):-child(X,Y).', 'p(X,Y):-sister(X,Y).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['charlie','rachel'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': -1,'sister(william,lottie)': -1})
    params = [('child',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['charlie','rachel'])],
                     {'child(william,charlie)': +1,'child(william,josh)': -1})
    params = [('child',2),('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['charlie','rachel'])],
                     {'child(william,charlie)': +1,'child(william,josh)': -1,'sister(william,rachel)': +1,'sister(william,sarah)': -1})

  def test_weighted_vec(self):
    rules = ['p(X,Y):-sister(X,Y),assign(R,r1),feat(R).','p(X,Y):-child(X,Y),assign(R,r2),feat(R).']
    mode = 'p(i,o)'
    params = [('sister',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','charlie'])],
                     {'sister(william,rachel)': +1,'sister(william,sarah)': -1})
    params = [('child',2)]
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','charlie'])],
                     {'child(william,charlie)': +1,'child(william,josh)': -1})
    params = [('feat',1)]
    self.xgrad_check(rules, mode, params,
                     [('william',['josh','charlie'])],
                     {'feat(r1)': -1,'feat(r2)': +1})
    self.xgrad_check(rules, mode, params,
                     [('william',['rachel','sarah','lottie'])],
                     {'feat(r1)': +1,'feat(r2)': -1})

  def learnxc_check(self,rule_strings,mode_string,params,xyPairs,expected):
    """Check gradient directions computed through the XLearner wrapper."""
    print("XLearner loss/grad eval")
    rules = testtensorlog.rules_from_strings(rule_strings)
    prog = program.Program(db=self.db,rules=rules)
    mode = declare.ModeDeclaration(mode_string)
    prog.db.clearParameterMarkings()
    for (functor,arity) in params:
      prog.db.markAsParameter(functor,arity)
    # TODO: not working yet for mini-batches so check each example
    # individually
    for x,ys in xyPairs:
      data = testtensorlog.DataBuffer(self.db)
      data.add_data_symbols(x,ys)
      for compilerClass in TESTED_COMPILERS:
        xc = compilerClass(prog)
        print('learner check for compiler',xc.__class__)
        learner = learnxc.XLearner(prog,xc)
        paramsWithUpdates = learner.crossEntropyGrad(mode,data.get_x(),data.get_y())
        updates_with_string_keys = {}
        for (functor,arity),up in paramsWithUpdates:
          print('testxcomp update for',functor,arity,'is',up)
          upDict = prog.db.matrixAsPredicateFacts(functor,arity,up)
          print('upDict',upDict)
          for fact,grad_of_fact in list(upDict.items()):
            # need to flip for cross-compilers
            updates_with_string_keys[str(fact)] = -grad_of_fact
        self.check_directions(updates_with_string_keys,expected)

  def xgrad_check(self,rule_strings,mode_string,params,xyPairs,expected):
    """Check gradient directions computed directly via dataLossGradFunction,
    then repeat the check via learnxc_check."""
    print("direct loss/grad eval")
    rules = testtensorlog.rules_from_strings(rule_strings)
    prog = program.Program(db=self.db,rules=rules)
    prog.db.clearParameterMarkings()
    for (functor,arity) in params:
      prog.db.markAsParameter(functor,arity)
    for x,ys in xyPairs:
      data = testtensorlog.DataBuffer(self.db)
      data.add_data_symbols(x,ys)
      for compilerClass in TESTED_COMPILERS:
        xc = compilerClass(prog)
        print('grad check for compiler',xc.__class__)
        gradFun = xc.dataLossGradFunction(mode_string)
        updates_with_string_keys = {}
        paramsWithUpdates = gradFun(data.get_x(),data.get_y())
        for (functor,arity),up in paramsWithUpdates:
          upDict = prog.db.matrixAsPredicateFacts(functor,arity,up)
          for fact,grad_of_fact in list(upDict.items()):
            # need to flip for cross-compilers
            updates_with_string_keys[str(fact)] = -grad_of_fact
        self.check_directions(updates_with_string_keys,expected)
    self.learnxc_check(rule_strings,mode_string,params,xyPairs,expected)
    close_cross_compiler(xc)
class TestXCProPPR(testtensorlog.TestProPPR):
  """End-to-end ProPPR-style learning tests run through the cross-compilers:
  inference, gradients, training, dataset prediction, and Expt scaffolding."""

  def setUp(self):
    super(TestXCProPPR,self).setUp()

  def debug(self):
    return self

  def evalxc(self,xc,input):
    """Run inference for mode predict/io and return the prediction with
    near-zero entries trimmed away."""
    inferenceFun = xc.inferenceFunction('predict/io')
    print(inferenceFun)
    rawPred = inferenceFun(input)
    # trim small numbers to zero
    pred = mutil.mapData(lambda d:np.clip((d - 1e-5),0.00,9999.99), rawPred)
    pred.eliminate_zeros()
    return pred

  def testNativeRow(self):
    # with untrained (uniform) weights every example should score 50/50
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      for i in range(self.numExamples):
        pred = self.evalxc(xc, self.X.getrow(i))
        d = self.prog.db.rowAsSymbolDict(pred)
        uniform = {'pos':0.5,'neg':0.5}
        self.check_dicts(d,uniform)
      close_cross_compiler(xc)

  def testNativeMatrix(self):
    # same as testNativeRow, but evaluating all rows as one matrix
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      xc.ensureCompiled(self.mode,inputs=None)
      pred = self.prog.eval(self.mode,[self.X])
      d0 = self.prog.db.matrixAsSymbolDict(pred)
      for i,d in list(d0.items()):
        uniform = {'pos':0.5,'neg':0.5,}
        self.check_dicts(d,uniform)
      close_cross_compiler(xc)

  def testGradVector(self):
    """Per-example gradients from the cross-compiler should be nonzero
    exactly where the native learner's are, with opposite sign (the
    cross-compilers return loss gradients, the native learner returns
    crossEntropy gradients)."""
    data = testtensorlog.DataBuffer(self.prog.db)
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    learner = learn.OnePredFixedRateGDLearner(self.prog)
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      self.prog.db.markAsParameter('weighted',1)
      #xc.compile(self.mode)
      gradFun = xc.dataLossGradFunction('predict/io')
      # NOTE(review): the inner loops below reuse loop variable 'i',
      # shadowing the example index; harmless here but confusing.
      for i in range(X.shape[0]):
        print("example",i)
        updates = learner.crossEntropyGrad(declare.ModeDeclaration('predict(i,o)'),X[i],Y[i])
        w0 = updates[('weighted',1)].sum(axis=0)
        print(w0)
        updates = gradFun(X[i],Y[i])
        paramKey,w = updates[0]
        print(w)
        # w is different from the w in the corresponding testtensorlog test,
        # which is a crossEntropy gradient for each example, but it should have
        # opposite directions
        nrow,ncol = w.shape
        for i in range(nrow):
          for j in range(ncol):
            self.assertTrue((w[i,j]==0) == (w0[i,j]==0))
            self.assertTrue(w[i,j] * w0[i,j] <= 0)

  def testGradMatrix(self):
    # batch version of testGradVector
    data = testtensorlog.DataBuffer(self.prog.db)
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    learner = learn.OnePredFixedRateGDLearner(self.prog)
    updates = learner.crossEntropyGrad(declare.ModeDeclaration('predict(i,o)'),X,Y)
    w0 = updates[('weighted',1)].sum(axis=0)
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      self.prog.db.markAsParameter('weighted',1)
      #xc.compile(self.mode)
      gradFun = xc.dataLossGradFunction('predict/io')
      updates = gradFun(X,Y)
      paramKey,w = updates[0]
      # w is different from the w in the corresponding testtensorlog test,
      # which is a crossEntropy gradient for each example, but it should have
      # opposite directions
      nrow,ncol = w.shape
      for i in range(nrow):
        for j in range(ncol):
          self.assertTrue((w[i,j]==0) == (w0[i,j]==0),"i=%d,j=%d,w=%g,w0=%g"%(i,j,w[i,j],w0[i,j]))
          self.assertTrue(w[i,j] * w0[i,j] <= 0.0,"i=%d,j=%d,w=%g,w0=%g"%(i,j,w[i,j],w0[i,j]))
      close_cross_compiler(xc)

  def testMultiLearn1(self):
    # inherited test intentionally disabled for cross-compilers
    pass

  def testLearn(self):
    """Train for 20 epochs and verify loss decreases and accuracy improves;
    also sanity-check that learned 'weighted' parameters separate the
    pos/neg features after training."""
    mode = declare.ModeDeclaration('predict(i,o)')
    modestr = 'predict/io'
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    for compilerClass in TESTED_COMPILERS:
      self.prog.setRuleWeights()
      self.prog.setFeatureWeights()
      if SAVE_SUMMARIES:
        xc = compilerClass(self.prog,compilerClass.__name__+".summary")
      else:
        xc = compilerClass(self.prog)
      self.prog.db.markAsParameter('weighted',1)
      v = self.prog.db.getParameter('weighted',1)
      d = self.prog.db.rowAsSymbolDict(v)
      # sanity check a couple of values
      self.assertTrue(d['little_pos'] == d['little_neg'])
      self.assertTrue(d['big_pos'] == d['big_neg'])
      # optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
      learner = TESTED_LEARNERS[compilerClass](self.prog,xc=xc,rate=0.1,epochs=20)
      lossFun = xc.dataLossFunction('predict/io')
      loss0 = lossFun(X,Y)
      print('initial train data loss',loss0)
      TX,TY = testtensorlog.matrixAsTrainingData(self.labeledData,'test',2)
      loss1 = lossFun(TX,TY)
      print('initial test data loss',loss1)
      P = learner.predict('predict/io',X)
      #acc0 = xc.accuracy('predict/io',X,Y)
      acc0 = learner.accuracy(Y,P)
      print('initial train accuracy',acc0)
      TP = learner.predict('predict/io',TX)
      #acc1 = xc.accuracy('predict/io',TX,TY)
      acc1 = learner.accuracy(TY,TP)
      print('initial test accuracy',acc1)
      print('params to optimize',xc.prog.getParamList())
      print('vars to optimize',xc.getParamVariables('predict/io'))
      # xc.optimizeDataLoss('predict/io', optimizer, X, Y, epochs=20)
      learner.trainMode('predict/io',X,Y)
      loss2 = lossFun(X,Y)
      print('final train data loss',loss2)
      loss3 = lossFun(TX,TY)
      print('final test data loss',loss3)
      P2 = learner.predict('predict/io',X)
      #acc2 = xc.accuracy('predict/io',X,Y)
      acc2 = learner.accuracy(Y,P2)
      print('final train accuracy',acc2)
      TP2 = learner.predict('predict/io',TX)
      #acc3 = xc.accuracy('predict/io',TX,TY)
      acc3 = learner.accuracy(TY,TP2)
      print('final test accuracy',acc3)
      # copy the learned parameter values back into the database
      xc.exportAllLearnedParams()
      v = self.prog.db.getParameter('weighted',1)
      d = self.prog.db.rowAsSymbolDict(v)
      # sanity check a couple of values
      self.assertTrue(d['little_pos'] > d['little_neg'])
      self.assertTrue(d['big_pos'] < d['big_neg'])
      close_cross_compiler(xc)
      self.assertTrue(acc2>=acc0)
      self.assertTrue(acc3>=acc1)
      self.assertTrue(loss2<loss0)
      self.assertTrue(loss2<loss1)
      self.assertTrue(acc2>=0.9)
      self.assertTrue(acc2==1.0)

  def testDatasetPredict(self):
    # prediction shape should match input shape both for raw matrices and
    # for Dataset-wrapped inputs
    mode = declare.ModeDeclaration('predict(i,o)')
    modestr = 'predict/io'
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    for compilerClass in TESTED_COMPILERS:
      self.prog.setRuleWeights()
      self.prog.setFeatureWeights()
      if SAVE_SUMMARIES:
        xc = compilerClass(self.prog,compilerClass.__name__+".summary")
      else:
        xc = compilerClass(self.prog)
      self.prog.db.markAsParameter('weighted',1)
      learner = TESTED_LEARNERS[compilerClass](self.prog,xc=xc,rate=0.1,epochs=20)
      P = learner.predict(mode,X)
      print("X",X.shape)
      print("P",P.shape)
      self.assertTrue(X.shape==P.shape)
      P = learner.datasetPredict(dataset.Dataset({mode:X},{mode:Y}))
      print("X",X.shape)
      print("P",P.getX(mode).shape)
      self.assertTrue(X.shape==P.getX(mode).shape)
    # NOTE(review): unusual for a test method to return a value; presumably
    # used for interactive debugging
    return xc,learner,X,Y,P

  def testExptScaffold(self):
    # exercise the Expt scaffolding end-to-end with each compiler
    mode = declare.ModeDeclaration('predict(i,o)')
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    TX,TY = testtensorlog.matrixAsTrainingData(self.labeledData,'test',2)
    self.prog.setAllWeights()
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(self.prog)
      learner = TESTED_LEARNERS[compilerClass](self.prog,xc=xc,rate=0.1,epochs=20)
      Expt({'prog':self.prog,
            'trainData':dataset.Dataset({mode:X},{mode:Y}),
            'testData':dataset.Dataset({mode:TX},{mode:TY}),
            'targetMode':mode,
            'learner':learner
      }).run()

  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testExpt(self):
    # TF-specific experiment runner (runExpt is a TF-compiler API)
    mode = declare.ModeDeclaration('predict(i,o)')
    X,Y = testtensorlog.matrixAsTrainingData(self.labeledData,'train',2)
    TX,TY = testtensorlog.matrixAsTrainingData(self.labeledData,'test',2)
    for compilerClass in [tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
                          tensorflowxcomp.SparseMatDenseMsgCrossCompiler]:
      xc = compilerClass(self.prog)
      xc.runExpt(
          prog=self.prog,
          trainData=dataset.Dataset({mode:X},{mode:Y}),
          testData=dataset.Dataset({mode:TX},{mode:TY}),
          targetMode=mode)
      close_cross_compiler(xc)
class TestXCOpGen(unittest.TestCase):
  """Tests for possibleOps(), which enumerates the operations applicable to
  an expression, both with and without type information."""
  # TODO tests for other xcompilers?

  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testTCToyTypes(self):
    # with typing on, possibleOps returns (expr, exprType) pairs restricted
    # by the declared argument types
    matrixdb.conf.ignore_types = False
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"))
    trainData = tlog.load_small_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    mode = list(trainData.keys())[0]
    docs,labels = trainData[mode]
    xc = tlog.get_cross_compiler()
    ops = xc.possibleOps(docs,'doc')
    print('doc ops',ops)
    self.assertTrue(len(ops)==1)
    (words,wordType) = ops[0]
    self.assertTrue(wordType=='word')
    ops = xc.possibleOps(words,'word')
    self.assertTrue(len(ops)==3)
    pairs = None
    for (expr,exprType) in ops:
      if exprType=='labelWordPair':
        pairs = expr
        break
    self.assertTrue(pairs is not None)
    ops = xc.possibleOps(pairs,'labelWordPair')
    self.assertTrue(len(ops)==2)
    for (expr,exprType) in ops:
      self.assertTrue(exprType=='word')
    close_cross_compiler(xc)

  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testTCToyIgnoringTypes(self):
    # with typing off, possibleOps returns bare expressions — one per
    # direction of every binary predicate
    matrixdb.conf.ignore_types = True
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"))
    trainData = tlog.load_small_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    mode = list(trainData.keys())[0]
    docs,labels = trainData[mode]
    xc = tlog.get_cross_compiler()
    ops = xc.possibleOps(docs)
    binary_predicates = [functor for (functor,arity) in tlog.db.matEncoding if arity==2]
    self.assertTrue(len(ops) == len(binary_predicates)*2)
    for x in ops:
      # ops should just be tensors
      self.assertFalse(isinstance(x,tuple))
    close_cross_compiler(xc)
class TestXCExpt(unittest.TestCase):
  """Runs the text-categorization toy experiment through the compilers and
  checks matrix shapes, pprint output, and the symbol-lookup API, both with
  and without type declarations."""

  def testTCToyTypes_wscaffold(self):
    # typed variant, driven through the generic Expt scaffold
    matrixdb.conf.ignore_types = False
    optdict,args = comline.parseCommandLine(
        ["--db", os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
         "--prog", os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
         "--trainData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"),
         "--testData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"),
         "--proppr"])
    optdict['prog'].setAllWeights()
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(optdict['prog'])
      learner = TESTED_LEARNERS[compilerClass](optdict['prog'],xc)
      Expt({
          'prog':optdict['prog'],
          'trainData':optdict['trainData'],
          'testData':optdict['testData'],
          'learner':learner,
          'targetMode':declare.asMode("predict/io")
      }).run()
      pbDoc = xc.db.onehot('pb','doc')
      # expected column counts per typed predicate
      self.checkXC(xc,'predict/io',pbDoc,{'negPair':115,'posPair':115,'hasWord':59,'weighted':115,'label':5})
      # some checks on the output of pprint
      lines = xc.pprint('predict/io')
      self.assertTrue(lines[0].find("SoftMaxFunction") >= 0)
      self.assertTrue(lines[1].find("SumFunction") >= 0)
      self.assertEqual(len(lines), 16)
      # some checks on misc xcomp API
      self.assertEqual(xc.inferenceOutputType('predict/io'),'label')
      pbId = xc.asSymbolId('pb',typeName='doc')
      pbSym = xc.asSymbol(pbId,typeName='doc')
      self.assertEqual(pbSym,'pb')
      # unknown symbols map to id -1
      self.assertEqual(xc.asSymbolId('this does not appear in the data',typeName='doc'), -1)

  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testTCToyTypes(self):
    # typed variant, driven through the TF-specific runExpt API
    matrixdb.conf.ignore_types = False
    optdict,args = comline.parseCommandLine(
        ["--db", os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
         "--prog", os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
         "--trainData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"),
         "--testData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"),
         "--proppr"])
    for compilerClass in [tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
                          tensorflowxcomp.SparseMatDenseMsgCrossCompiler]:
      xc = compilerClass(optdict['prog'])
      xc.runExpt(
          prog=optdict['prog'],
          trainData=optdict['trainData'],
          testData=optdict['testData'],
          targetMode=declare.asMode("predict/io"))
      # check trainability: a TF variable should be trainable iff the
      # corresponding DB relation is marked as a parameter
      for (functor,arity) in xc.db.matEncoding:
        v = xc.parameterFromDBToVariable(functor,arity)
        if v is not None:
          vIsTrainable = (v in tf.trainable_variables())
          vIsParameter = ((functor,arity) in xc.db.paramSet)
          self.assertEqual(vIsTrainable,vIsParameter)
      pbDoc = xc.db.onehot('pb','doc')
      self.checkXC(xc,'predict/io',pbDoc,{'negPair':115,'posPair':115,'hasWord':59,'weighted':115,'label':5})
      # some checks on the output of pprint
      lines = xc.pprint('predict/io')
      self.assertTrue(lines[0].find("SoftMaxFunction") >= 0)
      self.assertTrue(lines[1].find("SumFunction") >= 0)
      self.assertEqual(len(lines), 16)
      # some checks on misc xcomp API
      self.assertEqual(xc.inferenceOutputType('predict/io'),'label')
      pbId = xc.asSymbolId('pb',typeName='doc')
      pbSym = xc.asSymbol(pbId,typeName='doc')
      self.assertEqual(pbSym,'pb')
      self.assertEqual(xc.asSymbolId('this does not appear in the data',typeName='doc'), -1)
      close_cross_compiler(xc)

  def testTCToyIgnoringTypes_wscaffold(self):
    # untyped variant: every matrix collapses to the single 191-column space
    matrixdb.conf.ignore_types = True
    optdict,args = comline.parseCommandLine(
        ["--db", os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
         "--prog", os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
         "--trainData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"),
         "--testData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"),
         "--proppr"])
    optdict['prog'].setAllWeights()
    for compilerClass in TESTED_COMPILERS:
      xc = compilerClass(optdict['prog'])
      learner = TESTED_LEARNERS[compilerClass](optdict['prog'],xc)
      Expt({
          'prog':optdict['prog'],
          'trainData':optdict['trainData'],
          'testData':optdict['testData'],
          'learner':learner,
          'targetMode':declare.asMode("predict/io")
      }).run()
      pbDoc = xc.db.onehot('pb')
      self.checkXC(xc,'predict/io',pbDoc,collections.defaultdict(lambda:191))

  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testTCToyIgnoringTypes(self):
    matrixdb.conf.ignore_types = True
    optdict,args = comline.parseCommandLine(
        ["--db", os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
         "--prog", os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
         "--trainData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"),
         "--testData", os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"),
         "--proppr"])
    for compilerClass in [tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
                          tensorflowxcomp.SparseMatDenseMsgCrossCompiler]:
      xc = compilerClass(optdict['prog'])
      xc.runExpt(
          prog=optdict['prog'],
          trainData=optdict['trainData'],
          testData=optdict['testData'],
          targetMode=declare.asMode("predict/io"))
      pbDoc = xc.db.onehot('pb')
      self.checkXC(xc,'predict/io',pbDoc,collections.defaultdict(lambda:191))
      close_cross_compiler(xc)

  def checkXC(self,xc,mode,rawInput,expectedCols):
    """Verify the column counts of every DB matrix and of the inference
    output against expectedCols (a functor->ncols mapping)."""
    print('matrixdb.conf.ignore_types',matrixdb.conf.ignore_types)
    db = xc.db
    for (functor,arity),mat in list(db.matEncoding.items()):
      print(functor,arity,'shape',mat.shape)
      r,c = mat.shape
      self.assertEqual(c,expectedCols[functor])
    inferenceFun = xc.inferenceFunction(mode)
    y = inferenceFun(rawInput)
    r,c = y.shape
    self.assertEqual(c,expectedCols['label'])
class TestMultiModeXC(unittest.TestCase):
  """Tests learning on a dataset with multiple modes (predicates) to learn,
  both via the Expt scaffold and via a hand-rolled TF training loop."""

  def setUp(self):
    self.db = matrixdb.MatrixDB.loadFile(
        os.path.join(testtensorlog.TEST_DATA_DIR,'matchtoy.cfacts'))
    self.prog = program.ProPPRProgram.loadRules(
        os.path.join(testtensorlog.TEST_DATA_DIR,"matchtoy.ppr"),db=self.db)
    self.dset = dataset.Dataset.loadExamples(
        self.db, os.path.join(testtensorlog.TEST_DATA_DIR,'matchtoy-train.exam'),proppr=False)
    self.prog.setAllWeights()

  def testInScaffold(self):
    print(TESTED_COMPILERS)
    # NOTE(review): modesToLearn() appears to return a list, so the '> 1'
    # comparison is really just a non-empty check — confirm intent
    self.assertTrue(self.dset.modesToLearn() > 1)
    self.prog.setAllWeights()
    for compilerClass in TESTED_COMPILERS:
      print(compilerClass)
      xc = compilerClass(self.prog)
      # compile everything
      for mode in self.dset.modesToLearn():
        xc.ensureCompiled(mode)
      learner = TESTED_LEARNERS[compilerClass](self.prog,xc)
      testAcc,testXent = Expt({
          'prog':self.prog,
          'trainData':self.dset,
          'testData':self.dset,
          'learner':learner,
          'savedTestPredictions':'TestMultiModeXC.testInScaffold.%s.solutions.txt'%compilerClass.__name__
      }).run()
      print(testAcc)

  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testIt(self):
    # manual TF training loop over all modes: one train op per mode,
    # two epochs, then accuracy per mode
    self.assertTrue(self.dset.modesToLearn() > 1)
    for compilerClass in [tensorflowxcomp.DenseMatDenseMsgCrossCompiler,
                          tensorflowxcomp.SparseMatDenseMsgCrossCompiler]:
      xc = compilerClass(self.prog)
      # compile everything
      for mode in self.dset.modesToLearn():
        xc.ensureCompiled(mode,inputs=None)
      # check the variables
      optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
      session = tf.Session()
      session.run(tf.global_variables_initializer())
      # set up for training
      trainStep = {}
      for mode in self.dset.modesToLearn():
        (dataLossArgs,dataLossExpr) = xc.dataLoss(mode)
        trainStep[mode] = optimizer.minimize(dataLossExpr, var_list=xc.getParamVariables(mode))
      # train
      for i in range(2): #epochs
        for mode in self.dset.modesToLearn():
          X = self.dset.getX(mode)
          Y = self.dset.getY(mode)
          fd = xc.getFeedDict(mode,X,Y,wrapped=False)
          session.run(trainStep[mode],feed_dict=fd)
      # test
      for mode in self.dset.modesToLearn():
        X = self.dset.getX(mode)
        Y = self.dset.getY(mode)
        Y_ = xc.inferenceFunction(mode)(X)
        acc = xc.accuracy(mode,X,Y)
        print('mode',mode,'acc',acc)
      session.close()
      close_cross_compiler(xc)
class TestMatParams(unittest.TestCase):
  """Check that a matrix-valued predicate ('dabbrev'/2) can be marked as a
  parameter and trained through the simple.Compiler / TensorFlow pipeline."""

  def setUp(self):
    # scratch directory for cached artifacts; removed in tearDown
    self.cacheDir = tempfile.mkdtemp()

  def tearDown(self):
    # fix: mkdtemp() previously leaked a directory on every run
    import shutil
    shutil.rmtree(self.cacheDir, ignore_errors=True)

  def cacheFile(self,fileName):
    """Return the path of fileName inside the per-test cache directory."""
    return os.path.join(self.cacheDir,fileName)

  # fix: this test uses TensorFlow directly but (unlike every other TF test
  # in this file) was not guarded, so it crashed when TF was unavailable
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def testMToyMatParam(self):
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"matchtoy.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"matchtoy.ppr"))
    trainData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"matchtoy-train.exam"))
    tlog.db.markAsParameter('dabbrev',2)
    factDict = tlog.db.matrixAsPredicateFacts('dabbrev',2,tlog.db.matEncoding[('dabbrev',2)])
    print('before learning',len(factDict),'dabbrevs')
    self.assertTrue(len(factDict)==5)
    for f in sorted(factDict.keys()):
      print('>',str(f),factDict[f])
    # expt pipeline: build loss + SGD train step for the first mode
    mode = list(trainData.keys())[0]
    TX,TY = trainData[mode]
    inference = tlog.inference(mode)
    loss = tlog.loss(mode)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_step = optimizer.minimize(loss)
    train_batch_fd = {tlog.input_placeholder_name(mode):TX, tlog.target_output_placeholder_name(mode):TY}
    # fix: ensure the session is closed even if training raises
    session = tf.Session()
    try:
      session.run(tf.global_variables_initializer())
      for i in range(5):
        print('epoch',i+1)
        session.run(train_step, feed_dict=train_batch_fd)
      # copy the learned parameter values back into the database
      tlog.set_all_db_params_to_learned_values(session)
    finally:
      session.close()
@unittest.skipUnless(xctargets.tf,"Tensorflow not available")
class TestSimple(unittest.TestCase):
  """End-to-end tests of the tensorlog `simple` front-end API
  (simple.Compiler / simple.Builder) against the toy
  text-categorization data files under TEST_DATA_DIR.
  """

  def testEmptyRules(self):
    # should not throw an error
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"))

  def testIncrementalDBLoad(self):
    # Declare the schema first, then load the database incrementally
    # from several .cfacts files via `b.db += filename`.
    b = simple.Builder()
    predict,label,hasWord,posPair,negPair = b.predicates("predict,label,hasWord,posPair,negPair")
    doc_t,label_t,word_t,labelWordPair_t = b.types("doc_t,label_t,word_t,labelWordPair_t")
    b.schema += predict(doc_t,label_t) & label(label_t)
    b.schema += hasWord(doc_t,word_t) & posPair(word_t,labelWordPair_t) & negPair(word_t,labelWordPair_t)
    for basename in "textcattoy_corpus.cfacts textcattoy_labels.cfacts textcattoy_pairs.cfacts".split(" "):
      b.db += os.path.join(testtensorlog.TEST_DATA_DIR, basename)
    tlog = simple.Compiler(db=b.db)
    # Spot-check the number of nonzeros in a few loaded relations.
    for (functor,arity,nnz) in [('hasWord',2,99),('label',1,2),('negPair',2,56)]:
      m = tlog.db.matEncoding[(functor,arity)]
      self.assertTrue(m.nnz == nnz)

  def testBatch(self):
    # Train on the full batch, check test-set accuracy improves from
    # <0.6 to >=0.9, then round-trip the learned DB through
    # serialization and re-check accuracy in a fresh graph.
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"))
    trainData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    testData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"))
    mode = list(trainData.keys())[0]
    TX,TY = trainData[mode]
    UX,UY = testData[mode]
    # Accuracy is measured against the held-out test set.
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    loss = tlog.loss(mode)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_step = optimizer.minimize(loss)
    train_batch_fd = {tlog.input_placeholder_name(mode):TX, tlog.target_output_placeholder_name(mode):TY}
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc0 = session.run(accuracy, feed_dict=test_batch_fd)
    print('initial accuracy',acc0)
    self.assertTrue(acc0<0.6)
    for i in range(10):
      print('epoch',i+1)
      session.run(train_step, feed_dict=train_batch_fd)
    acc1 = session.run(accuracy, feed_dict=test_batch_fd)
    print('final accuracy',acc1)
    self.assertTrue(acc1>=0.9)
    # test a round-trip serialization
    # saves the db
    cacheDir = tempfile.mkdtemp()
    db_file = os.path.join(cacheDir,'simple.db')
    tlog.set_all_db_params_to_learned_values(session)
    tlog.serialize_db(db_file)
    # load everything into a new graph and don't reset the learned params
    new_graph = tf.Graph()
    with new_graph.as_default():
      tlog2 = simple.Compiler(
          db=db_file,
          prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"),
          autoset_db_params=False)
      inference2 = tlog2.inference(mode)
      trueY2 = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY2')
      correct2 = tf.equal(tf.argmax(trueY2,1), tf.argmax(inference2,1))
      accuracy2 = tf.reduce_mean(tf.cast(correct2, tf.float32))
      session2 = tf.Session()
      session2.run(tf.global_variables_initializer())
      test_batch_fd2 = {tlog2.input_placeholder_name(mode):UX, trueY2.name:UY}
      acc3 = session2.run(accuracy2, feed_dict=test_batch_fd2)
      print('accuracy after round-trip serialization',acc3)
      self.assertTrue(acc3>=0.9)
    session.close()

  def testMinibatch(self):
    tlog = simple.Compiler(
        db=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"),
        prog=os.path.join(testtensorlog.TEST_DATA_DIR,"textcat3.ppr"))
    self.runTextCatLearner(tlog)

  def runTextCatLearner(self,tlog):
    # Shared helper (not itself a test): train the text-categorization
    # model with minibatches of size 2 and assert that held-out
    # accuracy goes from <0.6 to >=0.9.
    trainData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    testData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"))
    mode = list(trainData.keys())[0]
    UX,UY = testData[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    loss = tlog.loss(mode)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_step = optimizer.minimize(loss)
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc0 = session.run(accuracy, feed_dict=test_batch_fd)
    print('initial accuracy',acc0)
    self.assertTrue(acc0<0.6)
    for i in range(10):
      print('epoch',i+1, end=' ')
      for mode,(TX,TY) in tlog.minibatches(trainData,batch_size=2):
        print('.', end=' ')
        train_minibatch_fd = {tlog.input_placeholder_name(mode):TX, tlog.target_output_placeholder_name(mode):TY}
        session.run(train_step, feed_dict=train_minibatch_fd)
      print('epoch',i+1,'finished')
    acc1 = session.run(accuracy, feed_dict=test_batch_fd)
    print('final accuracy',acc1)
    self.assertTrue(acc1>=0.9)
    session.close()

  def testBuilder1(self):
    # Exercise the rule-construction DSL: plain rules, rules tagged with
    # rule ids via `//`, and rules with feature expressions; then check
    # the string form of every parsed rule.
    b = simple.Builder()
    X,Y,Z = b.variables("X Y Z")
    aunt,parent,sister,wife = b.predicates("aunt parent sister wife")
    uncle = b.predicate("uncle")
    b += aunt(X,Y) <= uncle(X,Z) & wife(Z,Y)
    b += aunt(X,Y) <= parent(X,Z) & sister(Z,Y)
    r1 = b.rule_id("ruleid_t","r1")
    r2 = b.rule_id("ruleid_t","r2")
    b += aunt(X,Y) <= uncle(X,Z) & wife(Z,Y) // r1
    b += aunt(X,Y) <= parent(X,Z) & sister(Z,Y) // r2
    feature,description = b.predicates("feature description")
    weight = b.predicate("weight")
    F = b.variable("F")
    D = b.variable("D")
    b += aunt(X,Y) <= uncle(X,Z) & wife(Z,Y) // (weight(F) | description(X,D) & feature(X,F))
    b.rules.listing()
    rs = b.rules.rulesFor(parser.Goal('aunt',[X,Y]))
    self.assertEqual(str(rs[0]), "aunt(X,Y) :- uncle(X,Z), wife(Z,Y).")
    self.assertEqual(str(rs[1]), "aunt(X,Y) :- parent(X,Z), sister(Z,Y).")
    self.assertEqual(str(rs[2]), "aunt(X,Y) :- uncle(X,Z), wife(Z,Y) {weight(R1) : assign(R1,r1,ruleid_t)}.")
    self.assertEqual(str(rs[3]), "aunt(X,Y) :- parent(X,Z), sister(Z,Y) {weight(R2) : assign(R2,r2,ruleid_t)}.")
    self.assertEqual(str(rs[4]), "aunt(X,Y) :- uncle(X,Z), wife(Z,Y) {weight(F) : description(X,D),feature(X,F)}.")

  def testBuilder2(self):
    # Build the text-categorization rules via the Builder DSL and make
    # sure the learner still reaches the expected accuracy.
    b = simple.Builder()
    predict,assign,weighted,hasWord,posPair,negPair = b.predicates("predict assign weighted hasWord posPair negPair")
    X,Pos,Neg,F,W = b.variables("X Pos Neg F W")
    b += predict(X,Pos) <= assign(Pos,'pos','label') // (weighted(F) | hasWord(X,W) & posPair(W,F))
    b += predict(X,Neg) <= assign(Neg,'neg','label') // (weighted(F) | hasWord(X,W) & negPair(W,F))
    dbSpec = os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts")
    self.runTextCatLearner(simple.Compiler(db=dbSpec,prog=b.rules))

  def testBuilder3(self):
    # Same as testBuilder2, but with explicit type declarations in the
    # Builder schema and an untyped .cfacts file.
    b = simple.Builder()
    predict,assign,weighted,hasWord,posPair,negPair,label = b.predicates("predict assign weighted hasWord posPair negPair label")
    doc_t,label_t,word_t,labelWordPair_t = b.types("doc_t label_t word_t labelWordPair_t")
    b.schema += predict(doc_t,label_t)
    b.schema += hasWord(doc_t,word_t)
    b.schema += posPair(word_t,labelWordPair_t)
    b.schema += negPair(word_t,labelWordPair_t)
    b.schema += label(label_t)
    X,Pos,Neg,F,W = b.variables("X Pos Neg F W")
    b.rules += predict(X,Pos) <= assign(Pos,'pos','label_t') // (weighted(F) | hasWord(X,W) & posPair(W,F))
    b.rules += predict(X,Neg) <= assign(Neg,'neg','label_t') // (weighted(F) | hasWord(X,W) & negPair(W,F))
    b.db = os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy.cfacts")
    self.runTextCatLearner(simple.Compiler(db=b.db, prog=b.rules))
class TestReparameterizationAndTypedLoading(unittest.TestCase):
  """Regression test: parameter values loaded from a typed .cfacts
  declaration must survive a round trip through the cross-compiler's
  exportAllLearnedParams() unchanged.
  """

  def testBugWasFixed(self):
    db = matrixdb.MatrixDB()
    # A typed unary relation r over lo_or_hi_t, marked as a learnable
    # parameter; the first line is a type declaration, not a fact.
    db.addLines(["# :- r(lo_or_hi_t)\n",
                 "\t".join("r low 0.1".split()) + "\n",
                 "\t".join("r hi 0.9".split()) + "\n"])
    db.markAsParameter('r',1)
    prog = program.Program(db=db)
    typeName = db.schema.getArgType("r",1,0)
    idLow = db.schema.getId(typeName,"low")
    idHi = db.schema.getId(typeName,"hi")
    # Sanity-check the values as loaded, before any compilation.
    db_r = db.matEncoding[('r',1)]
    self.approxEqual(db_r[0,idLow], 0.1)
    self.approxEqual(db_r[0,idHi], 0.9)
    xc = tensorflowxcomp.SparseMatDenseMsgCrossCompiler(prog)
    # NOTE(review): v_r is unused afterwards -- presumably the call is
    # needed for its side effect of materializing r in the graph; confirm.
    v_r = xc._vector(declare.asMode("r(i)"))
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    # Exporting the (untrained) learned params must not change the values.
    xc.exportAllLearnedParams()
    print('exported to xc',db.matEncoding[('r',1)])
    db_r = db.matEncoding[('r',1)]
    self.approxEqual(db_r[0,idLow], 0.1)
    self.approxEqual(db_r[0,idHi], 0.9)

  def approxEqual(self,a,b):
    # Helper: assert |a - b| < 1e-4.
    self.assertTrue(abs(float(a)-b) < 0.0001)
class TestPlugins(unittest.TestCase):
  """Tests for user-defined plugin predicates (program.Plugins), which
  inject arbitrary Python callables into compiled tensorlog programs.
  Each plugins.define(...) takes a mode string, the implementation
  callable, and a callable mapping input type(s) to the output type.
  """

  def test_identity_io(self):
    # Identity plugin used in input-output (io) mode.
    ruleStrings = ['predict(X,Y) :- assign(Pos,pos,label),udp1(Pos,Y) {weighted(F): hasWord(X,W),posPair(W,F)}.',
                   'predict(X,Y) :- assign(Neg,neg,label),udp1(Neg,Y) {weighted(F): hasWord(X,W),negPair(W,F)}.']
    plugins = program.Plugins()
    plugins.define('udp1/io', lambda x:x, lambda inputType:'label')
    self.check_learning_with_udp(ruleStrings,plugins)

  def test_identity_oi(self):
    # Identity plugin used in output-input (oi) mode.
    ruleStrings = ['predict(X,Y) :- assign(Pos,pos,label),udp2(Y,Pos) {weighted(F): hasWord(X,W),posPair(W,F)}.',
                   'predict(X,Y) :- assign(Neg,neg,label),udp2(Y,Neg) {weighted(F): hasWord(X,W),negPair(W,F)}.']
    plugins = program.Plugins()
    plugins.define('udp2/oi', lambda x:x, lambda inputType:'label')
    self.check_learning_with_udp(ruleStrings,plugins)

  def test_double_io1(self):
    # Plugin that doubles its input, applied to the label vector.
    ruleStrings = ['predict(X,Y) :- assign(Pos,pos,label),udp3(Pos,Y) {weighted(F): hasWord(X,W),posPair(W,F)}.',
                   'predict(X,Y) :- assign(Neg,neg,label),udp3(Neg,Y) {weighted(F): hasWord(X,W),negPair(W,F)}.']
    plugins = program.Plugins()
    plugins.define('udp3/io', lambda x:2*x, lambda inputType:'label')
    self.check_learning_with_udp(ruleStrings,plugins)

  def test_double_io2(self):
    # Doubling plugin applied inside the rule-feature expressions, with
    # the output type equal to the input type.
    ruleStrings = ['predict(X,Pos) :- assign(Pos,pos,label) {weighted(F): hasWord(X,W),double(W,W2),posPair(W2,F)}.',
                   'predict(X,Neg) :- assign(Neg,neg,label) {weighted(F2): hasWord(X,W),negPair(W,F),double(F,F2)}.']
    plugins = program.Plugins()
    plugins.define('double/io', lambda x:2*x, lambda inputType:inputType)
    self.check_learning_with_udp(ruleStrings,plugins)

  def test_kw_i(self):
    # Input-less (i-mode) plugins that return fixed keyword vectors
    # built from the database's one-hot encodings.
    ruleStrings = ['predict(X,Pos) :- assign(Pos,pos,label),hasWord(X,W),poskw(W).',
                   'predict(X,Neg) :- assign(Neg,neg,label),hasWord(X,W),negkw(W).']
    plugins = program.Plugins()
    db = matrixdb.MatrixDB.loadFile(os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"))
    poskw_v = (db.onehot('little','word') + db.onehot('red','word')).todense()
    negkw_v = (db.onehot('big','word') + db.onehot('job','word') + db.onehot('huge','word')).todense()
    plugins.define('poskw/i', lambda:poskw_v, lambda:'word')
    plugins.define('negkw/i', lambda:negkw_v, lambda:'word')
    self.check_udp(ruleStrings,plugins)

  def check_udp(self,ruleStrings,plugins):
    # Helper (not a test): compile the rules with the given plugins and
    # run inference on the test set only -- no training, and the final
    # accuracy is printed but not asserted.
    db = matrixdb.MatrixDB.loadFile(os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts"))
    rules = testtensorlog.rules_from_strings(ruleStrings)
    prog = program.ProPPRProgram(rules=rules,db=db,plugins=plugins)
    mode = declare.asMode("predict/io")
    prog.compile(mode)
    fun = prog.function[(mode,0)]
    print("\n".join(fun.pprint()))
    tlog = simple.Compiler(db=db, prog=prog)
    testData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"))
    mode = list(testData.keys())[0]
    UX,UY = testData[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc1 = session.run(accuracy, feed_dict=test_batch_fd)
    print('final accuracy',acc1)
    session.close()

  # - so there's not a simple fix
  # NOTE(review): the line above looks like the tail of a longer
  # explanation; the 'notest_' prefix below keeps unittest from
  # discovering this test.
  def notest_isect_iio(self):
    # Disabled test: a two-input (iio) plugin computing an
    # element-wise product of its two inputs.
    bpcompiler.conf.trace = True
    ruleStrings = ['predict(X,Y) :- hasWord(X,W),posPair(W,P1),negPair(W,P2),isect(P1,P2,Y).']
    plugins = program.Plugins()
    plugins.define('isect/iio', lambda x1,x2:x1*x2, lambda t1,t2:t1)
    self.assertTrue(plugins.isDefined(declare.asMode('isect/iio')))
    self.check_learning_with_udp(ruleStrings,plugins)

  def argmax(self):
    # NOTE(review): name lacks the 'test' prefix, so unittest discovery
    # skips this method -- possibly deliberate, like notest_isect_iio.
    bpcompiler.conf.trace = True
    ruleStrings = ['predict(X,Y):-olympics(X,Z),nations(Z),argmax(Z,Y).']
    plugins = program.Plugins()
    plugins.define('argmax/io',lambda x1:tf.nn.softmax(x1), lambda t1:t1)
    db = matrixdb.MatrixDB.loadFile(os.path.join(testtensorlog.TEST_DATA_DIR,'argmax.cfacts'))
    rules = testtensorlog.rules_from_strings(ruleStrings)
    prog = program.ProPPRProgram(rules=rules,db=db,plugins=plugins)
    prog.setAllWeights()
    mode = declare.asMode("predict/io")
    prog.compile(mode)
    fun = prog.function[(mode,0)]
    print("\n".join(fun.pprint()))
    tlog = simple.Compiler(db=db, prog=prog)
    data = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"argmax.exam"))
    mode = list(data.keys())[0]
    UX,UY = data[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc0 = session.run(accuracy, feed_dict=test_batch_fd)
    print('initial accuracy',acc0)
    self.assertTrue(acc0>0.9)
    session.close()

  # NOTE(review): skipUnless on a non-test helper; when TF is missing,
  # calling it raises SkipTest from inside the calling test.
  @unittest.skipUnless(xctargets.tf,"Tensorflow not available")
  def check_learning_with_udp(self,ruleStrings,plugins,dbfile=os.path.join(testtensorlog.TEST_DATA_DIR,"textcattoy3.cfacts")):
    # Helper (not a test): compile the rules with the given plugins,
    # train for 10 epochs, and assert held-out accuracy goes from
    # <0.6 to >=0.9.
    db = matrixdb.MatrixDB.loadFile(dbfile)
    rules = testtensorlog.rules_from_strings(ruleStrings)
    prog = program.ProPPRProgram(rules=rules,db=db,plugins=plugins)
    prog.setAllWeights()
    mode = declare.asMode("predict/io")
    prog.compile(mode)
    fun = prog.function[(mode,0)]
    print("\n".join(fun.pprint()))
    tlog = simple.Compiler(db=db, prog=prog)
    trainData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytrain.exam"))
    testData = tlog.load_dataset(os.path.join(testtensorlog.TEST_DATA_DIR,"toytest.exam"))
    mode = list(trainData.keys())[0]
    TX,TY = trainData[mode]
    UX,UY = testData[mode]
    inference = tlog.inference(mode)
    trueY = tf.placeholder(tf.float32, shape=UY.shape, name='tensorlog/trueY')
    correct = tf.equal(tf.argmax(trueY,1), tf.argmax(inference,1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    test_batch_fd = {tlog.input_placeholder_name(mode):UX, trueY.name:UY}
    loss = tlog.loss(mode)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_step = optimizer.minimize(loss)
    train_batch_fd = {tlog.input_placeholder_name(mode):TX, tlog.target_output_placeholder_name(mode):TY}
    session = tf.Session()
    session.run(tf.global_variables_initializer())
    acc0 = session.run(accuracy, feed_dict=test_batch_fd)
    print('initial accuracy',acc0)
    self.assertTrue(acc0<0.6)
    for i in range(10):
      print('epoch',i+1)
      session.run(train_step, feed_dict=train_batch_fd)
    acc1 = session.run(accuracy, feed_dict=test_batch_fd)
    print('final accuracy',acc1)
    self.assertTrue(acc1>=0.9)
    session.close()
if __name__ == "__main__":
  logging.basicConfig(level=logging.INFO)
  # Optional command-line keywords filter the module-global
  # TESTED_COMPILERS list by backend (theano/tensorflow) and by matrix
  # density (dense/sparse).
  if 'theano' in sys.argv[1:]:
    TESTED_COMPILERS = [c for c in TESTED_COMPILERS if c.__module__.endswith("theanoxcomp")]
  if 'tensorflow' in sys.argv[1:]:
    TESTED_COMPILERS = [c for c in TESTED_COMPILERS if c.__module__.endswith("tensorflowxcomp")]
  if 'dense' in sys.argv[1:]:
    TESTED_COMPILERS = [c for c in TESTED_COMPILERS if c.__name__.startswith("Dense")]
  if 'sparse' in sys.argv[1:]:
    TESTED_COMPILERS = [c for c in TESTED_COMPILERS if c.__name__.startswith("Sparse")]
  # Strip the filter keywords so unittest.main() doesn't try to
  # interpret them as test names.
  sys.argv = [a for a in sys.argv if a not in "theano tensorflow dense sparse".split()]
  print('TESTED_COMPILERS',TESTED_COMPILERS)
  unittest.main()
| true | true |
f73d55686fda574f44e36edee36447e99b6ea541 | 3,326 | py | Python | lcm/ns_vnfs/biz/wait_job.py | onap/vfc-nfvo-lcm | b7d4d015fa96a246d73d863092d3362afcedc284 | [
"Apache-2.0"
] | 4 | 2018-08-29T02:51:38.000Z | 2021-11-16T11:36:11.000Z | lcm/ns_vnfs/biz/wait_job.py | onap/vfc-nfvo-lcm | b7d4d015fa96a246d73d863092d3362afcedc284 | [
"Apache-2.0"
] | null | null | null | lcm/ns_vnfs/biz/wait_job.py | onap/vfc-nfvo-lcm | b7d4d015fa96a246d73d863092d3362afcedc284 | [
"Apache-2.0"
] | 1 | 2019-05-12T08:21:19.000Z | 2019-05-12T08:21:19.000Z | # Copyright 2016 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import datetime
import logging
import math
from lcm.pub.utils.jobutil import JobUtil
from lcm.jobs.enum import JOB_MODEL_STATUS
from lcm.pub.msapi.vnfmdriver import query_vnfm_job
from lcm.pub.utils.values import ignore_case_get
logger = logging.getLogger(__name__)
def calc_progress(vnfm_progress, target_range=None):
    """Scale a VNFM-reported progress percentage onto a sub-range.

    Args:
        vnfm_progress: progress value from the VNFM (int or numeric
            string); falsy values (None, 0, '') are treated as 0.
    Args continued:
        target_range: two-element [start, end] range to scale into;
            defaults to [0, 100].

    Returns:
        int: the progress mapped linearly (floor-rounded) into
        target_range, or the raw value unchanged if it exceeds 100.
        NOTE(review): values >100 are passed through unscaled --
        presumably out-of-band status markers; confirm with callers.
    """
    if not target_range:
        target_range = [0, 100]
    progress = int(vnfm_progress) if vnfm_progress else 0
    if progress > 100:
        return progress
    # Fix: the original reused the name `target_range` for the scalar
    # result, shadowing the list parameter; use a distinct name instead.
    span = target_range[1] - target_range[0]
    scaled = int(math.floor(float(span) / 100 * progress))
    return scaled + target_range[0]
def default_callback(vnfo_job_id, vnfm_job_id, job_status, jobs, progress_range, **kwargs):
    """Record VNFM job progress into the orchestrator's job table.

    Each entry in `jobs` (the VNFM's response history) and then the
    latest `job_status` are written via JobUtil.add_job_status, with
    their progress rescaled into `progress_range`.

    Returns:
        (is_done, status): is_done is True when the VNFM status is
        ERROR or FINISHED; otherwise (False, PROCESSING).
    """
    def record(entry):
        # Persist one status record with its rescaled progress.
        JobUtil.add_job_status(
            vnfo_job_id,
            calc_progress(ignore_case_get(entry, 'progress'), progress_range),
            ignore_case_get(entry, 'statusdescription'),
            ignore_case_get(entry, 'errorcode'))

    for history_entry in jobs:
        record(history_entry)
    record(job_status)

    current_status = ignore_case_get(job_status, 'status')
    if current_status in (JOB_MODEL_STATUS.ERROR, JOB_MODEL_STATUS.FINISHED):
        return True, current_status
    return False, JOB_MODEL_STATUS.PROCESSING
def wait_job_finish(vnfm_id, vnfo_job_id, vnfm_job_id, progress_range=None, timeout=600, job_callback=default_callback, **kwargs):
    """Poll a VNFM job until the callback reports completion or `timeout`
    seconds elapse.

    Args:
        vnfm_id: id of the VNFM to query.
        vnfo_job_id: orchestrator-side job id (status records go here).
        vnfm_job_id: VNFM-side job id being polled.
        progress_range: [start, end] range progress is scaled into;
            defaults to [0, 100].
        timeout: maximum total polling time in seconds.
        job_callback: callable invoked with each new status batch; must
            return (is_done, status), like default_callback.

    Returns:
        The final job status from the callback, or
        JOB_MODEL_STATUS.TIMEOUT if the deadline passes first.
    """
    progress_range = [0, 100] if not progress_range else progress_range
    response_id = 0
    query_interval = 2  # seconds between polls
    start_time = end_time = datetime.datetime.now()
    while (end_time - start_time).seconds < timeout:
        query_status, result = query_vnfm_job(vnfm_id, vnfm_job_id, response_id)
        time.sleep(query_interval)
        end_time = datetime.datetime.now()
        if not query_status:
            # query failed this round; keep retrying until timeout
            continue
        job_status = ignore_case_get(result, 'responsedescriptor')
        response_id_new = ignore_case_get(job_status, 'responseid')
        if response_id_new == response_id:
            # no new progress reported since the last poll
            continue
        response_id = response_id_new
        jobs = ignore_case_get(job_status, 'responsehistorylist', [])
        if jobs:
            # reverse the history before handing it to the callback --
            # presumably to record entries in chronological order;
            # TODO(review): confirm the VNFM's ordering.
            jobs.reverse()
        is_end, status = job_callback(vnfo_job_id, vnfm_job_id, job_status, jobs, progress_range, **kwargs)
        if is_end:
            return status
    return JOB_MODEL_STATUS.TIMEOUT
| 42.101266 | 130 | 0.699639 |
import time
import datetime
import logging
import math
from lcm.pub.utils.jobutil import JobUtil
from lcm.jobs.enum import JOB_MODEL_STATUS
from lcm.pub.msapi.vnfmdriver import query_vnfm_job
from lcm.pub.utils.values import ignore_case_get
logger = logging.getLogger(__name__)
def calc_progress(vnfm_progress, target_range=None):
    """Scale a VNFM progress percentage into `target_range`
    (default [0, 100]); values above 100 are returned unchanged."""
    target_range = [0, 100] if not target_range else target_range
    progress = int(vnfm_progress) if vnfm_progress else 0
    if progress > 100:
        return progress
    floor_progress = int(math.floor(float(target_range[1] - target_range[0]) / 100 * progress))
    # NOTE(review): `target_range` is reused here to hold the scalar result.
    target_range = floor_progress + target_range[0]
    return target_range
def default_callback(vnfo_job_id, vnfm_job_id, job_status, jobs, progress_range, **kwargs):
    """Record each history entry in `jobs` plus the latest `job_status`
    via JobUtil.add_job_status, and report whether the job has ended.

    Returns (is_done, status): True with the VNFM status when it is
    ERROR or FINISHED, otherwise (False, PROCESSING).
    """
    for job in jobs:
        progress = calc_progress(ignore_case_get(job, 'progress'),
                                 progress_range)
        JobUtil.add_job_status(vnfo_job_id, progress,
                               ignore_case_get(job, 'statusdescription'),
                               ignore_case_get(job, 'errorcode'))
    latest_progress = calc_progress(ignore_case_get(job_status, 'progress'),
                                    progress_range)
    JobUtil.add_job_status(vnfo_job_id, latest_progress,
                           ignore_case_get(job_status, 'statusdescription'),
                           ignore_case_get(job_status, 'errorcode'))
    jobstatus = ignore_case_get(job_status, 'status')
    if jobstatus in (JOB_MODEL_STATUS.ERROR, JOB_MODEL_STATUS.FINISHED):
        return True, jobstatus
    return False, JOB_MODEL_STATUS.PROCESSING
def wait_job_finish(vnfm_id, vnfo_job_id, vnfm_job_id, progress_range=None, timeout=600, job_callback=default_callback, **kwargs):
    """Poll the VNFM job every 2 seconds until `job_callback` reports
    completion or `timeout` seconds elapse; returns the final status or
    JOB_MODEL_STATUS.TIMEOUT."""
    progress_range = [0, 100] if not progress_range else progress_range
    response_id = 0
    query_interval = 2  # seconds between polls
    start_time = end_time = datetime.datetime.now()
    while (end_time - start_time).seconds < timeout:
        query_status, result = query_vnfm_job(vnfm_id, vnfm_job_id, response_id)
        time.sleep(query_interval)
        end_time = datetime.datetime.now()
        if not query_status:
            # query failed this round; retry until timeout
            continue
        job_status = ignore_case_get(result, 'responsedescriptor')
        response_id_new = ignore_case_get(job_status, 'responseid')
        if response_id_new == response_id:
            # no new progress since the last poll
            continue
        response_id = response_id_new
        jobs = ignore_case_get(job_status, 'responsehistorylist', [])
        if jobs:
            jobs.reverse()
        is_end, status = job_callback(vnfo_job_id, vnfm_job_id, job_status, jobs, progress_range, **kwargs)
        if is_end:
            return status
    return JOB_MODEL_STATUS.TIMEOUT
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.