hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
49b6084783e543ccee0a668b59af47189b4cf26f | 802 | py | Python | oauth_provider/managers.py | articheck/django-oauth-plus | ed4201a87ec07fc5bbe52d618a62d6c311e810e4 | [
"BSD-3-Clause"
] | null | null | null | oauth_provider/managers.py | articheck/django-oauth-plus | ed4201a87ec07fc5bbe52d618a62d6c311e810e4 | [
"BSD-3-Clause"
] | 1 | 2019-06-21T07:42:46.000Z | 2019-06-21T17:49:00.000Z | oauth_provider/managers.py | articheck/django-oauth-plus | ed4201a87ec07fc5bbe52d618a62d6c311e810e4 | [
"BSD-3-Clause"
] | null | null | null | from django.db import models
class TokenManager(models.Manager):
    """Manager adding a convenience constructor for OAuth tokens."""

    def create_token(self, consumer, token_type, timestamp, scope,
                     user=None, callback=None, callback_confirmed=False):
        """Fetch-or-create a token; newly created ones get random key/secret."""
        lookup = {
            'consumer': consumer,
            'token_type': token_type,
            'timestamp': timestamp,
            'scope': scope,
            'user': user,
            'callback': callback,
            'callback_confirmed': callback_confirmed,
        }
        token, was_created = self.get_or_create(**lookup)
        if was_created:
            # Only freshly created rows need key/secret material generated.
            token.generate_random_codes()
        return token
| 44.555556 | 82 | 0.483791 |
ee0273207a272d23f8177822b231a3aede7b7ea7 | 7,187 | py | Python | mercury/system/pubsub.py | Accenture/mercury-python | 1119916b1046c519d1588a7fe6bf924722ca0e0a | [
"Apache-2.0"
] | 6 | 2019-04-30T17:32:39.000Z | 2021-08-21T10:56:53.000Z | mercury/system/pubsub.py | Accenture/mercury-python | 1119916b1046c519d1588a7fe6bf924722ca0e0a | [
"Apache-2.0"
] | 2 | 2019-09-23T04:09:51.000Z | 2020-03-25T14:58:56.000Z | mercury/system/pubsub.py | Accenture/mercury-python | 1119916b1046c519d1588a7fe6bf924722ca0e0a | [
"Apache-2.0"
] | 7 | 2019-05-10T00:35:17.000Z | 2020-11-22T11:16:18.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018-2021 Accenture Technology
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from mercury.system.singleton import Singleton
from mercury.platform import Platform
from mercury.system.po import PostOffice
from mercury.system.models import EventEnvelope, AppException
from mercury.system.utility import Utility
@Singleton
class PubSub:
    """Client-side facade for the platform's publish/subscribe feature.

    Every operation is delegated to the 'pub.sub.controller' service through
    the post office. Subscriptions made through this class are cached locally
    in ``self.subscription`` (topic -> {route -> parameters}) so they can be
    replayed when a 'subscription_sync' event arrives (e.g. after the
    connection to the controller is restored).
    """

    def __init__(self):
        self.platform = Platform()
        self.po = PostOffice()
        self.util = Utility()
        # topic -> {route -> normalized subscription parameters}
        self.subscription = dict()

        def subscription_sync(headers: dict, body: any):
            # Replay every cached subscription so the controller's state
            # matches ours.
            if 'type' in headers and headers['type'] == 'subscription_sync':
                if len(self.subscription) > 0:
                    for topic in self.subscription:
                        route_map = self.subscription[topic]
                        for route in route_map:
                            parameters = route_map[route]
                            self.platform.log.info('Update subscription '+topic+' -> '+route)
                            self.subscribe(topic, route, parameters)
                else:
                    self.platform.log.info('No subscription to update')

        self.platform.register('pub.sub.sync', subscription_sync, 1, is_private=True)

    def feature_enabled(self):
        """Return True if the pub/sub feature is available on this deployment."""
        result = self.po.request('pub.sub.controller', 10.0, headers={'type': 'feature'})
        return self._normalize_result(result, True)

    def list_topics(self):
        """Return the list of known topics."""
        result = self.po.request('pub.sub.controller', 10.0, headers={'type': 'list'})
        return self._normalize_result(result, list())

    def exists(self, topic: str):
        """Return True if *topic* exists (False for non-string input)."""
        if isinstance(topic, str):
            result = self.po.request('pub.sub.controller', 10.0, headers={'type': 'exists', 'topic': topic})
            return self._normalize_result(result, True)
        else:
            return False

    def create_topic(self, topic: str):
        """Create *topic*. Raises ValueError unless topic is a str."""
        if isinstance(topic, str):
            result = self.po.request('pub.sub.controller', 10.0, headers={'type': 'create', 'topic': topic})
            return self._normalize_result(result, True)
        else:
            raise ValueError("topic must be str")

    def delete_topic(self, topic: str):
        """Delete *topic*. Raises ValueError unless topic is a str."""
        if isinstance(topic, str):
            result = self.po.request('pub.sub.controller', 10.0, headers={'type': 'delete', 'topic': topic})
            return self._normalize_result(result, True)
        else:
            raise ValueError("topic must be str")

    def publish(self, topic: str, headers: dict = None, body: any = None):
        """Publish an event (headers + body) to *topic*."""
        if isinstance(topic, str):
            # encode payload so headers/body travel as one envelope body
            payload = dict()
            payload['body'] = body
            payload['headers'] = self._normalize_headers(headers)
            result = self.po.request('pub.sub.controller', 10.0,
                                     headers={'type': 'publish', 'topic': topic}, body=payload)
            return self._normalize_result(result, True)
        else:
            raise ValueError("topic must be str")

    def subscribe(self, topic: str, route: str, parameters: list = None):
        """Subscribe a locally registered *route* to *topic*.

        The subscription is cached in ``self.subscription`` so it can be
        replayed by the 'pub.sub.sync' handler. The route must already be
        registered with the platform.
        """
        if isinstance(topic, str) and isinstance(route, str):
            if self.platform.has_route(route):
                normalized_config = self._normalize_parameters(parameters)
                result = self.po.request('pub.sub.controller', 10.0, body=normalized_config,
                                         headers={'type': 'subscribe', 'topic': topic, 'route': route})
                done = self._normalize_result(result, True)
                if done:
                    if topic not in self.subscription:
                        self.subscription[topic] = dict()
                        self.platform.log.info('Subscribed topic ' + topic)
                    route_map: dict = self.subscription[topic]
                    if route not in route_map:
                        route_map[route] = normalized_config
                        self.platform.log.info('Adding '+route+' to topic '+topic)
                return done
            else:
                raise ValueError("Unable to subscribe topic " + topic + " because route " + route + " not registered")
        else:
            raise ValueError("topic and route must be str")

    def unsubscribe(self, topic: str, route: str):
        """Remove *route* from *topic* and drop it from the local cache."""
        if isinstance(topic, str) and isinstance(route, str):
            if self.platform.has_route(route):
                result = self.po.request('pub.sub.controller', 10.0,
                                         headers={'type': 'unsubscribe', 'topic': topic, 'route': route})
                done = self._normalize_result(result, True)
                if done:
                    if topic in self.subscription:
                        route_map: dict = self.subscription[topic]
                        if route in route_map:
                            route_map.pop(route)
                            self.platform.log.info('Removing ' + route + ' from topic ' + topic)
                            if len(route_map) == 0:
                                self.subscription.pop(topic)
                                self.platform.log.info('Unsubscribed topic ' + topic)
                return done
            else:
                raise ValueError("Unable to unsubscribe topic " + topic + " because route " + route + " not registered")
        else:
            raise ValueError("topic and route must be str")

    @staticmethod
    def _normalize_result(result: EventEnvelope, result_obj: any):
        """Unwrap a controller response.

        Returns the body when status is 200 and the body has the same type as
        ``result_obj``; raises AppException otherwise. A response that is not
        an EventEnvelope falls through and yields None.
        """
        if isinstance(result, EventEnvelope):
            if result.get_status() == 200:
                if isinstance(result.get_body(), type(result_obj)):
                    return result.get_body()
                else:
                    raise AppException(500, str(result.get_body()))
            else:
                raise AppException(result.get_status(), str(result.get_body()))

    @staticmethod
    def _normalize_headers(headers: dict):
        """Coerce all header keys and values to str; None becomes {}."""
        if headers is None:
            return dict()
        if isinstance(headers, dict):
            result = dict()
            for h in headers:
                result[str(h)] = str(headers[h])
            return result
        else:
            raise ValueError("headers must be dict of str key-values")

    @staticmethod
    def _normalize_parameters(parameters: list):
        """Coerce all parameters to str; None becomes []."""
        if parameters is None:
            return list()
        if isinstance(parameters, list):
            result = list()
            for h in parameters:
                result.append(str(h))
            return result
        else:
            # Fixed: previous message said "headers", copied from
            # _normalize_headers; this method validates parameters.
            raise ValueError("parameters must be a list of str")
| 43.295181 | 120 | 0.57284 |
91e75e80a0a7e77bc0205ef7f4f153c2f4ddaa8b | 609 | py | Python | libs/mpl/preprocessed/preprocess_vector.py | jmuskaan72/Boost | 047e36c01841a8cd6a5c74d4e3034da46e327bc1 | [
"BSL-1.0"
] | 198 | 2015-01-13T05:47:18.000Z | 2022-03-09T04:46:46.000Z | libs/mpl/preprocessed/preprocess_vector.py | xiaoliang2121/Boost | fc90c3fde129c62565c023f091eddc4a7ed9902b | [
"BSL-1.0"
] | 4 | 2015-03-19T08:23:23.000Z | 2019-06-24T07:48:47.000Z | libs/mpl/preprocessed/preprocess_vector.py | xiaoliang2121/Boost | fc90c3fde129c62565c023f091eddc4a7ed9902b | [
"BSL-1.0"
] | 139 | 2015-01-15T20:09:31.000Z | 2022-01-31T15:21:16.000Z |
# Copyright Aleksey Gurtovoy 2001-2006
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#
# See http://www.boost.org/libs/mpl for documentation.
# $Id: preprocess_vector.py 49269 2008-10-11 06:30:50Z agurtovoy $
# $Date: 2008-10-10 23:30:50 -0700 (Fri, 10 Oct 2008) $
# $Revision: 49269 $
import preprocess
import os.path

# Preprocess the "vector" component for each supported compiler mode and
# write the results under boost/mpl/vector/aux_/preprocessed.
component_modes = ["no_ctps", "plain", "typeof_based"]
output_dir = os.path.join("boost", "mpl", "vector", "aux_", "preprocessed")
preprocess.main(component_modes, "vector", output_dir)
| 27.681818 | 71 | 0.656814 |
c2f7264a7d7760e990c773dda04f1fad709004d3 | 268 | py | Python | python/testData/inspections/PyDunderSlotsInspection/inheritedClassAttrAssignmentAndOwnWithDictAndInheritedWithAttrSlotsPy3.py | Tasemo/intellij-community | 50aeaf729b7073e91c7c77487a1f155e0dfe3fcd | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/inspections/PyDunderSlotsInspection/inheritedClassAttrAssignmentAndOwnWithDictAndInheritedWithAttrSlotsPy3.py | Tasemo/intellij-community | 50aeaf729b7073e91c7c77487a1f155e0dfe3fcd | [
"Apache-2.0"
] | null | null | null | python/testData/inspections/PyDunderSlotsInspection/inheritedClassAttrAssignmentAndOwnWithDictAndInheritedWithAttrSlotsPy3.py | Tasemo/intellij-community | 50aeaf729b7073e91c7c77487a1f155e0dfe3fcd | [
"Apache-2.0"
] | null | null | null | class B(object):
attr = 'baz'
__slots__ = ['f', <warning descr="'attr' in __slots__ conflicts with a class variable">'attr'</warning>]
class C(B):
    # '__dict__' in __slots__ re-enables arbitrary instance attributes, so the
    # assignments below must NOT be flagged by the PyDunderSlots inspection.
    __slots__ = ['foo', 'bar', '__dict__']
C.attr = 'spam'
print(C.attr)
c = C()
c.attr = 'spam'
print(c.attr) | 20.615385 | 108 | 0.61194 |
f8b8ee9a038bd9d51b10fd9715e46ee72605e272 | 17,368 | py | Python | opacus/optimizers/optimizer.py | RQuispeC/opacus | 5c83d59fc169e93667946204f7a6859827a38ace | [
"Apache-2.0"
] | null | null | null | opacus/optimizers/optimizer.py | RQuispeC/opacus | 5c83d59fc169e93667946204f7a6859827a38ace | [
"Apache-2.0"
] | null | null | null | opacus/optimizers/optimizer.py | RQuispeC/opacus | 5c83d59fc169e93667946204f7a6859827a38ace | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
import logging
from typing import Callable, List, Optional, Union
import torch
from opacus.optimizers.utils import params
from torch import nn
from torch.optim import Optimizer
logger = logging.getLogger(__name__)
def _mark_as_processed(obj: Union[torch.Tensor, List[torch.Tensor]]):
"""
Marks parameters that have already been used in the optimizer step.
DP-SGD puts certain restrictions on how gradients can be accumulated. In particular,
no gradient can be used twice - client must call .zero_grad() between
optimizer steps, otherwise privacy guarantees are compromised.
This method marks tensors that have already been used in optimizer steps to then
check if zero_grad has been duly called.
Notes:
This is used to only mark ``p.grad_sample`` and ``p.summed_grad``
Args:
obj: tensor or a list of tensors to be marked
"""
if isinstance(obj, torch.Tensor):
obj._processed = True
elif isinstance(obj, list):
for x in obj:
x._processed = True
def _check_processed_flag_tensor(x: torch.Tensor):
"""
Checks if this gradient tensor has been previously used in optimization step.
See Also:
:meth:`~opacus.optimizers.optimizer._mark_as_processed`
Args:
x: gradient tensor
Raises:
ValueError
If tensor has attribute ``._processed`` previously set by
``_mark_as_processed`` method
"""
if hasattr(x, "_processed"):
raise ValueError(
"Gradients haven't been cleared since the last optimizer step. "
"In order to obtain privacy guarantees you must call optimizer.zero_grad()"
"on each step"
)
def _check_processed_flag(obj: Union[torch.Tensor, List[torch.Tensor]]):
"""
Checks if this gradient tensor (or a list of tensors) has been previously
used in optimization step.
See Also:
:meth:`~opacus.optimizers.optimizer._mark_as_processed`
Args:
x: gradient tensor or a list of tensors
Raises:
ValueError
If tensor (or at least one tensor from the list) has attribute
``._processed`` previously set by ``_mark_as_processed`` method
"""
if isinstance(obj, torch.Tensor):
_check_processed_flag_tensor(obj)
elif isinstance(obj, list):
for x in obj:
_check_processed_flag_tensor(x)
def _generate_noise(
std: float,
reference: torch.Tensor,
generator=None,
secure_mode: bool = False,
) -> torch.Tensor:
"""
Generates noise according to a Gaussian distribution with mean 0
Args:
std: Standard deviation of the noise
reference: The reference Tensor to get the appropriate shape and device
for generating the noise
generator: The PyTorch noise generator
secure_mode: boolean showing if "secure" noise need to be generate
(see the notes)
Notes:
If `secure_mode` is enabled, the generated noise is also secure
against the floating point representation attacks, such as the ones
in https://arxiv.org/abs/2107.10138 and https://arxiv.org/abs/2112.05307.
The attack for Opacus first appeared in https://arxiv.org/abs/2112.05307.
The implemented fix is based on https://arxiv.org/abs/2107.10138 and is
achieved through calling the Gaussian noise function 2*n times, when n=2
(see section 5.1 in https://arxiv.org/abs/2107.10138).
Reason for choosing n=2: n can be any number > 1. The bigger, the more
computation needs to be done (`2n` Gaussian samples will be generated).
The reason we chose `n=2` is that, `n=1` could be easy to break and `n>2`
is not really necessary. The complexity of the attack is `2^p(2n-1)`.
In PyTorch, `p=53` and so complexity is `2^53(2n-1)`. With `n=1`, we get
`2^53` (easy to break) but with `n=2`, we get `2^159`, which is hard
enough for an attacker to break.
"""
zeros = torch.zeros(reference.shape, device=reference.device)
if std == 0:
return zeros
# TODO: handle device transfers: generator and reference tensor
# could be on different devices
if secure_mode:
torch.normal(
mean=0,
std=std,
size=(1, 1),
device=reference.device,
generator=generator,
) # generate, but throw away first generated Gaussian sample
sum = zeros
for _ in range(4):
sum += torch.normal(
mean=0,
std=std,
size=reference.shape,
device=reference.device,
generator=generator,
)
return sum / 2
else:
return torch.normal(
mean=0,
std=std,
size=reference.shape,
device=reference.device,
generator=generator,
)
def _get_flat_grad_sample(p: torch.Tensor):
"""
Return parameter's per sample gradients as a single tensor.
By default, per sample gradients (``p.grad_sample``) are stored as one tensor per
batch basis. Therefore, ``p.grad_sample`` is a single tensor if holds results from
only one batch, and a list of tensors if gradients are accumulated over multiple
steps. This is done to provide visibility into which sample belongs to which batch,
and how many batches have been processed.
This method returns per sample gradients as a single concatenated tensor, regardless
of how many batches have been accumulated
Args:
p: Parameter tensor. Must have ``grad_sample`` attribute
Returns:
``p.grad_sample`` if it's a tensor already, or a single tensor computed by
concatenating every tensor in ``p.grad_sample`` if it's a list
Raises:
ValueError
If ``p`` is missing ``grad_sample`` attribute
"""
if not hasattr(p, "grad_sample"):
raise ValueError(
"Per sample gradient not found. Are you using GradSampleModule?"
)
if isinstance(p.grad_sample, torch.Tensor):
return p.grad_sample
elif isinstance(p.grad_sample, list):
return torch.cat(p.grad_sample, dim=0)
else:
raise ValueError(f"Unexpected grad_sample type: {type(p.grad_sample)}")
class DPOptimizer(Optimizer):
    """
    ``torch.optim.Optimizer`` wrapper that adds additional functionality to clip per
    sample gradients and add Gaussian noise.

    Can be used with any ``torch.optim.Optimizer`` subclass as an underlying optimizer.
    ``DPOptimizer`` assumes that parameters over which it performs optimization belong
    to GradSampleModule and therefore have the ``grad_sample`` attribute.

    On a high level ``DPOptimizer``'s step looks like this:
    1) Aggregate ``p.grad_sample`` over all parameters to calculate per sample norms
    2) Clip ``p.grad_sample`` so that per sample norm is not above threshold
    3) Aggregate clipped per sample gradients into ``p.grad``
    4) Add Gaussian noise to ``p.grad`` calibrated to a given noise multiplier and
    max grad norm limit (``std = noise_multiplier * max_grad_norm``).
    5) Call underlying optimizer to perform optimization step

    Examples:
        >>> module = MyCustomModel()
        >>> optimizer = torch.optim.SGD(module.parameters(), lr=0.1)
        >>> dp_optimizer = DPOptimizer(
        ...     optimizer=optimizer,
        ...     noise_multiplier=1.0,
        ...     max_grad_norm=1.0,
        ...     expected_batch_size=4,
        ... )
    """

    def __init__(
        self,
        optimizer: Optimizer,
        *,
        noise_multiplier: float,
        max_grad_norm: float,
        expected_batch_size: Optional[int],
        loss_reduction: str = "mean",
        generator=None,
        secure_mode: bool = False,
    ):
        """
        Args:
            optimizer: wrapped optimizer.
            noise_multiplier: noise multiplier
            max_grad_norm: max grad norm used for gradient clipping
            expected_batch_size: batch_size used for averaging gradients. When using
                Poisson sampling averaging denominator can't be inferred from the
                actual batch size. Required if ``loss_reduction="mean"``, ignored if
                ``loss_reduction="sum"``
            loss_reduction: Indicates if the loss reduction (for aggregating the gradients)
                is a sum or a mean operation. Can take values "sum" or "mean"
            generator: torch.Generator() object used as a source of randomness for
                the noise
            secure_mode: if ``True`` uses noise generation approach robust to floating
                point arithmetic attacks.
                See :meth:`~opacus.optimizers.optimizer._generate_noise` for details

        Raises:
            ValueError: if ``loss_reduction`` is invalid, or if it is "mean"
                and ``expected_batch_size`` is missing.
        """
        if loss_reduction not in ("mean", "sum"):
            raise ValueError(f"Unexpected value for loss_reduction: {loss_reduction}")

        if loss_reduction == "mean" and expected_batch_size is None:
            raise ValueError(
                "You must provide expected batch size of the loss reduction is mean"
            )

        self.original_optimizer = optimizer
        self.noise_multiplier = noise_multiplier
        self.max_grad_norm = max_grad_norm
        self.loss_reduction = loss_reduction
        self.expected_batch_size = expected_batch_size
        self.step_hook = None
        self.generator = generator
        self.secure_mode = secure_mode

        # Mirror the wrapped optimizer's param_groups/state so that LR
        # schedulers and checkpointing code can treat this object as a
        # regular torch optimizer.
        self.param_groups = optimizer.param_groups
        self.state = optimizer.state
        # Pending "skip this step" signals; consumed FIFO by pre_step().
        self._step_skip_queue = []
        self._is_last_step_skipped = False

        for p in self.params:
            p.summed_grad = None

    def signal_skip_step(self, do_skip=True):
        """
        Signals the optimizer to skip an optimization step and only perform clipping and
        per sample gradient accumulation.

        On every call of ``.step()`` optimizer will check the queue of skipped step
        signals. If non-empty and the latest flag is ``True``, optimizer will call
        ``self.clip_and_accumulate``, but won't proceed to adding noise and performing
        the actual optimization step.

        It also affects the behaviour of ``zero_grad()``. If the last step was skipped,
        optimizer will clear per sample gradients accumulated by
        ``self.clip_and_accumulate`` (``p.grad_sample``), but won't touch aggregated
        clipped gradients (``p.summed_grad``)

        Used by :class:`~opacus.utils.batch_memory_manager.BatchMemoryManager` to
        simulate large virtual batches with limited memory footprint.

        Args:
            do_skip: flag if next step should be skipped
        """
        self._step_skip_queue.append(do_skip)

    def _check_skip_next_step(self):
        # Pop the oldest pending skip signal; default to "don't skip" when
        # no signal was queued.
        if self._step_skip_queue:
            return self._step_skip_queue.pop(0)
        else:
            return False

    @property
    def params(self) -> List[nn.Parameter]:
        """
        Returns a flat list of ``nn.Parameter`` managed by the optimizer
        """
        return params(self)

    @property
    def grad_samples(self) -> List[torch.Tensor]:
        """
        Returns a flat list of per sample gradient tensors (one per parameter)
        """
        ret = []
        for p in self.params:
            ret.append(_get_flat_grad_sample(p))
        return ret

    @property
    def accumulated_iterations(self) -> int:
        """
        Returns number of batches currently accumulated and not yet processed.

        In other words ``accumulated_iterations`` tracks the number of forward/backward
        passed done in between two optimizer steps. The value would typically be 1,
        but there are possible exceptions.

        Used by privacy accountants to calculate real sampling rate.

        Raises:
            ValueError: if any parameter lacks ``grad_sample``, or if the
                parameters disagree on how many batches were accumulated.
        """
        vals = []
        for p in self.params:
            if not hasattr(p, "grad_sample"):
                raise ValueError(
                    "Per sample gradient not found. Are you using GradSampleModule?"
                )
            if isinstance(p.grad_sample, torch.Tensor):
                vals.append(1)
            elif isinstance(p.grad_sample, list):
                vals.append(len(p.grad_sample))
            else:
                raise ValueError(f"Unexpected grad_sample type: {type(p.grad_sample)}")
        if len(set(vals)) > 1:
            raise ValueError(
                "Number of accumulated steps is inconsistent across parameters"
            )
        return vals[0]

    def attach_step_hook(self, fn: Callable[[DPOptimizer], None]):
        """
        Attaches a hook to be executed after gradient clipping/noising, but before the
        actual optimization step.

        Most commonly used for privacy accounting.

        Args:
            fn: hook function. Expected signature: ``foo(optim: DPOptimizer)``
        """
        self.step_hook = fn

    def clip_and_accumulate(self):
        """
        Performs gradient clipping.
        Stores clipped and aggregated gradients into ``p.summed_grad``
        """
        # Per-sample norm is the L2 norm over the concatenation of all
        # parameters' per-sample gradients.
        per_param_norms = [
            g.view(len(g), -1).norm(2, dim=-1) for g in self.grad_samples
        ]
        per_sample_norms = torch.stack(per_param_norms, dim=1).norm(2, dim=1)
        # Scale factor <= 1; the 1e-6 guards against division by zero.
        per_sample_clip_factor = (self.max_grad_norm / (per_sample_norms + 1e-6)).clamp(
            max=1.0
        )

        for p in self.params:
            _check_processed_flag(p.grad_sample)

            grad_sample = _get_flat_grad_sample(p)
            # Scale each sample's gradient by its clip factor, then sum over
            # the batch dimension.
            grad = torch.einsum("i,i...", per_sample_clip_factor, grad_sample)

            if p.summed_grad is not None:
                p.summed_grad += grad
            else:
                p.summed_grad = grad

            _mark_as_processed(p.grad_sample)

    def add_noise(self):
        """
        Adds noise to clipped gradients. Stores clipped and noised result in ``p.grad``
        """
        for p in self.params:
            _check_processed_flag(p.summed_grad)

            noise = _generate_noise(
                std=self.noise_multiplier * self.max_grad_norm,
                reference=p.summed_grad,
                generator=self.generator,
                secure_mode=self.secure_mode,
            )
            p.grad = p.summed_grad + noise

            _mark_as_processed(p.summed_grad)

    def scale_grad(self):
        """
        Applies given ``loss_reduction`` to ``p.grad``.

        Does nothing if ``loss_reduction="sum"``. Divides gradients by
        ``self.expected_batch_size`` if ``loss_reduction="mean"``
        """
        if self.loss_reduction == "mean":
            for p in self.params:
                p.grad /= self.expected_batch_size * self.accumulated_iterations

    def zero_grad(self, set_to_none: bool = False):
        """
        Clear gradients.

        Clears ``p.grad``, ``p.grad_sample`` and ``p.summed_grad`` for all of it's parameters

        Notes:
            ``set_to_none`` argument only affects ``p.grad``. ``p.grad_sample`` and
            ``p.summed_grad`` is never zeroed out and always set to None.
            Normal grads can do this, because their shape is always the same.
            Grad samples do not behave like this, as we accumulate gradients from different
            batches in a list

        Args:
            set_to_none: instead of setting to zero, set the grads to None. (only
                affects regular gradients. Per sample gradients are always set to None)
        """
        if set_to_none is False:
            logger.info(
                "Despite set_to_none is set to False, "
                "opacus will set p.grad_sample and p.summed_grad to None due to "
                "non-trivial gradient accumulation behaviour"
            )

        for p in self.params:
            p.grad_sample = None

            # Keep summed_grad alive across zero_grad when the previous step
            # was skipped (virtual-batch accumulation).
            if not self._is_last_step_skipped:
                p.summed_grad = None

        self.original_optimizer.zero_grad(set_to_none)

    def pre_step(
        self, closure: Optional[Callable[[], float]] = None
    ) -> Optional[float]:
        """
        Perform actions specific to ``DPOptimizer`` before calling
        underlying ``optimizer.step()``

        Args:
            closure: A closure that reevaluates the model and
                returns the loss. Optional for most optimizers.

        Returns:
            In practice a bool: ``True`` if the underlying optimizer step
            should proceed, ``False`` if this step was signalled to be
            skipped.
        """
        self.clip_and_accumulate()
        if self._check_skip_next_step():
            self._is_last_step_skipped = True
            return False

        self.add_noise()
        self.scale_grad()

        if self.step_hook:
            self.step_hook(self)

        self._is_last_step_skipped = False
        return True

    def step(self, closure: Optional[Callable[[], float]] = None) -> Optional[float]:
        # Evaluate the closure (if any) with grad enabled, then clip, noise
        # and delegate to the wrapped optimizer unless the step was skipped.
        if closure is not None:
            with torch.enable_grad():
                closure()

        if self.pre_step():
            return self.original_optimizer.step(closure)
        else:
            return None

    def __repr__(self):
        # Delegate to the wrapped optimizer for a familiar representation.
        return self.original_optimizer.__repr__()

    def state_dict(self):
        # State lives entirely in the wrapped optimizer.
        return self.original_optimizer.state_dict()

    def load_state_dict(self, state_dict) -> None:
        self.original_optimizer.load_state_dict(state_dict)
| 35.229209 | 93 | 0.624885 |
287c160f911634b187576b11fa7b962506b0c201 | 2,027 | py | Python | server/app/views/__init__.py | return-capsule/backend | e899b30b93a4bbcf54c1688db0fcabbcb55fe668 | [
"Apache-2.0"
] | null | null | null | server/app/views/__init__.py | return-capsule/backend | e899b30b93a4bbcf54c1688db0fcabbcb55fe668 | [
"Apache-2.0"
] | null | null | null | server/app/views/__init__.py | return-capsule/backend | e899b30b93a4bbcf54c1688db0fcabbcb55fe668 | [
"Apache-2.0"
] | null | null | null | import boto3, botocore
from functools import wraps
from flask import current_app, abort, request
from flask_restful import Resource
from app.config import Config
def upload_file_to_s3(file, bucket_name, acl="public-read", access_key_id=Config.S3_KEY,
                      secret_access_key=Config.S3_SECRET):
    """Upload a file object to an S3 bucket and return its public URL.

    Args:
        file: file-like object exposing ``filename`` and ``content_type``
            attributes (e.g. a Werkzeug ``FileStorage``).
        bucket_name: target S3 bucket name.
        acl: canned ACL applied to the object; the default makes it
            world-readable.
        access_key_id: AWS access key (defaults to app config).
        secret_access_key: AWS secret key (defaults to app config).

    Returns:
        The object's public URL (str) on success, or the caught exception
        instance on failure.
        NOTE(review): returning the exception instead of re-raising forces
        callers to type-check the result; consider raising instead.
    """
    s3 = boto3.client(
        's3',
        aws_access_key_id=access_key_id,
        aws_secret_access_key=secret_access_key
    )
    try:
        s3.upload_fileobj(
            file,
            bucket_name,
            file.filename,
            ExtraArgs={
                "ACL": acl,
                "ContentType": file.content_type
            }
        )
    except Exception as e:
        # Broad catch is a deliberate best-effort: log and hand the error
        # back to the caller rather than crash the request.
        print("Something Happened: ", e)
        return e
    return f"{current_app.config['S3_URL']}/{file.filename}"
def check_json(keys):
    """Decorator factory validating the JSON body of the current request.

    Args:
        keys: mapping of required top-level key -> expected Python type.

    The wrapped view aborts with 406 when the request body is not JSON, and
    with 400 when a required key is missing or its value is not exactly the
    expected type (strict ``type(...) is not t`` check, not ``isinstance``).
    """
    def decorator(original_func):
        @wraps(original_func)
        def wrapper(*args, **kwargs):
            if not request.is_json:
                abort(406)
            for key, expected_type in keys.items():
                # Removed leftover debug print(k) that leaked request keys
                # to stdout on every validation failure.
                if key not in request.json or type(request.json[key]) is not expected_type:
                    abort(400)
            return original_func(*args, **kwargs)
        return wrapper
    return decorator
class BaseResource(Resource):
    # Shared base class for this API's Flask-RESTful resources.

    @classmethod
    def unicode_safe_json_dumps(cls):
        # Placeholder: unicode-safe JSON serialization is not implemented yet.
        pass
class Router:
    """Registers the application's API blueprints on a Flask app."""

    def __init__(self, app=None):
        # Allow both the plain and the app-factory initialization styles.
        if app:
            self.init_app(app)

    def init_app(self, app):
        """Attach the auth and capsule blueprints to *app*."""
        # Imported lazily to avoid a circular import at module load time.
        from app.views import auth, capsule

        for module in (auth, capsule):
            app.register_blueprint(module.api.blueprint)
| 25.024691 | 88 | 0.573754 |
2ee39c9b17651f85fa2dc6c3ddf48d46c62e721d | 10,636 | py | Python | google/ads/google_ads/v5/services/hotel_performance_view_service_client.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | 1 | 2021-04-09T04:28:47.000Z | 2021-04-09T04:28:47.000Z | google/ads/google_ads/v5/services/hotel_performance_view_service_client.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v5/services/hotel_performance_view_service_client.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.ads.googleads.v5.services HotelPerformanceViewService API."""
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.client_options
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.path_template
from google.ads.google_ads.v5.services import hotel_performance_view_service_client_config
from google.ads.google_ads.v5.services.transports import hotel_performance_view_service_grpc_transport
from google.ads.google_ads.v5.proto.services import hotel_performance_view_service_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
'google-ads',
).version
class HotelPerformanceViewServiceClient(object):
    """Service to manage Hotel Performance Views."""

    SERVICE_ADDRESS = 'googleads.googleapis.com:443'
    """The default address of the service."""

    # The name of the interface for this client. This is the key used to
    # find the method configuration in the client_config dictionary.
    _INTERFACE_NAME = 'google.ads.googleads.v5.services.HotelPerformanceViewService'

    @classmethod
    def from_service_account_file(cls, filename, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            HotelPerformanceViewServiceClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs['credentials'] = credentials
        return cls(*args, **kwargs)

    # Alias retained for backward compatibility with callers using the
    # older generated name.
    from_service_account_json = from_service_account_file

    @classmethod
    def hotel_performance_view_path(cls, customer):
        """Return a fully-qualified hotel_performance_view string."""
        return google.api_core.path_template.expand(
            'customers/{customer}/hotelPerformanceView',
            customer=customer,
        )

    def __init__(self, transport=None, channel=None, credentials=None,
                 client_config=None, client_info=None, client_options=None):
        """Constructor.

        Args:
            transport (Union[~.HotelPerformanceViewServiceGrpcTransport,
                    Callable[[~.Credentials, type], ~.HotelPerformanceViewServiceGrpcTransport]): A transport
                instance, responsible for actually making the API calls.
                The default transport uses the gRPC protocol.
                This argument may also be a callable which returns a
                transport instance. Callables will be sent the credentials
                as the first argument and the default transport class as
                the second argument.
            channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
                through which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is mutually exclusive with providing a
                transport instance to ``transport``; doing so will raise
                an exception.
            client_config (dict): DEPRECATED. A dictionary of call options for
                each method. If not specified, the default configuration is used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            client_options (Union[dict, google.api_core.client_options.ClientOptions]):
                Client options used to set user options on the client. API Endpoint
                should be set through client_options.
        """
        # Raise deprecation warnings for things we want to go away.
        if client_config is not None:
            warnings.warn('The `client_config` argument is deprecated.',
                          PendingDeprecationWarning, stacklevel=2)
        else:
            client_config = hotel_performance_view_service_client_config.config

        if channel:
            warnings.warn('The `channel` argument is deprecated; use '
                          '`transport` instead.',
                          PendingDeprecationWarning, stacklevel=2)

        api_endpoint = self.SERVICE_ADDRESS
        if client_options:
            # Accept either a plain dict or a ClientOptions instance.
            # `isinstance` (rather than an exact `type(...) ==` comparison)
            # also accepts ClientOptions/dict subclasses.
            if isinstance(client_options, dict):
                client_options = google.api_core.client_options.from_dict(client_options)
            if client_options.api_endpoint:
                api_endpoint = client_options.api_endpoint

        # Instantiate the transport.
        # The transport is responsible for handling serialization and
        # deserialization and actually sending data to the service.
        if transport:
            if callable(transport):
                self.transport = transport(
                    credentials=credentials,
                    default_class=hotel_performance_view_service_grpc_transport.HotelPerformanceViewServiceGrpcTransport,
                    address=api_endpoint,
                )
            else:
                if credentials:
                    raise ValueError(
                        'Received both a transport instance and '
                        'credentials; these are mutually exclusive.'
                    )
                self.transport = transport
        else:
            self.transport = hotel_performance_view_service_grpc_transport.HotelPerformanceViewServiceGrpcTransport(
                address=api_endpoint,
                channel=channel,
                credentials=credentials,
            )

        if client_info is None:
            client_info = google.api_core.gapic_v1.client_info.ClientInfo(
                gapic_version=_GAPIC_LIBRARY_VERSION,
            )
        else:
            client_info.gapic_version = _GAPIC_LIBRARY_VERSION
        self._client_info = client_info

        # Parse out the default settings for retry and timeout for each RPC
        # from the client configuration.
        # (Ordinarily, these are the defaults specified in the `*_config.py`
        # file next to this one.)
        self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
            client_config['interfaces'][self._INTERFACE_NAME],
        )

        # Save a dictionary of cached API call functions.
        # These are the actual callables which invoke the proper
        # transport methods, wrapped with `wrap_method` to add retry,
        # timeout, and the like.
        self._inner_api_calls = {}

    # Service calls
    def get_hotel_performance_view(
            self,
            resource_name,
            retry=google.api_core.gapic_v1.method.DEFAULT,
            timeout=google.api_core.gapic_v1.method.DEFAULT,
            metadata=None):
        """
        Returns the requested Hotel Performance View in full detail.

        Args:
            resource_name (str): Required. Resource name of the Hotel Performance View to fetch.
            retry (Optional[google.api_core.retry.Retry]):  A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.ads.googleads_v5.types.HotelPerformanceView` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic, caching
        # the wrapped callable so the wrapping happens only once per client.
        if 'get_hotel_performance_view' not in self._inner_api_calls:
            self._inner_api_calls['get_hotel_performance_view'] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.get_hotel_performance_view,
                default_retry=self._method_configs['GetHotelPerformanceView'].retry,
                default_timeout=self._method_configs['GetHotelPerformanceView'].timeout,
                client_info=self._client_info,
            )

        request = hotel_performance_view_service_pb2.GetHotelPerformanceViewRequest(
            resource_name=resource_name,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)

        # Attach a routing header so the backend can route the request by
        # resource name. (The generated `try/except AttributeError` guard was
        # removed: building a list literal of string tuples cannot raise.)
        routing_header = [('resource_name', resource_name)]
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(routing_header)
        metadata.append(routing_metadata)

        return self._inner_api_calls['get_hotel_performance_view'](request, retry=retry, timeout=timeout, metadata=metadata)
| 44.689076 | 124 | 0.660963 |
2804dde5b1df341b802efdd02343bdb9cbbecded | 1,773 | py | Python | int/acapy-backchannel/acapy_backchannel/models/query_result.py | sicpa-dlab/aries-acapy-plugin-didcomm-uniresolver | 93760877422c8021385170cb9747caadeb47e149 | [
"Apache-2.0"
] | null | null | null | int/acapy-backchannel/acapy_backchannel/models/query_result.py | sicpa-dlab/aries-acapy-plugin-didcomm-uniresolver | 93760877422c8021385170cb9747caadeb47e149 | [
"Apache-2.0"
] | 10 | 2021-04-05T11:33:30.000Z | 2021-05-12T08:40:47.000Z | int/acapy-backchannel/acapy_backchannel/models/query_result.py | sicpa-dlab/aries-acapy-plugin-didcomm-uniresolver | 93760877422c8021385170cb9747caadeb47e149 | [
"Apache-2.0"
] | 1 | 2021-03-01T18:03:32.000Z | 2021-03-01T18:03:32.000Z | from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.query_result_results import QueryResultResults
from ..types import UNSET, Unset
T = TypeVar("T", bound="QueryResult")
@attr.s(auto_attribs=True)
class QueryResult:
    """A query result plus any extra JSON properties not in the schema."""

    results: Union[Unset, QueryResultResults] = UNSET
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, merging unknown and known fields."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        if not isinstance(self.results, Unset):
            serialized["results"] = self.results.to_dict()
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        data = src_dict.copy()

        raw_results = data.pop("results", UNSET)
        parsed_results: Union[Unset, QueryResultResults]
        if isinstance(raw_results, Unset):
            parsed_results = UNSET
        else:
            parsed_results = QueryResultResults.from_dict(raw_results)

        instance = cls(results=parsed_results)
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> List[str]:
        # Keys of the pass-through properties, as a list.
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
| 29.065574 | 77 | 0.646926 |
a757c33e20641328d39a4074d35ffbf0fd109fb2 | 2,740 | py | Python | vanity.py | N3TC4T/xrp-vanity-generator | 0799545172f3aeef8ed978270a945a3052e01927 | [
"Apache-2.0"
] | null | null | null | vanity.py | N3TC4T/xrp-vanity-generator | 0799545172f3aeef8ed978270a945a3052e01927 | [
"Apache-2.0"
] | null | null | null | vanity.py | N3TC4T/xrp-vanity-generator | 0799545172f3aeef8ed978270a945a3052e01927 | [
"Apache-2.0"
] | 4 | 2019-01-05T17:49:59.000Z | 2019-12-05T07:36:42.000Z | # coding=utf-8
###############################################################################
# Ripple Vanity Address Generator
# !/usr/bin/python
###############################################################################
from __future__ import print_function
import argparse
import multiprocessing
import re
import sys
from ripple import genb58seed, seed2accid
class bcolors:
    # ANSI terminal escape sequences used to colourise console output.
    HEADER = '\033[94m'  # blue
    OKGREEN = '\033[92m'  # green
    WARNING = '\033[93m'  # yellow
    FAIL = '\033[91m'  # red
    ENDC = '\033[0m'  # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def worker(FOUND_FLAG):
    """Try one random seed and report its address if it matches the target.

    The module-level ``NAME`` must appear either immediately after the
    leading 'r' of the address or at its very end.  On a match the shared
    ``FOUND_FLAG`` event is set so the spawner stops launching new batches.
    """
    pattern = "^(r)(%s)(.+)$|^(r.+)(%s)$" % (NAME, NAME)
    candidate_seed = genb58seed()
    fpgadd, accadd, account_id = seed2accid(candidate_seed)

    if _verbose:
        print(bcolors.HEADER + "[-] " + account_id)

    # Guard clause: nothing to do unless the address matches the pattern.
    if not re.match(pattern, account_id):
        return

    FOUND_FLAG.set()
    print(bcolors.OKGREEN + bcolors.BOLD + "\n[*]FOUND IT:")
    print("---------------------------------------------------")
    print("[!]ADDRESS: ", account_id)
    print("[!]SEED: ", candidate_seed)
    print("---------------------------------------------------\n" + bcolors.ENDC)
    sys.exit()
def starter():
    """Spawn batches of worker processes until a matching address is found.

    Launches THREAD workers per batch, joins the whole batch, then repeats
    until a worker sets the shared FOUND_FLAG event or the user interrupts
    with Ctrl-C.  Never returns: it always terminates via sys.exit().
    """
    numbers = 0  # total number of worker processes launched so far
    print(bcolors.HEADER + "\n[!] Initializing Workers")
    m = multiprocessing.Manager()
    # Manager-backed Event so the flag is visible across child processes.
    FOUND_FLAG = m.Event()
    print("[!] Start Workers ... \n" + bcolors.ENDC)
    try:
        while not FOUND_FLAG.is_set():
            procs = []
            for w in range(THREAD):
                numbers += 1
                process = multiprocessing.Process(target=worker, args=(FOUND_FLAG,))
                procs.append(process)
                process.start()
                # Progress indicator: a dot every 100 attempts, and the
                # running total every 1000.
                if numbers % 100 == 0:
                    sys.stdout.write('.')
                if numbers % 1000 == 0:
                    sys.stdout.write("\r" + str(numbers) + ' ')
                sys.stdout.flush()
            # Wait for the whole batch before launching the next one.
            for p in procs:
                p.join()
    except (KeyboardInterrupt, SystemExit):
        print("Caught KeyboardInterrupt, terminating workers")
        sys.exit()
    finally:
        # NOTE(review): sys.exit() in `finally` also runs on normal loop
        # exit, so control never returns to the caller.
        sys.exit()
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Ripple Vanity Address Generator",
        epilog="./vanity -n brian -p 4 -v"
    )
    # required argument
    parser.add_argument('-n', '--name', action="store", required=True, help='Target Name')
    # optional arguments
    parser.add_argument('-p', '--process', help='NUM of Process', type=int, default=4)
    parser.add_argument('-v', '--verbose', action='store_const', help='Verbose', const=True, default=False)
    args = parser.parse_args()

    # Module-level globals read by worker() in every spawned process.
    NAME = args.name
    THREAD = args.process
    _verbose = args.verbose

    starter()
| 27.676768 | 107 | 0.510584 |
001f30a1631a20319d09c9c0237e61299c205ea3 | 12,583 | py | Python | plugins/modules/oci_waas_custom_protection_rule_actions.py | sohwaje/oci-ansible-collection | 9e6b8cf55e596a96560710a457a7df05886fc59c | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_waas_custom_protection_rule_actions.py | sohwaje/oci-ansible-collection | 9e6b8cf55e596a96560710a457a7df05886fc59c | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_waas_custom_protection_rule_actions.py | sohwaje/oci-ansible-collection | 9e6b8cf55e596a96560710a457a7df05886fc59c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_waas_custom_protection_rule_actions
short_description: Perform actions on a CustomProtectionRule resource in Oracle Cloud Infrastructure
description:
- Perform actions on a CustomProtectionRule resource in Oracle Cloud Infrastructure
- For I(action=change_compartment), moves a custom protection rule into a different compartment within the same tenancy. When provided, If-Match is checked
against ETag values of the custom protection rule. For information about moving resources between compartments, see L(Moving Resources to a Different
Compartment,https://docs.cloud.oracle.com/iaas/Content/Identity/Tasks/managingcompartments.htm#moveRes).
version_added: "2.9.0"
author: Oracle (@oracle)
options:
custom_protection_rule_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the custom protection rule. This number is generated when
the custom protection rule is added to the compartment.
type: str
aliases: ["id"]
required: true
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the compartment into which the resource should be moved. For
information about moving resources between compartments, see L(Moving Resources to a Different
Compartment,https://docs.cloud.oracle.com/iaas/Content/Identity/Tasks/managingcompartments.htm#moveRes).
type: str
required: true
action:
description:
- The action to perform on the CustomProtectionRule.
type: str
required: true
choices:
- "change_compartment"
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: Perform action change_compartment on custom_protection_rule
oci_waas_custom_protection_rule_actions:
custom_protection_rule_id: "ocid1.customprotectionrule.oc1..xxxxxxEXAMPLExxxxxx"
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
action: change_compartment
"""
RETURN = """
custom_protection_rule:
description:
- Details of the CustomProtectionRule resource acted upon by the current operation
returned: on success
type: complex
contains:
id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the custom protection rule.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the custom protection rule's compartment.
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- The user-friendly name of the custom protection rule.
returned: on success
type: str
sample: display_name_example
description:
description:
- The description of the custom protection rule.
returned: on success
type: str
sample: description_example
mod_security_rule_ids:
description:
- The auto-generated ID for the custom protection rule. These IDs are referenced in logs.
returned: on success
type: list
sample: []
template:
description:
- The template text of the custom protection rule. All custom protection rules are expressed in ModSecurity Rule Language.
- Additionally, each rule must include two placeholder variables that are updated by the WAF service upon publication of the rule.
- "`id: {{id_1}}` - This field is populated with a unique rule ID generated by the WAF service which identifies a `SecRule`. More than one
`SecRule` can be defined in the `template` field of a CreateCustomSecurityRule call. The value of the first `SecRule` must be `id: {{id_1}}`
and the `id` field of each subsequent `SecRule` should increase by one, as shown in the example."
- "`ctl:ruleEngine={{mode}}` - The action to be taken when the criteria of the `SecRule` are met, either `OFF`, `DETECT` or `BLOCK`. This field
is automatically populated with the corresponding value of the `action` field of the `CustomProtectionRuleSetting` schema when the `WafConfig`
is updated."
- "*Example:*
```
SecRule REQUEST_COOKIES \\"regex matching SQL injection - part 1/2\\" \\\\
\\"phase:2, \\\\
msg:'Detects chained SQL injection attempts 1/2.', \\\\
id: {{id_1}}, \\\\
ctl:ruleEngine={{mode}}, \\\\
deny\\"
SecRule REQUEST_COOKIES \\"regex matching SQL injection - part 2/2\\" \\\\
\\"phase:2, \\\\
msg:'Detects chained SQL injection attempts 2/2.', \\\\
id: {{id_2}}, \\\\
ctl:ruleEngine={{mode}}, \\\\
deny\\"
```"
- The example contains two `SecRules` each having distinct regex expression to match the `Cookie` header value during the second input analysis
phase.
- For more information about custom protection rules, see L(Custom Protection
Rules,https://docs.cloud.oracle.com/Content/WAF/tasks/customprotectionrules.htm).
- "For more information about ModSecurity syntax, see L(Making Rules: The Basic
Syntax,https://www.modsecurity.org/CRS/Documentation/making.html)."
- For more information about ModSecurity's open source WAF rules, see L(Mod Security's OWASP Core Rule Set
documentation,https://www.modsecurity.org/CRS/Documentation/index.html).
returned: on success
type: str
sample: template_example
lifecycle_state:
description:
- The current lifecycle state of the custom protection rule.
returned: on success
type: str
sample: CREATING
time_created:
description:
- The date and time the protection rule was created, expressed in RFC 3339 timestamp format.
returned: on success
type: str
sample: "2018-11-16T21:10:29Z"
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
sample: {
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"mod_security_rule_ids": [],
"template": "template_example",
"lifecycle_state": "CREATING",
"time_created": "2018-11-16T21:10:29Z",
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}}
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIActionsHelperBase,
get_custom_class,
)
try:
from oci.waas import WaasClient
from oci.waas.models import ChangeCustomProtectionRuleCompartmentDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class CustomProtectionRuleActionsHelperGen(OCIActionsHelperBase):
    """
    Supported actions:
        change_compartment
    """

    @staticmethod
    def get_module_resource_id_param():
        # Name of the Ansible module parameter carrying the resource OCID.
        return "custom_protection_rule_id"

    def get_module_resource_id(self):
        return self.module.params.get("custom_protection_rule_id")

    def get_get_fn(self):
        return self.client.get_custom_protection_rule

    def get_resource(self):
        """Fetch the current state of the custom protection rule."""
        rule_id = self.module.params.get("custom_protection_rule_id")
        return oci_common_utils.call_with_backoff(
            self.client.get_custom_protection_rule,
            custom_protection_rule_id=rule_id,
        )

    def change_compartment(self):
        """Move the rule into the compartment given in the module params."""
        details = oci_common_utils.convert_input_data_to_model_class(
            self.module.params, ChangeCustomProtectionRuleCompartmentDetails
        )
        action_name = self.module.params.get("action")
        operation_name = "{0}_{1}".format(
            action_name.upper(), oci_common_utils.ACTION_OPERATION_KEY
        )
        call_kwargs = dict(
            custom_protection_rule_id=self.module.params.get(
                "custom_protection_rule_id"
            ),
            change_custom_protection_rule_compartment_details=details,
        )
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.change_custom_protection_rule_compartment,
            call_fn_args=(),
            call_fn_kwargs=call_kwargs,
            waiter_type=oci_wait_utils.NONE_WAITER_KEY,
            operation=operation_name,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_action_desired_states(action_name),
        )
# Allow deployments to override the generated helper with a custom subclass;
# falls back to a no-op class when no override is registered.
CustomProtectionRuleActionsHelperCustom = get_custom_class(
    "CustomProtectionRuleActionsHelperCustom"
)


class ResourceHelper(
    CustomProtectionRuleActionsHelperCustom, CustomProtectionRuleActionsHelperGen
):
    # Custom class listed first so its overrides take MRO precedence over
    # the generated implementation.
    pass
def main():
    """Ansible module entry point: build the arg spec and run the action."""
    arg_spec = oci_common_utils.get_common_arg_spec(
        supports_create=False, supports_wait=False
    )
    arg_spec.update(
        custom_protection_rule_id=dict(aliases=["id"], type="str", required=True),
        compartment_id=dict(type="str", required=True),
        action=dict(type="str", required=True, choices=["change_compartment"]),
    )

    module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)

    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")

    helper = ResourceHelper(
        module=module,
        resource_type="custom_protection_rule",
        service_client_class=WaasClient,
        namespace="waas",
    )
    module.exit_json(**helper.perform_action(module.params.get("action")))


if __name__ == "__main__":
    main()
| 43.24055 | 160 | 0.625685 |
48bbbb3d4d29cfd38d74953f523d29be12c45a8e | 501 | py | Python | client/verta/verta/_swagger/_public/uac/model/UacDeleteRoleBinding.py | CaptEmulation/modeldb | 78b10aca553e386554f9740db63466b1cf013a1a | [
"Apache-2.0"
] | 835 | 2017-02-08T20:14:24.000Z | 2020-03-12T17:37:49.000Z | client/verta/verta/_swagger/_public/uac/model/UacDeleteRoleBinding.py | CaptEmulation/modeldb | 78b10aca553e386554f9740db63466b1cf013a1a | [
"Apache-2.0"
] | 651 | 2019-04-18T12:55:07.000Z | 2022-03-31T23:45:09.000Z | client/verta/verta/_swagger/_public/uac/model/UacDeleteRoleBinding.py | CaptEmulation/modeldb | 78b10aca553e386554f9740db63466b1cf013a1a | [
"Apache-2.0"
] | 170 | 2017-02-13T14:49:22.000Z | 2020-02-19T17:59:12.000Z | # THIS FILE IS AUTO-GENERATED. DO NOT EDIT
from verta._swagger.base_type import BaseType
class UacDeleteRoleBinding(BaseType):
    """Request body for deleting a role binding, identified by its id."""

    def __init__(self, id=None):
        # Field name -> whether the field is mandatory (id is optional).
        required = {
            "id": False,
        }
        self.id = id

        for field_name, is_required in required.items():
            # Evaluation order preserved from the generated code: the field
            # lookup happens before the required check.
            if self[field_name] is None and is_required:
                raise ValueError('attribute {} is required'.format(field_name))

    @staticmethod
    def from_json(d):
        # The plain `id` field needs no conversion, so the payload is
        # forwarded directly to the constructor.
        return UacDeleteRoleBinding(**d)
| 20.875 | 62 | 0.622754 |
6d7ebe9dcabaae6d88b297a474270baf84c3096e | 2,162 | py | Python | part5/test_calc5.py | aniruddha2000/fusion | a6788935f10bb2d88c30e9106ff07202569d77db | [
"Apache-2.0"
] | null | null | null | part5/test_calc5.py | aniruddha2000/fusion | a6788935f10bb2d88c30e9106ff07202569d77db | [
"Apache-2.0"
] | null | null | null | part5/test_calc5.py | aniruddha2000/fusion | a6788935f10bb2d88c30e9106ff07202569d77db | [
"Apache-2.0"
] | 1 | 2021-02-27T13:11:50.000Z | 2021-02-27T13:11:50.000Z | import unittest
from calc5 import *
class LexerTestCase(unittest.TestCase):
    """Checks that the Lexer produces the expected first token per input."""

    def makeLexer(self, text):
        return Lexer(text)

    def _assert_token(self, text, expected_type, expected_value):
        """Tokenize *text* and compare the first token's type and value."""
        token = self.makeLexer(text).get_next_token()
        self.assertEqual(token.type, expected_type)
        self.assertEqual(token.value, expected_value)

    def test_lexer_integer(self):
        self._assert_token('10', INTEGER, 10)

    def test_lexer_plus(self):
        self._assert_token('+', PLUS, '+')

    def test_lexer_minus(self):
        self._assert_token('-', MINUS, '-')

    def test_lexer_mul(self):
        self._assert_token('*', MUL, '*')

    def test_lexer_div(self):
        self._assert_token('/', DIV, '/')
class TestInterpreter(unittest.TestCase):
    """End-to-end arithmetic evaluation tests for the interpreter."""

    def makeInterpreter(self, text):
        return Interpreter(Lexer(text))

    def _assert_result(self, text, expected):
        """Evaluate *text* and compare against the expected value."""
        self.assertEqual(self.makeInterpreter(text).expr(), expected)

    def test_only_int(self):
        self._assert_result('3', 3)

    def test_plus_mul(self):
        self._assert_result('4 + 8 * 2', 20)

    def test_minus_div(self):
        self._assert_result('10 - 6 / 3', 8)

    def test_all_operators(self):
        self._assert_result('7 * 4 / 2 + 5 / 3 - 6', 9)

    def test_invalid_expression(self):
        with self.assertRaises(Exception):
            self.makeInterpreter('3 *').expr()
if __name__ == '__main__':
    # Run the test suite when this file is executed directly.
    unittest.main()
| 28.447368 | 67 | 0.623959 |
4d4d5dad105a3ea6c75b0bc44cffac8f198e3acd | 16,768 | py | Python | electrumsv/devices/keepkey/keepkey.py | CherryDT/electrumsv | 6b778b1c363e22286c3e3ef1bc5a2fa56955ac48 | [
"MIT"
] | 1 | 2021-12-28T10:52:11.000Z | 2021-12-28T10:52:11.000Z | electrumsv/devices/keepkey/keepkey.py | SomberNight/electrumsv | 28262e3cab7b73e4960466f8aee252975953acf8 | [
"MIT"
] | null | null | null | electrumsv/devices/keepkey/keepkey.py | SomberNight/electrumsv | 28262e3cab7b73e4960466f8aee252975953acf8 | [
"MIT"
] | null | null | null | # ElectrumSV - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
# Copyright (C) 2019 ElectrumSV developers
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import threading
from bitcoinx import (
BIP32PublicKey, BIP32Derivation, bip32_decompose_chain_string, Address, OP_RETURN_Output,
)
from electrumsv.app_state import app_state
from electrumsv.device import Device
from electrumsv.exceptions import UserCancelled
from electrumsv.i18n import _
from electrumsv.keystore import Hardware_KeyStore
from electrumsv.logs import logs
from electrumsv.networks import Net
from electrumsv.transaction import classify_tx_output
from electrumsv.util import bfh
from ..hw_wallet import HW_PluginBase
# TREZOR initialization methods
TIM_NEW, TIM_RECOVER, TIM_MNEMONIC, TIM_PRIVKEY = range(0, 4)
KEEPKEY_PRODUCT_KEY = 'KeepKey'
NULL_DERIVATION = BIP32Derivation(chain_code=bytes(32), n=0, depth=0, parent_fingerprint=bytes(4))
class KeepKey_KeyStore(Hardware_KeyStore):
    """Keystore backed by a KeepKey hardware device."""
    hw_type = 'keepkey'
    device = KEEPKEY_PRODUCT_KEY

    def get_derivation(self):
        return self.derivation

    def get_client(self, force_pair=True):
        return self.plugin.get_client(self, force_pair)

    def decrypt_message(self, sequence, message, password):
        # KeepKey exposes no message decryption facility to this plugin.
        raise RuntimeError(_('Encryption and decryption are not implemented by {}').format(
            self.device))

    def sign_message(self, sequence, message, password):
        """Sign *message* with the key at (change, index) under our derivation."""
        change, index = sequence
        path = "%s/%d/%d" % (self.get_derivation(), change, index)
        numeric_path = bip32_decompose_chain_string(path)
        client = self.get_client()
        coin = self.plugin.get_coin_name(client)
        return client.sign_message(coin, numeric_path, message).signature

    def sign_transaction(self, tx, password):
        """Have the device sign *tx* in place; no-op if already complete."""
        if tx.is_complete():
            return
        # Map each BIP32 xpub belonging to this keystore to its derivation
        # prefix, so the plugin can reconstruct full key paths.
        master_xpub = self.get_master_public_key()
        xpub_path = {}
        for txin in tx.inputs:
            for x_pubkey in txin.x_pubkeys:
                if x_pubkey.is_bip32_key():
                    xpub = x_pubkey.bip32_extended_key()
                    if xpub == master_xpub:
                        xpub_path[xpub] = self.get_derivation()
        self.plugin.sign_transaction(self, tx, xpub_path)
class KeepKeyPlugin(HW_PluginBase):
MAX_LABEL_LEN = 32
firmware_URL = 'https://www.keepkey.com'
libraries_URL = 'https://github.com/keepkey/python-keepkey'
minimum_firmware = (4, 0, 0)
keystore_class = KeepKey_KeyStore
def __init__(self, name):
super().__init__(name)
try:
from . import client
import keepkeylib
import keepkeylib.ckd_public
self.client_class = client.KeepKeyClient
self.ckd_public = keepkeylib.ckd_public
self.types = keepkeylib.client.types
self.DEVICE_IDS = (KEEPKEY_PRODUCT_KEY,)
self.libraries_available = True
except ImportError:
self.libraries_available = False
self.logger = logs.get_logger("plugin.keepkey")
self.main_thread = threading.current_thread()
def get_coin_name(self, client):
# No testnet support yet
if client.features.major_version < 6:
return "BitcoinCash"
return "BitcoinSV"
def _enumerate_hid(self):
if self.libraries_available:
from keepkeylib.transport_hid import HidTransport
return HidTransport.enumerate()
return []
def _enumerate_web_usb(self):
if self.libraries_available:
from keepkeylib.transport_webusb import WebUsbTransport
return WebUsbTransport.enumerate()
return []
def _get_transport(self, device):
self.logger.debug("Trying to connect over USB...")
if device.path.startswith('web_usb'):
for d in self._enumerate_web_usb():
if self._web_usb_path(d) == device.path:
from keepkeylib.transport_webusb import WebUsbTransport
return WebUsbTransport(d)
else:
for d in self._enumerate_hid():
if str(d[0]) == device.path:
from keepkeylib.transport_hid import HidTransport
return HidTransport(d)
raise RuntimeError(f'device {device} not found')
def _device_for_path(self, path):
return Device(
path=path,
interface_number=-1,
id_=path,
product_key=KEEPKEY_PRODUCT_KEY,
usage_page=0,
transport_ui_string=path,
)
def _web_usb_path(self, device):
return f'web_usb:{device.getBusNumber()}:{device.getPortNumberList()}'
def enumerate_devices(self):
devices = []
for device in self._enumerate_web_usb():
devices.append(self._device_for_path(self._web_usb_path(device)))
for device in self._enumerate_hid():
# Cast needed for older firmware
devices.append(self._device_for_path(str(device[0])))
return devices
def create_client(self, device, handler):
# disable bridge because it seems to never returns if keepkey is plugged
try:
transport = self._get_transport(device)
except Exception as e:
self.logger.error("cannot connect to device")
raise
self.logger.debug("connected to device at %s", device.path)
client = self.client_class(transport, handler, self)
# Try a ping for device sanity
try:
client.ping('t')
except Exception as e:
self.logger.error("ping failed %s", e)
return None
if not client.atleast_version(*self.minimum_firmware):
msg = (_('Outdated {} firmware for device labelled {}. Please '
'download the updated firmware from {}')
.format(self.device, client.label(), self.firmware_URL))
self.logger.error(msg)
handler.show_error(msg)
return None
return client
def get_client(self, keystore, force_pair=True):
client = app_state.device_manager.client_for_keystore(self, keystore, force_pair)
# returns the client for a given keystore. can use xpub
if client:
client.used()
return client
def initialize_device(self, device_id, wizard, handler):
# Initialization method
msg = _("Choose how you want to initialize your {}.\n\n"
"The first two methods are secure as no secret information "
"is entered into your computer.\n\n"
"For the last two methods you input secrets on your keyboard "
"and upload them to your {}, and so you should "
"only do those on a computer you know to be trustworthy "
"and free of malware."
).format(self.device, self.device)
choices = [
# Must be short as QT doesn't word-wrap radio button text
(TIM_NEW, _("Let the device generate a completely new seed randomly")),
(TIM_RECOVER, _("Recover from a seed you have previously written down")),
(TIM_MNEMONIC, _("Upload a BIP39 mnemonic to generate the seed")),
(TIM_PRIVKEY, _("Upload a master private key"))
]
def f(method):
settings = self.request_trezor_init_settings(wizard, method, self.device)
t = threading.Thread(target = self._initialize_device_safe,
args=(settings, method, device_id, wizard, handler))
t.setDaemon(True)
t.start()
wizard.loop.exec_()
wizard.choice_dialog(title=_('Initialize Device'), message=msg,
choices=choices, run_next=f)
def _initialize_device_safe(self, settings, method, device_id, wizard, handler):
exit_code = 0
try:
self._initialize_device(settings, method, device_id, wizard, handler)
except UserCancelled:
exit_code = 1
except Exception as e:
handler.show_error(str(e))
exit_code = 1
finally:
wizard.loop.exit(exit_code)
    def _initialize_device(self, settings, method, device_id, wizard, handler):
        """Drive the chosen on-device initialization protocol.

        *settings* is the (item, label, pin_protection, passphrase_protection)
        tuple collected by the wizard; the meaning of *item* depends on
        *method* (strength/word-count selector, mnemonic text, or xprv).
        """
        item, label, pin_protection, passphrase_protection = settings
        language = 'english'
        client = app_state.device_manager.client_by_id(device_id)
        if method == TIM_NEW:
            # Device generates the seed itself; item selects entropy bits.
            strength = 64 * (item + 2) # 128, 192 or 256
            client.reset_device(True, strength, passphrase_protection,
                                pin_protection, label, language)
        elif method == TIM_RECOVER:
            # Seed words are entered on the device; item selects word count.
            word_count = 6 * (item + 2) # 12, 18 or 24
            client.step = 0
            client.recovery_device(False, word_count, passphrase_protection,
                                   pin_protection, label, language)
        elif method == TIM_MNEMONIC:
            pin = pin_protection # It's the pin, not a boolean
            client.load_device_by_mnemonic(str(item), pin,
                                           passphrase_protection,
                                           label, language)
        else:
            pin = pin_protection # It's the pin, not a boolean
            client.load_device_by_xprv(item, pin, passphrase_protection,
                                       label, language)
    def setup_device(self, device_info, wizard):
        '''Called when creating a new wallet. Select the device to use. If
        the device is uninitialized, go through the initialization
        process.'''
        device_id = device_info.device.id_
        client = app_state.device_manager.client_by_id(device_id)
        client.handler = self.create_handler(wizard)
        if not device_info.initialized:
            self.initialize_device(device_id, wizard, client.handler)
        # Fetch the root xpub once so the client/session is primed.
        client.get_master_public_key('m')
def get_master_public_key(self, device_id, derivation, wizard):
client = app_state.device_manager.client_by_id(device_id)
client.handler = self.create_handler(wizard)
return client.get_master_public_key(derivation)
    def sign_transaction(self, keystore, tx, xpub_path):
        """Sign *tx* on the device and write the signatures back into it.

        *xpub_path* maps each xpub to its derivation string; it is stored
        on self and consumed by tx_inputs when rebuilding BIP32 paths.
        """
        self.xpub_path = xpub_path
        client = self.get_client(keystore)
        inputs = self.tx_inputs(tx)
        outputs = self.tx_outputs(keystore.get_derivation(), tx)
        signatures = client.sign_tx(self.get_coin_name(client), inputs, outputs,
                                    lock_time=tx.locktime)[0]
        tx.update_signatures(signatures)
    def show_address(self, wallet, address):
        """Display *address* on the device screen for user verification."""
        keystore = wallet.get_keystore()
        client = self.get_client(keystore)
        change, index = wallet.get_address_index(address)
        derivation = keystore.derivation
        address_path = "%s/%d/%d"%(derivation, change, index)
        address_n = bip32_decompose_chain_string(address_path)
        script_type = self.types.SPENDADDRESS
        # True => show on the device display (not just return the address).
        client.get_address(Net.KEEPKEY_DISPLAY_COIN_NAME, address_n,
                           True, script_type=script_type)
    def tx_inputs(self, tx):
        """Convert tx.inputs into KeepKey protobuf TxInputType messages."""
        inputs = []
        for txin in tx.inputs:
            txinputtype = self.types.TxInputType()
            # Device expects the previous tx hash byte-reversed.
            txinputtype.prev_hash = bytes(reversed(txin.prev_hash))
            txinputtype.prev_index = txin.prev_idx
            txinputtype.sequence = txin.sequence
            txinputtype.amount = txin.value
            x_pubkeys = txin.x_pubkeys
            if len(x_pubkeys) == 1:
                # Single-sig: full BIP32 path = stored xpub path + suffix.
                x_pubkey = x_pubkeys[0]
                xpub, path = x_pubkey.bip32_extended_key_and_path()
                xpub_n = bip32_decompose_chain_string(self.xpub_path[xpub])
                txinputtype.address_n.extend(xpub_n + path)
                txinputtype.script_type = self.types.SPENDADDRESS
            else:
                def f(x_pubkey):
                    # Normalize either a BIP32 xpub or a raw public key into
                    # an HDNodePathType for the multisig redeem script.
                    if x_pubkey.is_bip32_key():
                        xpub, path = x_pubkey.bip32_extended_key_and_path()
                    else:
                        xpub = BIP32PublicKey(bfh(x_pubkey), NULL_DERIVATION, Net.COIN)
                        xpub = xpub.to_extended_key_string()
                        path = []
                    node = self.ckd_public.deserialize(xpub)
                    return self.types.HDNodePathType(node=node, address_n=path)
                pubkeys = [f(x) for x in x_pubkeys]
                multisig = self.types.MultisigRedeemScriptType(
                    pubkeys=pubkeys,
                    signatures=txin.stripped_signatures_with_blanks(),
                    m=txin.threshold,
                )
                script_type = self.types.SPENDMULTISIG
                # NOTE(review): this rebinds txinputtype, discarding the
                # prev_hash/prev_index/sequence/amount assigned above --
                # confirm those fields are re-populated downstream or this
                # branch drops them (possible bug).
                txinputtype = self.types.TxInputType(
                    script_type=script_type,
                    multisig=multisig
                )
                # find which key is mine
                for x_pubkey in x_pubkeys:
                    if x_pubkey.is_bip32_key():
                        xpub, path = x_pubkey.bip32_extended_key_and_path()
                        if xpub in self.xpub_path:
                            xpub_n = bip32_decompose_chain_string(self.xpub_path[xpub])
                            txinputtype.address_n.extend(xpub_n + path)
                            break
            inputs.append(txinputtype)
        return inputs
    def tx_outputs(self, derivation, tx):
        """Convert tx.outputs into KeepKey protobuf TxOutputType messages.

        At most one output (the first with non-None output_info) is treated
        as the change output and addressed by BIP32 path so the device does
        not ask the user to confirm it.
        """
        outputs = []
        has_change = False
        for tx_output, info in zip(tx.outputs, tx.output_info):
            if info is not None and not has_change:
                has_change = True # no more than one change address
                # info is (index, xpubs, m); index is presumably the
                # (change, n) pair consumed by the "/%d/%d" formats below
                # -- TODO confirm against caller.
                index, xpubs, m = info
                if len(xpubs) == 1:
                    script_type = self.types.PAYTOADDRESS
                    address_n = bip32_decompose_chain_string(derivation + "/%d/%d"%index)
                    txoutputtype = self.types.TxOutputType(
                        amount = tx_output.value,
                        script_type = script_type,
                        address_n = address_n,
                    )
                else:
                    script_type = self.types.PAYTOMULTISIG
                    address_n = bip32_decompose_chain_string("/%d/%d"%index)
                    nodes = [self.ckd_public.deserialize(xpub) for xpub in xpubs]
                    pubkeys = [self.types.HDNodePathType(node=node, address_n=address_n)
                               for node in nodes]
                    multisig = self.types.MultisigRedeemScriptType(
                        pubkeys = pubkeys,
                        signatures = [b''] * len(pubkeys),
                        m = m)
                    txoutputtype = self.types.TxOutputType(
                        multisig = multisig,
                        amount = tx_output.value,
                        address_n = bip32_decompose_chain_string(derivation + "/%d/%d"%index),
                        script_type = script_type)
            else:
                # Regular (non-change) output: by address or OP_RETURN data.
                txoutputtype = self.types.TxOutputType()
                txoutputtype.amount = tx_output.value
                address = classify_tx_output(tx_output)
                if isinstance(address, Address):
                    txoutputtype.script_type = self.types.PAYTOADDRESS
                    txoutputtype.address = address.to_string(coin=Net.COIN)
                elif isinstance(address, OP_RETURN_Output):
                    txoutputtype.script_type = self.types.PAYTOOPRETURN
                    # Skip the leading OP_RETURN + push opcode bytes.
                    txoutputtype.op_return_data = bytes(tx_output.script_pubkey)[2:]
            outputs.append(txoutputtype)
        return outputs
| 41.60794 | 98 | 0.610508 |
bb93c13b08e59983b9818d21e309fde3aa69fe34 | 1,740 | py | Python | pyprobml-master/figgen/daft/deepKF.py | storopoli/Machine-Learning-Probalistic | f8617e7b81f4d6c71e72edc40ba11ac746794a95 | [
"MIT"
] | 1 | 2019-03-04T05:43:10.000Z | 2019-03-04T05:43:10.000Z | figgen/daft/deepKF.py | tungk/pyprobml | fb939b92a61f7d3a7e6c28d0b14f58d99a0b07f1 | [
"MIT"
] | null | null | null | figgen/daft/deepKF.py | tungk/pyprobml | fb939b92a61f7d3a7e6c28d0b14f58d99a0b07f1 | [
"MIT"
] | null | null | null | from matplotlib import rc
import matplotlib.pyplot as plt
rc("font", family="serif", size=12)
rc("text", usetex=True)
#rc("text.latex", preamble=open("macros.tex").read())
#rc('text.latex', preamble="\usepackage{amssymb}\usepackage{amsmath}\usepackage{mathrsfs}")
import os
import imp
# NOTE: imp is deprecated in modern Python; daft is loaded from a vendored copy.
daft = imp.load_source('daft', 'daft-080308/daft.py')
#import daft
folder = "/Users/kpmurphy/github/pyprobml/figures"
# Deep Kalman-filter graphical model on a 4x7 canvas (origin shifted down).
pgm = daft.PGM([4, 7], origin=[0, -1])
# Nodes: previous/predicted/updated graph states, gating/transition matrices,
# hidden-state prior/posterior, predicted output, loss and observations.
pgm.add_node(daft.Node("Gold", r"$G^z_{t-1}$", 1, 3))
pgm.add_node(daft.Node("Gpred", r"$\hat{G}^{z}_{t}$", 2, 3))
pgm.add_node(daft.Node("Gnew", r"$G^z_{t}$", 3, 3))
pgm.add_node(daft.Node("alpha", r"$\alpha_{t}$", 1, 5))
pgm.add_node(daft.Node("A", r"$A_{t}$", 2, 4))
pgm.add_node(daft.Node("C", r"$C_{t}$", 3, 4))
pgm.add_node(daft.Node("priorh", r"$\hat{G}^x_{t}$", 2, 2))
pgm.add_node(daft.Node("posth", r"$G^x_{t}$", 3, 2))
pgm.add_node(daft.Node("px", r"$\hat{p}^y_{t}$", 2, 1))
pgm.add_node(daft.Node("L", r"$L_{t}$", 2, 0))
pgm.add_node(daft.Node("x", r"$o_{t}$", 3, 1))
pgm.add_node(daft.Node("y", r"$y_{t}$", 3, 0))
# Edges encode the model's dependency structure.
pgm.add_edge("Gold", "Gpred", linestyle="-")
pgm.add_edge("Gpred", "Gnew", linestyle="-")
pgm.add_edge("Gold", "alpha", linestyle="-")
pgm.add_edge("alpha", "A", linestyle="-")
pgm.add_edge("alpha", "C", linestyle="-")
pgm.add_edge("A", "Gpred", linestyle="-")
pgm.add_edge("C", "Gnew", linestyle="-")
pgm.add_edge("Gpred", "priorh", linestyle="-")
pgm.add_edge("priorh", "px", linestyle="-")
pgm.add_edge("px", "L", linestyle="-")
pgm.add_edge("y", "L", linestyle="-")
pgm.add_edge("x", "posth", linestyle="-")
pgm.add_edge("posth", "Gnew", linestyle="-")
# Render and save the figure as a PDF.
pgm.render()
fname = "deepKF"
pgm.figure.savefig(os.path.join(folder, "{}.pdf".format(fname)))
31027c39f25d2183deb23c3e8d0e7df3068426f0 | 15,950 | py | Python | service/http/tests/test_cli.py | krishsethi19/dffml | 2dd0a9c4a125a9739d27228128bbd381a8e0fef4 | [
"MIT"
] | null | null | null | service/http/tests/test_cli.py | krishsethi19/dffml | 2dd0a9c4a125a9739d27228128bbd381a8e0fef4 | [
"MIT"
] | null | null | null | service/http/tests/test_cli.py | krishsethi19/dffml | 2dd0a9c4a125a9739d27228128bbd381a8e0fef4 | [
"MIT"
] | null | null | null | import os
import json
import pathlib
import tempfile
import contextlib
from http import HTTPStatus
import aiohttp
from dffml.model.slr import SLRModel
from dffml.source.json import JSONSource
from dffml.accuracy import MeanSquaredErrorAccuracy
from dffml import Record, Features, Feature, save, train, accuracy
from dffml.util.asynctestcase import AsyncTestCase
from dffml_service_http.cli import HTTPService, RedirectFormatError
from dffml_service_http.util.testing import ServerRunner, ServerException
from .dataflow import formatter, HELLO_BLANK_DATAFLOW, HELLO_WORLD_DATAFLOW
class TestCreateTLS(AsyncTestCase):
async def test_create(self):
with tempfile.TemporaryDirectory() as tempdir:
with self.subTest(certs="server"):
await HTTPService.createtls.server.cli(
"-bits",
"1024",
"-key",
os.path.join(tempdir, "server.key"),
"-cert",
os.path.join(tempdir, "server.pem"),
)
self.assertTrue(
os.path.isfile(os.path.join(tempdir, "server.key"))
)
self.assertTrue(
os.path.isfile(os.path.join(tempdir, "server.pem"))
)
with self.subTest(certs="client"):
await HTTPService.createtls.client.cli(
"-bits",
"1024",
"-key",
os.path.join(tempdir, "client.key"),
"-cert",
os.path.join(tempdir, "client.pem"),
"-csr",
os.path.join(tempdir, "client.csr"),
"-server-key",
os.path.join(tempdir, "server.key"),
"-server-cert",
os.path.join(tempdir, "server.pem"),
)
self.assertTrue(
os.path.isfile(os.path.join(tempdir, "client.key"))
)
self.assertTrue(
os.path.isfile(os.path.join(tempdir, "client.pem"))
)
self.assertTrue(
os.path.isfile(os.path.join(tempdir, "client.csr"))
)
class TestServer(AsyncTestCase):
    """Integration tests that boot the HTTP service and hit its endpoints."""
    def url(self, cli):
        # Base URL of the running test server instance.
        return f"http://{cli.addr}:{cli.port}"
    @contextlib.asynccontextmanager
    async def get(self, cli, path):
        """GET *path* on the test server; raise ServerException on non-200."""
        async with aiohttp.ClientSession() as session:
            async with session.get(self.url(cli) + path) as r:
                if r.status != HTTPStatus.OK:
                    raise ServerException((await r.json())["error"])
                yield r
    @contextlib.asynccontextmanager
    async def post(self, cli, path, *args, **kwargs):
        """POST to *path* on the test server; raise ServerException on non-200."""
        async with aiohttp.ClientSession() as session:
            async with session.post(
                self.url(cli) + path, *args, **kwargs
            ) as r:
                if r.status != HTTPStatus.OK:
                    raise ServerException((await r.json())["error"])
                yield r
    async def test_insecure_off_by_default(self):
        # TLS must be on unless explicitly disabled.
        self.assertFalse(HTTPService.server().insecure)
    async def test_start_insecure(self):
        # Server starts without certificates when -insecure is given.
        async with ServerRunner.patch(HTTPService.server) as tserver:
            await tserver.start(
                HTTPService.server.cli("-port", "0", "-insecure")
            )
    async def test_start(self):
        # Server starts with freshly generated TLS key/cert.
        with tempfile.TemporaryDirectory() as tempdir:
            await HTTPService.createtls.server.cli(
                "-bits",
                "2048",
                "-key",
                os.path.join(tempdir, "server.key"),
                "-cert",
                os.path.join(tempdir, "server.pem"),
            )
            async with ServerRunner.patch(HTTPService.server) as tserver:
                await tserver.start(
                    HTTPService.server.cli(
                        "-port",
                        "0",
                        "-key",
                        os.path.join(tempdir, "server.key"),
                        "-cert",
                        os.path.join(tempdir, "server.pem"),
                    )
                )
    async def test_portfile(self):
        # -portfile writes the bound port number to the given file.
        with tempfile.TemporaryDirectory() as tempdir:
            portfile_path = pathlib.Path(tempdir, "portfile.int")
            async with ServerRunner.patch(HTTPService.server) as tserver:
                cli = await tserver.start(
                    HTTPService.server.cli(
                        "-insecure",
                        "-port",
                        "0",
                        "-portfile",
                        str(portfile_path),
                    )
                )
                self.assertTrue(portfile_path.is_file())
                self.assertEqual(cli.port, int(portfile_path.read_text()))
    async def test_mc_config(self):
        """Multicomm + dataflow configs on disk register working HTTP routes."""
        with tempfile.TemporaryDirectory() as tempdir:
            # URLs for endpoints
            hello_world_url: str = "/hello/world"
            hello_blank_url: str = "/hello/blank"
            # Create the required directory structure
            # Create directories for multicomm, dataflow, and dataflow overrides
            pathlib.Path(tempdir, "mc").mkdir()
            pathlib.Path(tempdir, "mc", "http").mkdir()
            pathlib.Path(tempdir, "df").mkdir()
            # TODO split config part of dataflow into separate directory
            pathlib.Path(tempdir, "config").mkdir()
            # Write out multicomm configs
            pathlib.Path(tempdir, "mc", "http", "hello_world.json").write_text(
                json.dumps(
                    {
                        "path": hello_world_url,
                        "output_mode": "json",
                        "asynchronous": False,
                    },
                    sort_keys=True,
                    indent=4,
                )
            )
            pathlib.Path(tempdir, "mc", "http", "hello_blank.json").write_text(
                json.dumps(
                    {
                        "path": hello_blank_url,
                        "output_mode": "json",
                        "asynchronous": False,
                    },
                    sort_keys=True,
                    indent=4,
                )
            )
            # Write out dataflow configs
            pathlib.Path(tempdir, "df", "hello_world.json").write_text(
                json.dumps(
                    HELLO_WORLD_DATAFLOW.export(), sort_keys=True, indent=4
                )
            )
            pathlib.Path(tempdir, "df", "hello_blank.json").write_text(
                json.dumps(
                    HELLO_BLANK_DATAFLOW.export(), sort_keys=True, indent=4
                )
            )
            # Start the server
            async with ServerRunner.patch(HTTPService.server) as tserver:
                cli = await tserver.start(
                    HTTPService.server.cli(
                        "-port",
                        "0",
                        "-insecure",
                        "-mc-config",
                        tempdir,
                        "-mc-atomic",
                    )
                )
                self.assertEqual(cli.mc_config, tempdir)
                # Verify routes were registered and perform as expected
                message: str = "Hello World"
                with self.subTest(test=message):
                    # Check that hello world works
                    async with self.get(cli, hello_world_url) as response:
                        self.assertEqual(
                            {"response": message},
                            list((await response.json()).values())[0],
                        )
                # Check that hello blank works
                message: str = "Hello Feedface"
                with self.subTest(test=message):
                    async with self.post(
                        cli,
                        hello_blank_url,
                        json={
                            "Feedface": [
                                {
                                    "value": "Feedface",
                                    "definition": formatter.op.inputs[
                                        "data"
                                    ].name,
                                }
                            ]
                        },
                    ) as response:
                        self.assertEqual(
                            {"Feedface": {"response": message}},
                            await response.json(),
                        )
    async def test_models(self):
        """A pre-loaded, pre-trained model serves predictions over HTTP."""
        with tempfile.TemporaryDirectory() as tempdir:
            # Model the HTTP API will pre-load
            model = SLRModel(
                features=Features(Feature("f1", float, 1)),
                predict=Feature("ans", int, 1),
                location=tempdir,
            )
            # y = m * x + b for equation SLR is solving for
            m = 5
            b = 3
            # Train the model
            await train(
                model, *[{"f1": x, "ans": m * x + b} for x in range(0, 10)]
            )
            await accuracy(
                model,
                MeanSquaredErrorAccuracy(),
                Feature("ans", int, 1),
                *[{"f1": x, "ans": m * x + b} for x in range(10, 20)],
            )
            async with ServerRunner.patch(HTTPService.server) as tserver:
                cli = await tserver.start(
                    HTTPService.server.cli(
                        "-insecure",
                        "-port",
                        "0",
                        "-models",
                        "mymodel=slr",
                        "-model-mymodel-location",
                        tempdir,
                        "-model-mymodel-features",
                        "f1:float:1",
                        "-model-mymodel-predict",
                        "ans:int:1",
                    )
                )
                async with self.post(
                    cli,
                    f"/model/mymodel/predict/0",
                    json={
                        f"record_{x}": {"features": {"f1": x}}
                        for x in range(20, 30)
                    },
                ) as response:
                    response = await response.json()
                    records = response["records"]
                    self.assertEqual(len(records), 10)
                    for record in records.values():
                        # Predictions should be close to m*x + b.
                        should_be = m * record["features"]["f1"] + b
                        prediction = record["prediction"]["ans"]["value"]
                        percent_error = abs(should_be - prediction) / should_be
                        self.assertLess(percent_error, 0.2)
    async def test_sources(self):
        """A pre-loaded source serves its records over HTTP."""
        with tempfile.TemporaryDirectory() as tempdir:
            # Source the HTTP API will pre-load
            source = JSONSource(
                filename=pathlib.Path(tempdir, "source.json"),
                allowempty=True,
                readwrite=True,
            )
            # Record the source will have in it
            myrecord = Record("myrecord", data={"features": {"f1": 0}})
            await save(source, myrecord)
            async with ServerRunner.patch(HTTPService.server) as tserver:
                cli = await tserver.start(
                    HTTPService.server.cli(
                        "-insecure",
                        "-port",
                        "0",
                        "-sources",
                        "mysource=json",
                        "-source-mysource-filename",
                        str(source.config.filename),
                    )
                )
                async with self.get(
                    cli, "/source/mysource/record/myrecord"
                ) as r:
                    self.assertEqual(await r.json(), myrecord.export())
    async def test_scorer(self):
        """A pre-loaded scorer scores a trained model against a source."""
        with tempfile.TemporaryDirectory() as tempdir:
            model = SLRModel(
                features=Features(Feature("f1", float, 1)),
                predict=Feature("ans", int, 1),
                location=tempdir,
            )
            # y = m * x + b for equation SLR is solving for
            m = 5
            b = 3
            # Train the model
            await train(
                model, *[{"f1": x, "ans": m * x + b} for x in range(0, 10)]
            )
            source = JSONSource(
                filename=pathlib.Path(tempdir, "source.json"),
                allowempty=True,
                readwrite=True,
            )
            # Record the source will have in it
            await save(
                source,
                *[
                    Record(
                        str(i),
                        data={"features": {"f1": x, "ans": (m * x) + b}},
                    )
                    for i, x in enumerate(range(10, 20))
                ],
            )
            async with ServerRunner.patch(HTTPService.server) as tserver:
                cli = await tserver.start(
                    HTTPService.server.cli(
                        "-insecure",
                        "-port",
                        "0",
                        "-models",
                        "mymodel=slr",
                        "-model-mymodel-location",
                        tempdir,
                        "-model-mymodel-features",
                        "f1:float:1",
                        "-model-mymodel-predict",
                        "ans:int:1",
                        "-features",
                        "ans:int:1",
                        "-sources",
                        "mysource=json",
                        "-source-mysource-filename",
                        str(source.config.filename),
                        "-scorers",
                        "myscorer=mse",
                    )
                )
                async with self.post(
                    cli, "/scorer/myscorer/mymodel/score", json=["mysource"]
                ) as r:
                    # Data matches the trained line exactly, so MSE is 0.
                    self.assertEqual(await r.json(), {"accuracy": 0.0})
    async def test_redirect_format_error(self):
        # Malformed -redirect args (missing METHOD) raise RedirectFormatError.
        with self.assertRaises(RedirectFormatError):
            async with ServerRunner.patch(HTTPService.server) as tserver:
                await tserver.start(
                    # Missing METHOD
                    HTTPService.server.cli(
                        "-insecure",
                        "-port",
                        "0",
                        "-redirect",
                        "/",
                        "/index.html",
                    )
                )
    async def test_redirect(self):
        # -redirect maps request paths onto static files.
        with tempfile.TemporaryDirectory() as tempdir:
            pathlib.Path(tempdir, "index.html").write_text("Hello World")
            pathlib.Path(tempdir, "mysignup").write_text("MySignUp")
            async with ServerRunner.patch(HTTPService.server) as tserver:
                cli = await tserver.start(
                    HTTPService.server.cli(
                        "-insecure",
                        "-port",
                        "0",
                        "-static",
                        tempdir,
                        "-redirect",
                        "GET",
                        "/",
                        "/index.html",
                        "GET",
                        "/signup",
                        "/mysignup",
                    )
                )
                async with self.get(cli, "/") as r:
                    self.assertEqual(await r.text(), "Hello World")
                async with self.get(cli, "/signup") as r:
                    self.assertEqual(await r.text(), "MySignUp")
82ee52edc451f5c26c7b801c93b24978589df877 | 3,189 | py | Python | automix/rules/veireTransitionsRule.py | MZehren/Automix | dfaa00a9e7c5c0938c0a9d275c07f3a3e5f87e43 | [
"MIT"
] | 18 | 2020-07-20T01:51:40.000Z | 2022-02-25T07:32:11.000Z | automix/rules/veireTransitionsRule.py | MZehren/Automix | dfaa00a9e7c5c0938c0a9d275c07f3a3e5f87e43 | [
"MIT"
] | 2 | 2021-03-23T03:26:02.000Z | 2021-07-19T12:51:25.000Z | automix/rules/veireTransitionsRule.py | MZehren/Automix | dfaa00a9e7c5c0938c0a9d275c07f3a3e5f87e43 | [
"MIT"
] | 5 | 2021-01-03T15:34:28.000Z | 2022-02-22T06:07:06.000Z | import numpy as np
from automix.rules.rule import Rule
from automix.featureExtraction.lowLevel import CoreFinder
from automix.model.classes.signal import Signal
class VeireTransitionRule(Rule):
    """
    The transition occurs on specific locations in the mix:
    double drop: on each track the transition is on a down to up
    A\B  dd - du - ud - uu
    dd
    du             x
    ud   x**
    uu        x*
    * the track A needs a ud 16 bars after the uu
    ** the track B needs to start 16 bars before the cue
    """
    def run(self, mix, boundaries, switches):
        """Score each switch 1 when it matches a double-drop, rolling or
        relaxed transition pattern; return the mean score over all switches
        (0 when fewer than two tracks are involved)."""
        tracks = Rule.getTracks(mix, boundaries)
        # cf = CoreFinder(parameterIncludeBorders=False)
        # for A, B, switchPositions in [(tracks[i], tracks[i + 1], switches[i]) for i in range(len(switches))]:
        #     start, switch, stop = switchPositions
        #     # compute if it's a core in all the segments (start-switch and switch-stop) are full segments or not
        #     # TODO don't call the predict one, it's too slow
        #     aCore, bCore = [
        #         cf.predictOne(track.features["samples"],
        #                       Signal(1, times=[track.getTrackTime(start),
        #                                        track.getTrackTime(switch),
        #                                        track.getTrackTime(stop)]))[0] for track in [A, B]
        #     ]
        #     isDoubleDrop = not aCore[0] and aCore[1] and not bCore[0] and bCore[1]
        #     isRolling = aCore[0] and aCore[1] and not bCore[0] and bCore[1] # TODO: implement the aCore[2] == False
        #     isRelaxed = aCore[0] and not aCore[1] and not bCore[0] and not aCore[1] #TODO: implement the aCore[0] == start of the track
        #     if isDoubleDrop or isRolling or isRelaxed:
        #         return 1
        #     return 0
        if len(tracks) < 2:
            return 0
        scores = []
        for A, B, switchPositions in [(tracks[i], tracks[i + 1], switches[i]) for i in range(len(switches))]:
            start, switch, stop = switchPositions
            # Sample each track's "core" feature at start/switch/stop, in
            # that track's own timeline.
            coreStartA, coreSwitchA, coreStopA = [
                A.features["core"].getValue(A.getTrackTime(time), toleranceWindow=0.1) for time in switchPositions
            ]
            coreStartB, coreSwitchB, coreStopB = [
                B.features["core"].getValue(B.getTrackTime(time), toleranceWindow=0.1) for time in switchPositions
            ]
            isDoubleDrop = not coreStartA and not coreStartB and coreSwitchA and coreSwitchB
            isRolling = coreStartA and coreSwitchA and not coreStopA and not coreStartB and coreSwitchB and coreStopB
            isRelaxed = coreStartA and not coreSwitchA and not coreStopA and not coreStartB and not coreSwitchB #TODO and start of the song here
            if isDoubleDrop:
                self.description = "Double drop"
                scores.append(1)
            elif isRolling:
                self.description = "Rolling"
                scores.append(1)
            elif isRelaxed:
                self.description = "Relaxed"
                scores.append(1)
            else:
                scores.append(0)
        return np.mean(scores)
cea2d137622b00efce97da1d3f1288cfcb473820 | 972 | py | Python | mostPopularMovie.py | kratikaswami/Spark-Projects | b1e2ecc561b69aee76ad42fdbb01d89ff582a9de | [
"Apache-2.0"
] | null | null | null | mostPopularMovie.py | kratikaswami/Spark-Projects | b1e2ecc561b69aee76ad42fdbb01d89ff582a9de | [
"Apache-2.0"
] | null | null | null | mostPopularMovie.py | kratikaswami/Spark-Projects | b1e2ecc561b69aee76ad42fdbb01d89ff582a9de | [
"Apache-2.0"
] | null | null | null | from pyspark import SparkConf, SparkContext
def loadMovieNames():
    """Read the MovieLens u.item file and map movie id -> title."""
    with open("path/u.item", encoding="ISO-8859-1") as f:
        return {int(parts[0]): parts[1]
                for parts in (line.split('|') for line in f)}
def parsedLines(lines):
    """Extract the movie-id column (2nd whitespace field) of a u.data row."""
    return lines.split()[1]
# Spark driver: local mode, broadcast the id->title lookup to all workers.
conf = SparkConf().setMaster("local").setAppName("Most Popular movie in the data set")
sc = SparkContext(conf = conf)
nameDict = sc.broadcast(loadMovieNames())
# NOTE: `input` shadows the builtin of the same name.
input = sc.textFile("path/u.data")
rdd = input.map(parsedLines)
# Count ratings per movie id, then sort ascending by count.
ratings = rdd.map(lambda x: (x,1)).reduceByKey(lambda x,y:x+y)
flipped = ratings.map(lambda xy : (xy[1],xy[0]))
sortedMovies = flipped.sortByKey()
sortedMoviesWithNames = sortedMovies.map(lambda countMovie: (nameDict.value[countMovie[0]], countMovie[1]))
results = sortedMoviesWithNames.collect()
for result in results:
    # print(str(result[0])+" "+ str(result[1]))
    print(result)
8f640e9791895eded84e04d93f00fd45920e8177 | 9,446 | py | Python | demos/s3/s3server.py | mstojcevich/cyclone | fefdc51dbf25b7470467fc8db6cf96d08191cf28 | [
"Apache-2.0"
] | 254 | 2015-01-01T18:48:46.000Z | 2022-02-24T07:01:15.000Z | demos/s3/s3server.py | mstojcevich/cyclone | fefdc51dbf25b7470467fc8db6cf96d08191cf28 | [
"Apache-2.0"
] | 37 | 2015-01-02T10:57:40.000Z | 2020-12-17T21:55:56.000Z | demos/s3/s3server.py | mstojcevich/cyclone | fefdc51dbf25b7470467fc8db6cf96d08191cf28 | [
"Apache-2.0"
] | 64 | 2015-01-01T18:52:47.000Z | 2022-03-19T21:29:48.000Z | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# port to cyclone: took out ioloop initialization, fixed imports and created
# a .tac file.
# gleicon 04/10
"""Implementation of an S3-like storage server based on local files.
Useful to test features that will eventually run on S3, or if you want to
run something locally that was once running on S3.
We don't support all the features of S3, but it does work with the
standard S3 client for the most basic semantics. To use the standard
S3 client with this module:
c = S3.AWSAuthConnection("", "", server="localhost", port=8888,
is_secure=False)
c.create_bucket("mybucket")
c.put("mybucket", "mykey", "a value")
print c.get("mybucket", "mykey").body
"""
from cyclone import escape
from cyclone import web
from twisted.python import log
import datetime
import bisect
import hashlib
import os
import os.path
import urllib
class S3Application(web.Application):
    """Implementation of an S3-like storage server based on local files.

    If bucket depth is given, we break files up into multiple directories
    to prevent hitting file system limits for number of files in each
    directories. 1 means one level of directories, 2 means 2, etc.
    """
    def __init__(self, root_directory, bucket_depth=0):
        # Routes: root listing, object access, bucket access.
        web.Application.__init__(self, [
            (r"/", RootHandler),
            (r"/([^/]+)/(.+)", ObjectHandler),
            (r"/([^/]+)/", BucketHandler),
        ])
        # Resolve and create the storage root up front.
        self.directory = os.path.abspath(root_directory)
        if not os.path.exists(self.directory):
            os.makedirs(self.directory)
        self.bucket_depth = bucket_depth
class BaseRequestHandler(web.RequestHandler):
    """Shared helpers: S3-style XML responses and on-disk object paths.

    Note: this module is Python 2 code (basestring/long/iteritems).
    """
    # SUPPORTED_METHODS = ("PUT", "GET", "DELETE", "HEAD")
    def render_xml(self, value):
        """Finish the request with *value* rendered as an S3 XML document.

        *value* must be a single-key dict: {root_element_name: payload}.
        """
        assert isinstance(value, dict) and len(value) == 1
        self.set_header("Content-Type", "application/xml; charset=UTF-8")
        name = value.keys()[0]
        parts = []
        parts.append('<' + escape.utf8(name) +
                     ' xmlns="http://doc.s3.amazonaws.com/2006-03-01">')
        self._render_parts(value.values()[0], parts)
        parts.append('</' + escape.utf8(name) + '>')
        self.finish('<?xml version="1.0" encoding="UTF-8"?>\n' +
                    ''.join(parts))

    def _render_parts(self, value, parts=None):
        """Recursively append XML fragments for *value* to *parts*.

        Handles strings, ints, datetimes and (possibly nested) dicts whose
        values may be lists.  The accumulator defaults to a fresh list per
        call; the previous ``parts=[]`` default was a shared mutable that
        would leak fragments across calls if any caller omitted the
        argument.
        """
        if parts is None:
            parts = []
        if isinstance(value, basestring):
            parts.append(escape.xhtml_escape(value))
        elif isinstance(value, int) or isinstance(value, long):
            parts.append(str(value))
        elif isinstance(value, datetime.datetime):
            # S3 timestamps are UTC with (zeroed) millisecond precision.
            parts.append(value.strftime("%Y-%m-%dT%H:%M:%S.000Z"))
        elif isinstance(value, dict):
            for name, subvalue in value.iteritems():
                if not isinstance(subvalue, list):
                    subvalue = [subvalue]
                for subsubvalue in subvalue:
                    parts.append('<' + escape.utf8(name) + '>')
                    self._render_parts(subsubvalue, parts)
                    parts.append('</' + escape.utf8(name) + '>')
        else:
            raise Exception("Unknown S3 value type %r", value)

    def _object_path(self, bucket, object_name):
        """Map (bucket, key) to an absolute file path.

        When bucket_depth > 0 the key is sharded into nested directories
        named by growing prefixes of its MD5 hex digest to avoid huge
        flat directories.
        """
        if self.application.bucket_depth < 1:
            return os.path.abspath(os.path.join(
                self.application.directory, bucket, object_name))
        hash = hashlib.md5(object_name).hexdigest()
        path = os.path.abspath(os.path.join(
            self.application.directory, bucket))
        for i in range(self.application.bucket_depth):
            path = os.path.join(path, hash[:2 * (i + 1)])
        return os.path.join(path, object_name)
class RootHandler(BaseRequestHandler):
    """GET / -- list all buckets (directories under the storage root)."""
    def get(self):
        names = os.listdir(self.application.directory)
        buckets = []
        for name in names:
            path = os.path.join(self.application.directory, name)
            info = os.stat(path)
            buckets.append({
                "Name": name,
                "CreationDate": datetime.datetime.utcfromtimestamp(
                    info.st_ctime),
            })
        self.render_xml({"ListAllMyBucketsResult": {
            "Buckets": {"Bucket": buckets},
        }})
class BucketHandler(BaseRequestHandler):
    """GET lists a bucket's keys; PUT creates a bucket; DELETE removes it."""
    def get(self, bucket_name):
        """List object keys, honouring prefix/marker/max-keys pagination."""
        prefix = self.get_argument("prefix", u"")
        marker = self.get_argument("marker", u"")
        max_keys = int(self.get_argument("max-keys", 50000))
        path = os.path.abspath(os.path.join(self.application.directory,
                                            bucket_name))
        terse = int(self.get_argument("terse", 0))
        # Reject paths that escape the storage root (e.g. via "..").
        if not path.startswith(self.application.directory) or \
                not os.path.isdir(path):
            raise web.HTTPError(404)
        object_names = []
        for root, dirs, files in os.walk(path):
            for file_name in files:
                object_names.append(os.path.join(root, file_name))
        # Strip the bucket path plus any hash-shard directory components.
        skip = len(path) + 1
        for i in range(self.application.bucket_depth):
            skip += 2 * (i + 1) + 1
        object_names = [n[skip:] for n in object_names]
        object_names.sort()
        contents = []
        start_pos = 0
        if marker:
            start_pos = bisect.bisect_right(object_names, marker, start_pos)
        if prefix:
            start_pos = bisect.bisect_left(object_names, prefix, start_pos)
        truncated = False
        for object_name in object_names[start_pos:]:
            if not object_name.startswith(prefix):
                break
            if len(contents) >= max_keys:
                truncated = True
                break
            object_path = self._object_path(bucket_name, object_name)
            c = {"Key": object_name}
            if not terse:
                # terse mode skips the per-object stat for speed.
                info = os.stat(object_path)
                c.update({
                    "LastModified": datetime.datetime.utcfromtimestamp(
                        info.st_mtime),
                    "Size": info.st_size,
                })
            contents.append(c)
            marker = object_name
        self.render_xml({"ListBucketResult": {
            "Name": bucket_name,
            "Prefix": prefix,
            "Marker": marker,
            "MaxKeys": max_keys,
            "IsTruncated": truncated,
            "Contents": contents,
        }})
    def put(self, bucket_name):
        """Create the bucket directory; 403 if it already exists."""
        log.msg('bruxao')
        path = os.path.abspath(os.path.join(
            self.application.directory, bucket_name))
        if not path.startswith(self.application.directory) or \
                os.path.exists(path):
            raise web.HTTPError(403)
        os.makedirs(path)
        self.finish()
    def delete(self, bucket_name):
        """Delete an empty bucket; 403 if non-empty, 404 if missing."""
        path = os.path.abspath(os.path.join(
            self.application.directory, bucket_name))
        if not path.startswith(self.application.directory) or \
                not os.path.isdir(path):
            raise web.HTTPError(404)
        if len(os.listdir(path)) > 0:
            raise web.HTTPError(403)
        os.rmdir(path)
        self.set_status(204)
        self.finish()
class ObjectHandler(BaseRequestHandler):
    """GET/PUT/DELETE for a single object inside a bucket."""
    def get(self, bucket, object_name):
        """Stream an object's contents; 404 when missing or path escapes."""
        object_name = urllib.unquote(object_name)
        path = self._object_path(bucket, object_name)
        if not path.startswith(self.application.directory) or \
                not os.path.isfile(path):
            raise web.HTTPError(404)
        info = os.stat(path)
        self.set_header("Content-Type", "application/unknown")
        self.set_header("Last-Modified", datetime.datetime.utcfromtimestamp(
            info.st_mtime))
        object_file = open(path, "r")
        try:
            self.finish(object_file.read())
        finally:
            object_file.close()
    def put(self, bucket, object_name):
        """Store the request body as the object, creating shard dirs."""
        object_name = urllib.unquote(object_name)
        bucket_dir = os.path.abspath(os.path.join(
            self.application.directory, bucket))
        if not bucket_dir.startswith(self.application.directory) or \
                not os.path.isdir(bucket_dir):
            raise web.HTTPError(404)
        path = self._object_path(bucket, object_name)
        # Refuse writes outside the bucket or onto a directory.
        if not path.startswith(bucket_dir) or os.path.isdir(path):
            raise web.HTTPError(403)
        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        object_file = open(path, "w")
        object_file.write(self.request.body)
        object_file.close()
        self.finish()
    def delete(self, bucket, object_name):
        """Remove an object; 404 when missing. Responds 204 No Content."""
        object_name = urllib.unquote(object_name)
        path = self._object_path(bucket, object_name)
        if not path.startswith(self.application.directory) or \
                not os.path.isfile(path):
            raise web.HTTPError(404)
        os.unlink(path)
        self.set_status(204)
        self.finish()
| 37.484127 | 76 | 0.602583 |
6bcf6b0ed097fdea2e98edc3650b36f355235eb8 | 2,704 | py | Python | weixin/weixin.py | YifeiYang210/crawler_html2pdf | acbf8abee631606fe931d8188a4bd2f4dd6f0b47 | [
"Apache-2.0"
] | 710 | 2017-02-09T10:05:56.000Z | 2019-12-06T08:12:29.000Z | weixin/weixin.py | zhuxuda/python_scripts | e442c25494489a9d75190e985426827f1c18e9bc | [
"Apache-2.0"
] | 33 | 2017-02-10T10:03:33.000Z | 2019-07-18T13:45:19.000Z | weixin/weixin.py | zhuxuda/python_scripts | e442c25494489a9d75190e985426827f1c18e9bc | [
"Apache-2.0"
] | 627 | 2017-02-10T05:04:56.000Z | 2019-12-12T07:53:14.000Z | # encoding: utf-8
import json
import re
import time
from http.cookies import SimpleCookie
import jieba.analyse
import matplotlib.pyplot as plt
import requests
from scipy.misc import imread
from wordcloud import WordCloud
from pymongo import MongoClient
class Conn(object):
    """Thin class-level wrapper around the local 'weixin-comment' MongoDB."""

    client = MongoClient('localhost', 27017)
    db = client['weixin-comment']

    @classmethod
    def insert_many(cls, data):
        """Bulk-insert comment documents into the 'comments' collection."""
        cls.db['comments'].insert_many(data)

    @classmethod
    def query(cls):
        """Return a cursor over every stored comment document."""
        return cls.db['comments'].find()
# Module-level singletons shared by crawler()/display() below.
conn = Conn()
# Raw Cookie header copied from a logged-in mp.weixin.qq.com browser session.
raw_cookie = """
gsScrollPos-5517=; tvfe_boss_uuid=9c139f72f8ae693f; pac_uid=1_253421576; pgv_pvi=5182785536; RK=0IMfVbYuWK;
"""
cookie = SimpleCookie(raw_cookie)
# Flatten the parsed cookie jar into the plain dict that requests expects.
requests_cookies = dict([(c, cookie[c].value) for c in cookie])
def main():
    """Crawl both comment streams (ordinary and editor-selected) of one article."""
    # Totals: ordinary comments, editor-selected comments.
    normal_count, selected_count = 141, 100
    # Endpoint for ordinary comments (type=0).
    normal_url = "https://mp.weixin.qq.com/misc/appmsgcomment?" \
                 "action=list_comment&" \
                 "mp_version=7&" \
                 "type=0&" \
                 "comment_id=2881104117&" \
                 "begin={begin}&" \
                 "count=10&" \
                 "token=1300595798&" \
                 "lang=zh_CN"
    # Endpoint for editor-selected comments (type=1).
    selected_url = "https://mp.weixin.qq.com/misc/appmsgcomment?action=list_comment&mp_version=7&type=1" \
                   "&begin={begin}&count=10&comment_id=2881104117&token=1300595798&lang=zh_CN"
    # NOTE(review): this pairs normal_count with selected_url and
    # selected_count with normal_url -- looks swapped; confirm intent
    # before relying on the per-stream counts.
    dd = dict([(normal_count, selected_url), (selected_count, normal_url)])
    for k, v in dd.items():
        crawler(k, v)
def crawler(count, url):
    """Page through *count* comments, 10 per request, persisting each page.

    Each response embeds a JSON array under the "comment" key; the array
    is extracted with a regex and bulk-inserted into MongoDB via ``conn``.

    :param count: total number of comments available on this endpoint.
    :param url: endpoint template with a ``{begin}`` placeholder.
    """
    for i in range(0, count, 10):
        r = requests.get(url.format(begin=i), cookies=requests_cookies)
        match = re.search(r'"comment":(\[\{.*\}\])', r.text, re.S)
        if match:
            # json.loads() lost its "encoding" argument in Python 3.9;
            # match.group(1) is already a str, so no decode step is needed.
            data = json.loads(match.group(1))
            conn.insert_many(data)
        # Be polite to the endpoint between pages.
        time.sleep(1)
def display():
    """Yield the raw text of every comment stored in MongoDB."""
    # Stream straight out of the cursor; missing keys yield None.
    for document in conn.query():
        yield document.get("content")
def word_segment(texts):
    """Tokenize each text and yield its top-20 keywords joined by spaces."""
    # Load the stop-word list once, before processing any text.
    jieba.analyse.set_stop_words("./stopwords.txt")
    for passage in texts:
        keywords = jieba.analyse.extract_tags(passage, topK=20)
        yield " ".join(keywords)
def generate_img(texts):
    """Render a word cloud from *texts* and save it to ./wordcloud.jpg."""
    # Join every fragment into one corpus string.
    data = " ".join(text for text in texts)
    # NOTE(review): scipy.misc.imread was deprecated in SciPy 1.0 and
    # removed in 1.2 -- this only runs on old SciPy (modern replacement
    # is imageio.imread).
    mask_img = imread('./python-logo.png', flatten=True)
    wordcloud = WordCloud(
        font_path='/Library/Fonts//华文黑体.ttf',
        background_color='white',
        mask=mask_img
    ).generate(data)
    plt.imshow(wordcloud)
    plt.axis('off')
    plt.savefig('./wordcloud.jpg', dpi=600)
if __name__ == '__main__':
    main()
    # Word-cloud generation step (disabled by default):
    # generate_img(word_segment(display()))
| 24.581818 | 107 | 0.614645 |
8c09ad963573e7c4a489f75a190ba36193247d1f | 1,911 | py | Python | QLabel/demo01.py | Wananbrstl/my_pyqt5_test | 68f5ccf09fd5e96a3e1cb4118b49bc53f6a75822 | [
"MIT"
] | 1 | 2021-07-27T14:13:43.000Z | 2021-07-27T14:13:43.000Z | QLabel/demo01.py | Wananbrstl/my_pyqt5_test | 68f5ccf09fd5e96a3e1cb4118b49bc53f6a75822 | [
"MIT"
] | null | null | null | QLabel/demo01.py | Wananbrstl/my_pyqt5_test | 68f5ccf09fd5e96a3e1cb4118b49bc53f6a75822 | [
"MIT"
] | null | null | null | #! /home/lbn/miniconda3/envs/pyqt/bin/python
# encoding : utf-8
import sys
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
class my_label(QLabel):
    """QLabel that shows Data/Images/head.jpg cropped to a rounded shape."""

    def __init__(self, *args, antiliasing=True, **kwargs):
        """Build the rounded pixmap once and install it on the label.

        :param antiliasing: enable smooth edges on the rounded clip path.
        """
        super(my_label, self).__init__(*args, **kwargs)
        self.Antialiasing = antiliasing
        self.setFixedSize(200, 200)
        self.radius = 100
        # Off-screen pixmap that the rounded image is painted into.
        self.target = QPixmap(self.size())
        self.target.fill(Qt.transparent)
        p = QPixmap("Data/Images/head.jpg").scaled(
            200, 200, Qt.KeepAspectRatioByExpanding, Qt.SmoothTransformation)
        painter = QPainter(self.target)
        # Render hints must be set *before* drawing -- the original enabled
        # them after drawPixmap()/setPixmap(), where they had no effect.
        if self.Antialiasing:
            painter.setRenderHint(QPainter.Antialiasing, True)
            painter.setRenderHint(QPainter.HighQualityAntialiasing, True)
            painter.setRenderHint(QPainter.SmoothPixmapTransform, True)
        path = QPainterPath()
        path.addRoundedRect(0, 0, self.width(), self.height(),
                            self.radius, self.radius)
        painter.setClipPath(path)
        painter.drawPixmap(0, 0, p)
        # Finish painting explicitly before handing the pixmap to the label.
        painter.end()
        self.setPixmap(self.target)
if __name__ == '__main__':
    # Minimal demo window hosting a single rounded-avatar label.
    app = QApplication(sys.argv)
    wind = QWidget()
    wind.setWindowTitle("Circle test")
    layout = QHBoxLayout(wind)
    label = my_label()
    layout.addWidget(label)
    wind.show()
    sys.exit(app.exec_())
| 34.745455 | 116 | 0.658817 |
77fb2e701987e765b89e98f4f241c9281bf5381d | 4,192 | py | Python | airflow/providers/datadog/sensors/datadog.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | [
"Apache-2.0"
] | 15,947 | 2019-01-05T13:51:02.000Z | 2022-03-31T23:33:16.000Z | airflow/providers/datadog/sensors/datadog.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | [
"Apache-2.0"
] | 14,603 | 2019-01-05T09:43:19.000Z | 2022-03-31T23:11:59.000Z | airflow/providers/datadog/sensors/datadog.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | [
"Apache-2.0"
] | 8,429 | 2019-01-05T19:45:47.000Z | 2022-03-31T22:13:01.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any, Callable, Dict, List, Optional
from datadog import api
from airflow.exceptions import AirflowException
from airflow.providers.datadog.hooks.datadog import DatadogHook
from airflow.sensors.base import BaseSensorOperator
class DatadogSensor(BaseSensorOperator):
    """
    A sensor to listen, with a filter, to datadog event streams and determine
    if some event was emitted.
    Depends on the datadog API, which has to be deployed on the same server where
    Airflow runs.
    :param datadog_conn_id: The connection to datadog, containing metadata for api keys.
    :type datadog_conn_id: str
    :param from_seconds_ago: POSIX timestamp start (default 3600).
    :type from_seconds_ago: int
    :param up_to_seconds_from_now: POSIX timestamp end (default 0).
    :type up_to_seconds_from_now: int
    :param priority: Priority of your events, either low or normal.
    :type priority: Optional[str]
    :param sources: A comma separated list indicating what tags, if any,
        should be used to filter the list of monitors by scope
    :type sources: Optional[str]
    :param tags: Get datadog events from specific sources.
    :type tags: Optional[List[str]]
    :param response_check: A check against the ‘requests’ response object. The callable takes
        the response object as the first positional argument and optionally any number of
        keyword arguments available in the context dictionary. It should return True for
        ‘pass’ and False otherwise.
    :param response_check: Optional[Callable[[Dict[str, Any]], bool]]
    """

    # Color of this operator's node in the Airflow UI.
    ui_color = '#66c3dd'
    def __init__(
        self,
        *,
        datadog_conn_id: str = 'datadog_default',
        from_seconds_ago: int = 3600,
        up_to_seconds_from_now: int = 0,
        priority: Optional[str] = None,
        sources: Optional[str] = None,
        tags: Optional[List[str]] = None,
        response_check: Optional[Callable[[Dict[str, Any]], bool]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.datadog_conn_id = datadog_conn_id
        self.from_seconds_ago = from_seconds_ago
        self.up_to_seconds_from_now = up_to_seconds_from_now
        self.priority = priority
        self.sources = sources
        self.tags = tags
        self.response_check = response_check
    def poke(self, context: Dict[str, Any]) -> bool:
        # This instantiates the hook, but doesn't need it further,
        # because the API authenticates globally (unfortunately),
        # but for airflow this shouldn't matter too much, because each
        # task instance runs in its own process anyway.
        DatadogHook(datadog_conn_id=self.datadog_conn_id)
        response = api.Event.query(
            start=self.from_seconds_ago,
            end=self.up_to_seconds_from_now,
            priority=self.priority,
            sources=self.sources,
            tags=self.tags,
        )
        # Error payloads come back as a dict carrying a non-"ok" status.
        if isinstance(response, dict) and response.get('status', 'ok') != 'ok':
            self.log.error("Unexpected Datadog result: %s", response)
            raise AirflowException("Datadog returned unexpected result")
        if self.response_check:
            # run content check on response
            return self.response_check(response)
        # If no check was inserted, assume any event that matched yields true.
        return len(response) > 0
| 40.699029 | 93 | 0.696803 |
0d087eb1d91bb00190c6a3b8381d7e082d80c292 | 2,720 | py | Python | meetings_management/migrations/0003_auto_20170505_2142.py | lfbos/meeting_room_management | 5fe0282925ec9b69806e6a351bcc4c6904a00bed | [
"MIT"
] | 1 | 2017-05-22T23:02:54.000Z | 2017-05-22T23:02:54.000Z | meetings_management/migrations/0003_auto_20170505_2142.py | lfbos/meeting_room_management | 5fe0282925ec9b69806e6a351bcc4c6904a00bed | [
"MIT"
] | null | null | null | meetings_management/migrations/0003_auto_20170505_2142.py | lfbos/meeting_room_management | 5fe0282925ec9b69806e6a351bcc4c6904a00bed | [
"MIT"
] | 3 | 2017-06-12T03:15:40.000Z | 2019-11-22T18:44:49.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-05 21:42
from __future__ import unicode_literals
import datetime
import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
import uuid
class Migration(migrations.Migration):
    # Auto-generated by Django 1.10.7.  Adds the MeetingRoomReservation
    # model, pins MeetingRoom availability defaults to fixed timestamps,
    # and wires the reservation's meeting_room/user foreign keys.

    dependencies = [
        ('meetings_management', '0002_auto_20170505_2139'),
    ]
    operations = [
        migrations.CreateModel(
            name='MeetingRoomReservation',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                # NOTE(review): default={} is a shared mutable default
                # (Django recommends default=dict) -- left as generated.
                ('metadata', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default={}, verbose_name='metadata')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='date and time of creation')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='date and time of last update')),
                ('reserved_from', models.DateTimeField(verbose_name='reserved from')),
                ('reserved_until', models.DateTimeField(verbose_name='reserved until')),
                ('amount', models.IntegerField(verbose_name='amount of people')),
                ('supplies', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=64), blank=True, size=None, verbose_name='supplies to use')),
            ],
            options={
                'verbose_name': 'meeting room reservation',
                'verbose_name_plural': 'meeting room reservations',
            },
        ),
        migrations.AlterField(
            model_name='meetingroom',
            name='available_from',
            field=models.DateTimeField(default=datetime.datetime(2017, 5, 5, 6, 0, 0, 629370, tzinfo=utc), verbose_name='available from'),
        ),
        migrations.AlterField(
            model_name='meetingroom',
            name='available_until',
            field=models.DateTimeField(default=datetime.datetime(2017, 5, 5, 21, 0, 0, 629468, tzinfo=utc), verbose_name='available until'),
        ),
        migrations.AddField(
            model_name='meetingroomreservation',
            name='meeting_room',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reservations', to='meetings_management.MeetingRoom'),
        ),
        migrations.AddField(
            model_name='meetingroomreservation',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reservations', to='meetings_management.MeetingRoomUser'),
        ),
    ]
b03eef1f11a7238bc5c1f9604307dddd8d8191fc | 9,201 | py | Python | tests/test_wificontrol.py | TopperBG/pywificontrol | b4e4e6b99b75878a3b8b09b346e5270a866ccf51 | [
"BSD-3-Clause"
] | 115 | 2017-10-19T17:23:13.000Z | 2022-02-01T21:54:45.000Z | tests/test_wificontrol.py | TopperBG/pywificontrol | b4e4e6b99b75878a3b8b09b346e5270a866ccf51 | [
"BSD-3-Clause"
] | 8 | 2017-10-21T03:56:33.000Z | 2022-01-04T12:18:13.000Z | tests/test_wificontrol.py | TopperBG/pywificontrol | b4e4e6b99b75878a3b8b09b346e5270a866ccf51 | [
"BSD-3-Clause"
] | 46 | 2017-10-21T02:17:33.000Z | 2022-01-15T20:53:18.000Z | # Written by Ivan Sapozhkov and Denis Chagin <denis.chagin@emlid.com>
#
# Copyright (c) 2016, Emlid Limited
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import pytest
import mock
from wificontrol import WiFiControl
@pytest.fixture
def ssid():
    """Minimal network description shared by the wificontrol tests."""
    return {'ssid': 'Test'}
class FakeWiFiControl(WiFiControl):
    """WiFiControl variant whose collaborators are all MagicMocks.

    The real base-class __init__ is deliberately skipped so no system
    services (rfkill, wpa_supplicant, hostapd) are touched during tests.
    """

    def __init__(self):
        for attr in ('wifi', 'wpasupplicant', 'hotspot'):
            setattr(self, attr, mock.MagicMock())
class TestWiFiControl:
    """Unit tests for WiFiControl built entirely on mocked collaborators.

    NOTE(review): every ``assert <mock>.is_called_once_with(...)`` below is
    a no-op -- MagicMock auto-creates ``is_called_once_with`` as a child
    mock whose return value is truthy, so the assertion can never fail.
    The real API is ``<mock>.assert_called_once_with(...)``; verify against
    the WiFiControl implementation before converting these checks.
    """
    def setup_method(self):
        # Fresh mocked manager per test.
        self.manager = FakeWiFiControl()
    def test_host_mode(self):
        self.manager.hotspot.started = mock.Mock(return_value=False)
        self.manager.start_host_mode()
        assert self.manager.wpasupplicant.stop.call_count == 1
        assert self.manager.hotspot.started.call_count == 1
        assert self.manager.hotspot.start.call_count == 1
    def test_client_mode(self):
        self.manager.wpasupplicant.started = mock.Mock(return_value=False)
        self.manager.start_client_mode()
        assert self.manager.hotspot.stop.call_count == 1
        assert self.manager.wpasupplicant.started.call_count == 1
        assert self.manager.wpasupplicant.start.call_count == 1
    def test_wifi_turn_on(self):
        self.manager.wpasupplicant.started = mock.Mock(return_value=False)
        self.manager.hotspot.started = mock.Mock(return_value=False)
        self.manager.turn_on_wifi()
        assert self.manager.wifi.unblock.call_count == 1
        assert self.manager.wpasupplicant.started.call_count == 1
        assert self.manager.wpasupplicant.start.call_count == 1
        self.manager.wpasupplicant.started.return_value = True
        assert self.manager.get_wifi_turned_on() is True
    def test_wifi_turn_off(self):
        self.manager.wpasupplicant.started = mock.Mock(return_value=True)
        self.manager.hotspot.started = mock.Mock(return_value=False)
        self.manager.turn_off_wifi()
        assert self.manager.wifi.block.call_count == 1
        assert self.manager.hotspot.stop.call_count == 1
        assert self.manager.wpasupplicant.stop.call_count == 1
        self.manager.wpasupplicant.started.return_value = False
        assert self.manager.get_wifi_turned_on() is False
    def test_wifi_turn_on_if_wifi_is_on(self):
        # hotspot already running: turn_on_wifi must be a no-op.
        self.manager.wpasupplicant.started = mock.Mock(return_value=False)
        self.manager.hotspot.started = mock.Mock(return_value=True)
        self.manager.turn_on_wifi()
        assert self.manager.wifi.unblock.call_count == 0
        assert self.manager.wpasupplicant.started.call_count == 1
        assert self.manager.hotspot.started.call_count == 1
        assert self.manager.wpasupplicant.start.call_count == 0
        assert self.manager.hotspot.start.call_count == 0
    def test_network_add(self, ssid):
        self.manager.add_network(ssid)
        assert self.manager.wpasupplicant.add_network.is_called_once_with(ssid)
    def test_network_remove(self, ssid):
        self.manager.remove_network(ssid)
        assert self.manager.wpasupplicant.remove_network.is_called_once_with(ssid)
    def test_status_get(self, ssid):
        self.manager.wpasupplicant.started = mock.Mock(return_value=False)
        self.manager.hotspot.started = mock.Mock(return_value=True)
        state, status = self.manager.get_status()
        assert state == self.manager.HOST_STATE
        assert status is None
        self.manager.wpasupplicant.started.return_value = True
        self.manager.hotspot.started.return_value = False
        self.manager.wpasupplicant.get_status = mock.Mock(return_value=ssid)
        state, status = self.manager.get_status()
        assert state == self.manager.WPA_STATE
        assert status == ssid
    def test_start_connection(self, ssid):
        # side_effect simulates wpa_supplicant failing to connect, which
        # should revert the manager back to hotspot mode.
        def start_connecting(*args):
            self.manager.hotspot.started.return_value = False
            self.manager.revert_on_connect_failure(result=None)
        self.manager.wpasupplicant.started = mock.Mock(return_value=False)
        self.manager.wpasupplicant.start_connecting.side_effect = start_connecting
        self.manager.hotspot.started = mock.Mock(return_value=True)
        self.manager.start_connecting(ssid)
        assert self.manager.wpasupplicant.started.call_count == 1
        assert self.manager.hotspot.stop.call_count == 1
        assert self.manager.wpasupplicant.start.call_count == 1
        args = (ssid, self.manager.revert_on_connect_failure, None, 10)
        assert self.manager.wpasupplicant.start_connecting.is_called_once_with(args)
        assert self.manager.hotspot.started.call_count == 1
        assert self.manager.wpasupplicant.stop.call_count == 1
        assert self.manager.hotspot.start.call_count == 1
    def test_reconnection(self, ssid):
        # side_effect invokes the supplied callback, which triggers a
        # second start_connecting call (the reconnect attempt).
        def start_connecting(result, callback, args, timeout):
            self.manager.hotspot.started.return_value = False
            if args:
                callback({}, *args)
            else:
                callback(result)
        self.manager.wpasupplicant.started = mock.Mock(return_value=False)
        self.manager.wpasupplicant.start_connecting.side_effect = start_connecting
        self.manager.hotspot.started = mock.Mock(return_value=True)
        self.manager.start_connecting(ssid, callback=self.manager.reconnect,
                                      args=(ssid,))
        assert self.manager.wpasupplicant.start_connecting.call_count == 2
    def test_supplicant_functions(self):
        # Each facade method must forward exactly once to its collaborator.
        self.manager.scan()
        assert self.manager.wpasupplicant.scan.call_count == 1
        self.manager.get_scan_results()
        assert self.manager.wpasupplicant.get_scan_results.call_count == 1
        self.manager.get_added_networks()
        assert self.manager.wpasupplicant.get_added_networks.call_count == 1
        self.manager.get_ip()
        assert self.manager.wifi.get_device_ip.call_count == 1
        self.manager.stop_connecting()
        assert self.manager.wpasupplicant.stop_connecting.call_count == 1
        self.manager.disconnect()
        assert self.manager.wpasupplicant.disconnect.call_count == 1
        self.manager.get_device_name()
        assert self.manager.hotspot.get_host_name.call_count == 1
        self.manager.get_hostap_name()
        assert self.manager.hotspot.get_hostap_name.call_count == 1
        name = 'test'
        self.manager.set_device_names(name)
        assert self.manager.wpasupplicant.set_p2p_name.call_count == 1
        assert self.manager.wpasupplicant.set_p2p_name.is_called_once_with(name)
        assert self.manager.hotspot.set_hostap_name.call_count == 1
        assert self.manager.hotspot.set_hostap_name.is_called_once_with(name)
        assert self.manager.hotspot.set_host_name.call_count == 1
        assert self.manager.hotspot.set_host_name.is_called_once_with(name)
        assert self.manager.wifi.restart_dns.call_count == 1
        self.manager.set_hostap_password(name)
        assert self.manager.hotspot.set_hostap_password.is_called_once_with(name)
    def test_verify_names(self):
        name = 'test'
        mac_addr = '11:22:33:44:55:66'
        self.manager.hotspot.get_host_name.return_value = name
        self.manager.wpasupplicant.get_p2p_name.return_value = name
        self.manager.hotspot.get_hostap_name.return_value = "{}{}".format(name, mac_addr[-6:])
        self.manager.hotspot.get_device_mac.return_value = mac_addr[-6:]
        assert self.manager.verify_hostap_name(name)
        assert self.manager.verify_device_names(name)
        assert self.manager.hotspot.get_host_name.call_count == 1
        assert self.manager.wpasupplicant.get_p2p_name.call_count == 1
| 38.3375 | 94 | 0.716335 |
935e9eb3da134ef7a884eb05d96c06c9b29f9ac1 | 509 | py | Python | video/migrations/0007_auto_20190614_0943.py | scintiller/OnlineJudge | 4e66da0e366c8b950a1ccae2b435b81d9fe07e6c | [
"MIT"
] | null | null | null | video/migrations/0007_auto_20190614_0943.py | scintiller/OnlineJudge | 4e66da0e366c8b950a1ccae2b435b81d9fe07e6c | [
"MIT"
] | 6 | 2020-06-05T21:37:42.000Z | 2022-01-13T01:19:55.000Z | video/migrations/0007_auto_20190614_0943.py | scintiller/OnlineJudge | 4e66da0e366c8b950a1ccae2b435b81d9fe07e6c | [
"MIT"
] | null | null | null | # Generated by Django 2.1.7 on 2019-06-14 09:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 2.1.7: re-points SolutionVideo.problem at
    # problem.Problem with CASCADE delete and related_name 'solution_video'.

    dependencies = [
        ('video', '0006_auto_20190614_0923'),
    ]
    operations = [
        migrations.AlterField(
            model_name='solutionvideo',
            name='problem',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='solution_video', to='problem.Problem'),
        ),
    ]
| 25.45 | 134 | 0.656189 |
ffa88825b8361686f9532a474d3d9fd82e993b1d | 593 | py | Python | commerce/migrations/0043_auto_20201130_2238.py | PragmaticMates/django-commerce | b992bf4c81ca6dfaad9ccd423d25fba9d255f159 | [
"Apache-2.0"
] | 4 | 2017-09-08T19:22:19.000Z | 2021-12-21T17:55:29.000Z | commerce/migrations/0043_auto_20201130_2238.py | PragmaticMates/django-commerce | b992bf4c81ca6dfaad9ccd423d25fba9d255f159 | [
"Apache-2.0"
] | null | null | null | commerce/migrations/0043_auto_20201130_2238.py | PragmaticMates/django-commerce | b992bf4c81ca6dfaad9ccd423d25fba9d255f159 | [
"Apache-2.0"
] | 1 | 2021-10-31T06:31:18.000Z | 2021-10-31T06:31:18.000Z | # Generated by Django 2.2.4 on 2020-11-30 21:38
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.4: makes Discount.content_types an
    # optional (blank=True) many-to-many to contenttypes.ContentType.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('commerce', '0042_auto_20201130_2222'),
    ]
    operations = [
        migrations.AlterField(
            model_name='discount',
            name='content_types',
            field=models.ManyToManyField(blank=True, to='contenttypes.ContentType', verbose_name='content types'),
        ),
    ]
| 26.954545 | 114 | 0.677909 |
aa35e6db53aa9cf1319a4265030f9a108cf28015 | 23,165 | py | Python | samples/client/petstore/python/swagger_client/apis/user_api.py | s4id/swagger-codegen | 084e7f7199543a1349e96f49cb54d808051d78aa | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/python/swagger_client/apis/user_api.py | s4id/swagger-codegen | 084e7f7199543a1349e96f49cb54d808051d78aa | [
"Apache-2.0"
] | 5 | 2015-10-22T19:18:35.000Z | 2016-03-10T16:52:14.000Z | samples/client/petstore/python/swagger_client/apis/user_api.py | s4id/swagger-codegen | 084e7f7199543a1349e96f49cb54d808051d78aa | [
"Apache-2.0"
] | 1 | 2021-11-08T19:37:36.000Z | 2021-11-08T19:37:36.000Z | # coding: utf-8
"""
UserApi.py
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UserApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def create_user(self, **kwargs):
"""
Create user
This can only be done by the logged in user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_user(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param User body: Created user object
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_user" % key
)
params[key] = val
del params['kwargs']
resource_path = '/user'.replace('{format}', 'json')
method = 'POST'
path_params = {}
query_params = {}
header_params = {}
form_params = {}
files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
response = self.api_client.call_api(resource_path, method,
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def create_users_with_array_input(self, **kwargs):
"""
Creates list of users with given input array
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_users_with_array_input(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[User] body: List of user object
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_users_with_array_input" % key
)
params[key] = val
del params['kwargs']
resource_path = '/user/createWithArray'.replace('{format}', 'json')
method = 'POST'
path_params = {}
query_params = {}
header_params = {}
form_params = {}
files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
response = self.api_client.call_api(resource_path, method,
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def create_users_with_list_input(self, **kwargs):
"""
Creates list of users with given input array
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_users_with_list_input(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[User] body: List of user object
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_users_with_list_input" % key
)
params[key] = val
del params['kwargs']
resource_path = '/user/createWithList'.replace('{format}', 'json')
method = 'POST'
path_params = {}
query_params = {}
header_params = {}
form_params = {}
files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
response = self.api_client.call_api(resource_path, method,
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def login_user(self, **kwargs):
"""
Logs user into the system
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.login_user(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str username: The user name for login
:param str password: The password for login in clear text
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['username', 'password']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method login_user" % key
)
params[key] = val
del params['kwargs']
resource_path = '/user/login'.replace('{format}', 'json')
method = 'GET'
path_params = {}
query_params = {}
if 'username' in params:
query_params['username'] = params['username']
if 'password' in params:
query_params['password'] = params['password']
header_params = {}
form_params = {}
files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
response = self.api_client.call_api(resource_path, method,
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def logout_user(self, **kwargs):
    """
    Logs out current logged in user session.

    The request is synchronous by default. Passing a `callback` callable
    makes it asynchronous: the callback is invoked with the response and
    the request thread is returned instead.

    :param callback function: The callback function for asynchronous
        request. (optional)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('callback',)
    for arg in kwargs:
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method logout_user" % arg
            )

    resource_path = '/user/logout'.replace('{format}', 'json')

    header_params = {}
    # HTTP header `Accept`
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # no authentication required for this endpoint
    return self.api_client.call_api(resource_path, 'GET',
                                    {},
                                    {},
                                    header_params,
                                    body=None,
                                    post_params={},
                                    files={},
                                    response_type=None,
                                    auth_settings=[],
                                    callback=kwargs.get('callback'))
def get_user_by_name(self, username, **kwargs):
    """
    Get user by user name.

    The request is synchronous by default. Passing a `callback` callable
    makes it asynchronous: the callback is invoked with the response and
    the request thread is returned instead.

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str username: The name that needs to be fetched. Use user1 for
        testing. (required)
    :return: User
        If the method is called asynchronously, returns the request thread.
    """
    # verify the required parameter 'username' is set
    if username is None:
        raise ValueError("Missing the required parameter `username` when calling `get_user_by_name`")

    accepted = ('username', 'callback')
    for arg in kwargs:
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user_by_name" % arg
            )

    resource_path = '/user/{username}'.replace('{format}', 'json')
    path_params = {'username': username}

    header_params = {}
    # HTTP header `Accept`
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # no authentication required for this endpoint
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params={},
                                    files={},
                                    response_type='User',
                                    auth_settings=[],
                                    callback=kwargs.get('callback'))
def update_user(self, username, **kwargs):
    """
    Updated user.

    This can only be done by the logged in user. The request is synchronous
    by default. Passing a `callback` callable makes it asynchronous: the
    callback is invoked with the response and the request thread is
    returned instead.

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str username: name that need to be deleted (required)
    :param User body: Updated user object
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # verify the required parameter 'username' is set
    if username is None:
        raise ValueError("Missing the required parameter `username` when calling `update_user`")

    accepted = ('username', 'body', 'callback')
    for arg in kwargs:
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_user" % arg
            )

    resource_path = '/user/{username}'.replace('{format}', 'json')
    path_params = {'username': username}

    header_params = {}
    # HTTP header `Accept`
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # no authentication required for this endpoint
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    {},
                                    header_params,
                                    body=kwargs.get('body'),
                                    post_params={},
                                    files={},
                                    response_type=None,
                                    auth_settings=[],
                                    callback=kwargs.get('callback'))
def delete_user(self, username, **kwargs):
    """
    Delete user.

    This can only be done by the logged in user. The request is synchronous
    by default. Passing a `callback` callable makes it asynchronous: the
    callback is invoked with the response and the request thread is
    returned instead.

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str username: The name that needs to be deleted (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # verify the required parameter 'username' is set
    if username is None:
        raise ValueError("Missing the required parameter `username` when calling `delete_user`")

    accepted = ('username', 'callback')
    for arg in kwargs:
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_user" % arg
            )

    resource_path = '/user/{username}'.replace('{format}', 'json')
    path_params = {'username': username}

    header_params = {}
    # HTTP header `Accept`
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # no authentication required for this endpoint
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params={},
                                    files={},
                                    response_type=None,
                                    auth_settings=[],
                                    callback=kwargs.get('callback'))
| 35.151745 | 105 | 0.518757 |
b8457125704cbf98a6c86aecad844188b710377b | 5,688 | py | Python | InnerEye/ML/Histopathology/datasets/panda_dataset.py | faz1993/InnerEye-DeepLearning | fb258d5c9a3ba18565b5a67e7ac1f00127d9ecb9 | [
"MIT"
] | 402 | 2020-09-22T16:38:16.000Z | 2022-03-30T09:56:03.000Z | InnerEye/ML/Histopathology/datasets/panda_dataset.py | wensincai/InnerEye-DeepLearning | ccb53d01ad0f1c20336588c0066059b8de5266fd | [
"MIT"
] | 259 | 2020-09-23T09:32:33.000Z | 2022-03-30T18:15:01.000Z | InnerEye/ML/Histopathology/datasets/panda_dataset.py | wensincai/InnerEye-DeepLearning | ccb53d01ad0f1c20336588c0066059b8de5266fd | [
"MIT"
] | 112 | 2020-09-23T00:12:58.000Z | 2022-03-31T07:39:55.000Z | # ------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------
from pathlib import Path
from typing import Any, Dict, Union, Optional
import pandas as pd
from cucim import CuImage
from health_ml.utils import box_utils
from monai.config import KeysCollection
from monai.data.image_reader import ImageReader, WSIReader
from monai.transforms import MapTransform
from InnerEye.ML.Histopathology.datasets.base_dataset import SlidesDataset
class PandaDataset(SlidesDataset):
    """Dataset class for loading files from the PANDA challenge dataset.

    Items follow the `SlideKey` schema and additionally carry the original
    dataset's meta-data columns (`'data_provider'`, `'isup_grade'`, and
    `'gleason_score'`).
    Ref.: https://www.kaggle.com/c/prostate-cancer-grade-assessment/overview
    """
    SLIDE_ID_COLUMN = 'image_id'
    IMAGE_COLUMN = 'image'
    MASK_COLUMN = 'mask'
    LABEL_COLUMN = 'isup_grade'

    METADATA_COLUMNS = ('data_provider', 'isup_grade', 'gleason_score')

    DEFAULT_CSV_FILENAME = "train.csv"

    def __init__(self,
                 root: Union[str, Path],
                 dataset_csv: Optional[Union[str, Path]] = None,
                 dataset_df: Optional[pd.DataFrame] = None) -> None:
        # Column validation is deferred: the PANDA CSV does not ship image or
        # mask paths, so they are synthesised from the slide ids first.
        super().__init__(root, dataset_csv, dataset_df, validate_columns=False)
        slide_ids = self.dataset_df.index
        self.dataset_df[self.IMAGE_COLUMN] = ["train_images/" + slide_id + ".tiff"
                                              for slide_id in slide_ids]
        self.dataset_df[self.MASK_COLUMN] = ["train_label_masks/" + slide_id + "_mask.tiff"
                                             for slide_id in slide_ids]
        self.validate_columns()
# MONAI's convention is that dictionary transforms have a 'd' suffix in the class name
class ReadImaged(MapTransform):
    """Dictionary transform replacing file paths with images loaded by `reader`."""

    def __init__(self, reader: ImageReader, keys: KeysCollection,
                 allow_missing_keys: bool = False, **kwargs: Any) -> None:
        super().__init__(keys, allow_missing_keys=allow_missing_keys)
        self.reader = reader
        self.kwargs = kwargs

    def __call__(self, data: Dict) -> Dict:
        for key in self.keys:
            # A missing key raises KeyError here unless explicitly allowed.
            if not self.allow_missing_keys or key in data:
                data[key] = self.reader.read(data[key], **self.kwargs)
        return data
class LoadPandaROId(MapTransform):
    """Transform that loads a pathology slide and mask, cropped to the mask bounding box (ROI).

    Operates on dictionaries, replacing the file paths in `image_key` and `mask_key` with the
    respective loaded arrays, in (C, H, W) format. Also adds the following meta-data entries:
    - `'location'` (tuple): top-right coordinates of the bounding box
    - `'size'` (tuple): width and height of the bounding box
    - `'level'` (int): chosen magnification level
    - `'scale'` (float): corresponding scale, loaded from the file
    """

    def __init__(self, reader: WSIReader, image_key: str = 'image', mask_key: str = 'mask',
                 level: int = 0, margin: int = 0, **kwargs: Any) -> None:
        """
        :param reader: An instance of MONAI's `WSIReader`.
        :param image_key: Image key in the input and output dictionaries.
        :param mask_key: Mask key in the input and output dictionaries.
        :param level: Magnification level to load from the raw multi-scale files.
        :param margin: Amount in pixels by which to enlarge the estimated bounding box for cropping.
        """
        super().__init__([image_key, mask_key], allow_missing_keys=False)
        self.reader = reader
        self.image_key = image_key
        self.mask_key = mask_key
        self.level = level
        self.margin = margin
        # NOTE(review): self.kwargs is stored but not forwarded to get_data()
        # in __call__ below - confirm whether it is used by subclasses.
        self.kwargs = kwargs

    def _get_bounding_box(self, mask_obj: CuImage) -> box_utils.Box:
        """Estimate the foreground bounding box, in level-0 coordinates."""
        # Estimate bounding box at the lowest resolution (i.e. highest level)
        highest_level = mask_obj.resolutions['level_count'] - 1
        scale = mask_obj.resolutions['level_downsamples'][highest_level]
        mask, _ = self.reader.get_data(mask_obj, level=highest_level)  # loaded as RGB PIL image
        foreground_mask = mask[0] > 0  # PANDA segmentation mask is in 'R' channel
        # Margin is applied at the low resolution, then rescaled to level 0.
        bbox = scale * box_utils.get_bounding_box(foreground_mask).add_margin(self.margin)
        return bbox

    def __call__(self, data: Dict) -> Dict:
        """Load image and mask cropped to the mask's ROI; add crop meta-data."""
        mask_obj: CuImage = self.reader.read(data[self.mask_key])
        image_obj: CuImage = self.reader.read(data[self.image_key])

        level0_bbox = self._get_bounding_box(mask_obj)

        # cuCIM/OpenSlide take absolute location coordinates in the level 0 reference frame,
        # but relative region size in pixels at the chosen level
        scale = mask_obj.resolutions['level_downsamples'][self.level]
        scaled_bbox = level0_bbox / scale
        get_data_kwargs = dict(location=(level0_bbox.x, level0_bbox.y),
                               size=(scaled_bbox.w, scaled_bbox.h),
                               level=self.level)
        mask, _ = self.reader.get_data(mask_obj, **get_data_kwargs)  # type: ignore
        data[self.mask_key] = mask[:1]  # PANDA segmentation mask is in 'R' channel
        data[self.image_key], _ = self.reader.get_data(image_obj, **get_data_kwargs)  # type: ignore
        # Record the crop parameters ('location', 'size', 'level') and scale
        # so downstream transforms can map back to slide coordinates.
        data.update(get_data_kwargs)
        data['scale'] = scale
        mask_obj.close()
        image_obj.close()
        return data
| 46.243902 | 100 | 0.654184 |
1ff341ca47a82fdd43bc248c14e3960133a00392 | 13,695 | py | Python | mdMakelocal.py | ludos1978/Markdown_MakeLocal | 473248e584d08c2404e7ea5bcf4baa1c8f4526aa | [
"MIT"
] | 1 | 2021-12-07T19:56:48.000Z | 2021-12-07T19:56:48.000Z | mdMakelocal.py | ludos1978/Markdown_MakeLocal | 473248e584d08c2404e7ea5bcf4baa1c8f4526aa | [
"MIT"
] | null | null | null | mdMakelocal.py | ludos1978/Markdown_MakeLocal | 473248e584d08c2404e7ea5bcf4baa1c8f4526aa | [
"MIT"
] | null | null | null | import os, sys, re
import requests
import glob
import argparse
import uuid
import hashlib
import markdown
import mimetypes
import threading
import lxml.etree
import urllib.parse
import time
class _Getch:
"""Gets a single character from standard input. Does not echo to the
screen."""
def __init__(self):
try:
self.impl = _GetchWindows()
except ImportError:
self.impl = _GetchUnix()
def __call__(self): return self.impl()
class _GetchUnix:
def __init__(self):
import tty, sys
def __call__(self):
import sys, tty, termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows:
def __init__(self):
import msvcrt
def __call__(self):
import msvcrt
return msvcrt.getch()
# Module-level singleton: call ``getch()`` to read one keypress without echo.
getch = _Getch()
def getFilenameFromHeaders(headers, url):
    """Derive a file name for a downloaded file.

    Tries, in order:
    1. the ``filename=`` token of the ``Content-Disposition`` header,
    2. the basename of the URL path (appending an extension guessed from
       the ``Content-Type`` header when the basename has none),
    3. a random UUID plus the guessed extension.

    :param headers: response headers (a mapping, e.g. ``requests`` headers)
    :param url: the URL the file was downloaded from
    :return: a plain file name (no directory components)
    """
    # 1) Content-Disposition: attachment; filename="..."
    contentDisposition = headers.get('content-disposition')
    if contentDisposition:
        filenames = re.findall('filename=(.+)', contentDisposition)
        # BUGFIX: was `type(filenames) is list` (always true), which crashed
        # with IndexError when the header carried no filename token
        if filenames:
            filename = filenames[0].strip("'").strip('"').strip()
            if filename:
                return filename

    # 2) extension hinted by the Content-Type header; the header or the
    # mimetype mapping may be missing, in which case no extension is added
    # (previously a missing header raised KeyError and a None extension
    # caused a TypeError on concatenation)
    contentType = headers.get('content-type', '')
    fileExtension = mimetypes.guess_extension(contentType.partition(';')[0].strip()) or ''

    # try to use the URL path as filename
    urlPath = urllib.parse.urlparse(url)
    filename = os.path.basename(urlPath.path)
    urlFileOnlyName, urlFileExtension = os.path.splitext(filename)
    if filename:
        # keep a complete name as-is; append the guessed extension otherwise
        if urlFileExtension:
            return filename
        return filename + fileExtension

    # 3) last resort: random name (BUGFIX: was `uuid.uuid4() + fileExtension`,
    # a TypeError since UUID objects cannot be concatenated with str)
    return str(uuid.uuid4()) + fileExtension
def getUrlsInMarkdown(pMarkdownFilename):
    """Collect the downloadable URLs referenced by a markdown file.

    The markdown is rendered to XHTML and parsed; returned are
    - every ``<img src>`` starting with ``http``, and
    - every ``<a href>`` ending in ``pdf`` that points at web.archive.org.

    :param pMarkdownFilename: path of the markdown file to scan
    :return: list of URL strings (may contain duplicates)
    """
    urls = []
    with open(pMarkdownFilename, "r") as markdownFile:
        markdownFileContent = markdownFile.read()
        markdownFileAsMarkdown = bytes('<?xml version="1.0" encoding="utf8"?>\n<div>\n' + markdown.markdown(markdownFileContent) + '</div>\n', encoding='utf8')
        doc = lxml.etree.fromstring(markdownFileAsMarkdown)

        # include images
        for link in doc.xpath('//img'):
            linkSrc = link.get('src')
            # BUGFIX: attribute may be absent (None) - guard before startswith
            if linkSrc and linkSrc.startswith('http'):
                urls.append(linkSrc)

        # include pdf files from archive.org
        for link in doc.xpath('//a'):
            linkSrc = link.get('href')
            # BUGFIX: attribute may be absent (None) - guard before endswith
            if linkSrc and linkSrc.endswith('pdf'):
                # only archive.org pdf links are downloaded directly
                if linkSrc.startswith("https://web.archive.org/"):
                    urls.append(linkSrc)
    return urls
def replacemany(adict, astring):
    """Replace every occurrence of each key of *adict* in *astring* by its value.

    All keys are combined into one alternation regex, so the string is
    scanned once and replacements are never re-scanned.

    :param adict: mapping of literal substrings to their replacements
    :param astring: the text to rewrite
    :return: the rewritten text
    """
    # BUGFIX: an empty mapping used to build the empty pattern, which matches
    # everywhere and raised KeyError('') inside the replacement callback
    if not adict:
        return astring
    pat = '|'.join(re.escape(s) for s in adict)
    there = re.compile(pat)

    def onerepl(mo):
        return adict[mo.group()]
    return there.sub(onerepl, astring)
class Downloader(threading.Thread):
    """Threaded download of one URL referenced by a markdown file.

    The file is streamed to a uniquely named temporary file while its MD5 is
    computed on the fly. The final name is derived from the response headers
    or the URL; if a *different* file with the same name already exists, the
    MD5 digest is appended to the name to avoid overwriting it.

    After the thread has run, ``finalFilePath`` holds the local path to use
    in the markdown file (or the original URL when the download was
    discarded) and ``finished`` is True.
    """

    def __init__(self, fileUrl, relativePath, keepAllFiles):
        super(Downloader, self).__init__()
        # the text in the markdown file (used for the later replacement)
        self.fileUrl = fileUrl
        # the file to download: for web.archive.org links, insert the "if_"
        # flag so the archived file is served without the archive toolbar
        self.downloadUrl = fileUrl
        if ("/http://" in fileUrl):
            self.downloadUrl = "if_/http://".join(fileUrl.split("/http://"))
        else:
            self.downloadUrl = "if_/https://".join(fileUrl.split("/https://"))
        # by default leave the original path intact (no replacement happens)
        self.finalFilePath = self.fileUrl
        self.relativePath = relativePath
        self.fileTitle = "undefined"
        self.keepAllFiles = keepAllFiles
        self.finished = False

    def run(self):
        try:
            self._download()
        finally:
            # BUGFIX: `finished` must be set on every exit path; previously an
            # early return (unknown mimetype) left it False forever, so the
            # polling loop in __main__ never terminated.
            self.finished = True

    def _download(self):
        """Fetch downloadUrl to a temp file, then move it to its final name."""
        # make sure the temp file path does not exist
        while True:
            uniqueId = str(uuid.uuid4())
            tempFilePath = os.path.join(self.relativePath, uniqueId)
            if (not os.path.exists(tempFilePath)):
                break

        print ("starting download %s as %s " % (self.downloadUrl, tempFilePath))
        response = requests.head(self.downloadUrl)
        fileName = getFilenameFromHeaders(response.headers, self.downloadUrl)
        fileTitle, fileExt = os.path.splitext(fileName)
        self.fileTitle = fileTitle
        filePath = os.path.join(self.relativePath, fileName)

        request = requests.get(self.downloadUrl, stream = True)
        # stream to disk while hashing, so large files are never held in memory
        # https://stackoverflow.com/questions/14014854/python-on-the-fly-md5-as-one-reads-a-stream
        md5sig = hashlib.md5()
        with open(tempFilePath, 'wb') as file:
            for ch in request:
                md5sig.update(ch)
                file.write(ch)

        # only keep images and pdf files, unless --keep was requested
        try:
            fileMimetype = mimetypes.guess_type(fileName)[0]
            if not ((fileMimetype.split("/")[0] in ["image"]) or (fileMimetype.split("/")[1] in ["pdf"])):
                print ("unknown mimetpe %s of file %s / %s : removing file" % (str(fileMimetype), self.fileUrl, filePath) )
                # BUGFIX: the flag was inverted - the file was deleted exactly
                # when the user asked to keep all files
                if not self.keepAllFiles:
                    os.remove(tempFilePath)
                return
        except Exception:
            # guess_type returned None (or an unexpected shape); keep going
            print ("unable to guess mimetpe of %s / %s" % (self.fileUrl, filePath))

        # check if we need to rename the file because the file has the same
        # name but a different content
        if os.path.exists(filePath):
            # check if the existing file has the same md5sig
            existingMd5sig = hashlib.md5()
            with open(filePath, 'rb') as existingFile:
                for byte_block in iter(lambda: existingFile.read(4096), b""):
                    existingMd5sig.update(byte_block)
            if (existingMd5sig.hexdigest() == md5sig.hexdigest()):
                print ("duplicate detected '%s' md5sum of is equal, deleting downloaded file" % filePath)
            else:
                fileName = fileTitle + "_" + md5sig.hexdigest() + fileExt
                print ("name collision file '%s' already exists and md5 differs, using filename including md5sum as name '%s'" % (filePath, fileName))
                filePath = os.path.join(self.relativePath, fileName)

        if os.path.exists(filePath):
            # same name and same content: the download is redundant
            print ("file '%s' already exists, using equal md5 sum, assuming file already downloaded" % filePath)
            os.remove(tempFilePath)
        else:
            print ("saving file as %s" % filePath)
            os.rename(tempFilePath, filePath)

        print ("finished download %s as %s" % (self.fileUrl, tempFilePath))
        self.finalFilePath = filePath
if __name__ == "__main__":
    # if (len(sys.argv) < 2):
    #     print ("usage: python3 %s markdownFile.md ./MediaTargetFolder" % (sys.argv[0]))
    #     sys.exit()

    # command line interface: one or more markdown files/folders, plus options
    parser = argparse.ArgumentParser(
        description='Download linked images in Markdown File and generate new MD',
        usage='%(prog)s file.md [file2.md ..] [-m Folder]')
    # parser.add_argument('filename', type=str, nargs='+', help='markdown file[s]')
    parser.add_argument('path', nargs='+', help='Path of a file or a folder of files.')
    parser.add_argument("-m", "--media", help='specify folder for media downloads', required=False, default="./Media")
    #parser.add_argument("-d", "--dummy", type=bool, help='dummy run (dont download files)', required=False)
    parser.add_argument("-d", "--dummy", dest="dummyRun", default=False, action="store_true", help='Do not do anything really')
    parser.add_argument("--maxThreads", dest="maxThreads", default=5, help='maximum number of threads to download files')
    parser.add_argument("-k", "--keep", dest="keepAllFiles", default=False, action="store_true", help='keep all downloaded files, or delete unknown types after download')
    args = parser.parse_args()

    # collect markdown files: explicit files as-is, folders via *.md glob
    full_paths = [os.path.normpath(os.path.join(os.getcwd(), path)) for path in args.path]
    markdownFiles = set()
    for path in full_paths:
        if os.path.isfile(path):
            markdownFiles.add(path)
        else:
            markdownFiles |= set(glob.glob(path + '/*' + '.md'))

    # read media folder
    mediaTargetFolder = args.media
    # do a dummy run, not downloading or editing files
    dummyRun = args.dummyRun
    # maximum number of threads running to download files
    # NOTE(review): maxThreads comes from argparse without type=int, so a
    # command-line value is a str here - confirm intended usage
    maxThreads = max(args.maxThreads, 1)
    # keep all files
    keepAllFiles = args.keepAllFiles

    # check media folder exists
    if (not os.path.isdir(mediaTargetFolder)):
        print ("Media folder %s does not exist" % mediaTargetFolder)
        sys.exit()

    # let the user confirm the detected file list before touching anything
    print ("detected markdown files")
    for filename in markdownFiles:
        print (" %s" % filename)
    yna = ""
    while (yna not in ["y","n"]):
        print ("handle all these files? y(es) / n(o)")
        yna = getch().lower()
    if (yna == "n"):
        print ("aborted")
        sys.exit()

    # iterate all markdown files
    for markdownFilename in markdownFiles:
        print ("parsing %s" % markdownFilename)
        urlsInMarkDownFile = getUrlsInMarkdown(markdownFilename)
        urlsInMarkDownFile = list(set(urlsInMarkDownFile))  # de-duplicate
        if (len(urlsInMarkDownFile) == 0):
            print ("no downloadable urls found in %s" % markdownFilename)
        else:
            runningThreads = []
            finishedThreads = []
            if (dummyRun):
                # dummy run: only list what would be downloaded
                for s in urlsInMarkDownFile:
                    print ("file %s" % s)
            else:
                # while remaining files to download, any running download threads
                while ((len(urlsInMarkDownFile) > 0) or (len(runningThreads) > 0)):
                    # files remaining to download and not maximum number of threads running
                    while (len(urlsInMarkDownFile) > 0) and (len(runningThreads) < maxThreads):
                        url = urlsInMarkDownFile.pop()
                        thread = Downloader(url, mediaTargetFolder, keepAllFiles)
                        thread.start()
                        runningThreads.append(thread)
                    print ("threads: remaining %i running %i finished %i (%s)" % (len(urlsInMarkDownFile), len(runningThreads), len(finishedThreads), ", ".join(i.fileTitle for i in runningThreads)))
                    time.sleep(1)  # poll once per second
                    # check if thread finished, move to finishedthreads
                    # (iterate backwards so pop(i) does not shift pending indices)
                    for i in range(len(runningThreads)-1, -1, -1):
                        if (runningThreads[i].finished):
                            thread = runningThreads.pop(i)
                            thread.join()
                            finishedThreads.append(thread)

            # map each original URL to the local path its download ended at
            # NOTE(review): in a dummy run finishedThreads is empty, so the
            # block below writes an unchanged copy - confirm this is intended
            replacements = {}
            for thread in finishedThreads:
                # only if path has changed (othervise it has been skipped or deleted)
                if (thread.fileUrl != thread.finalFilePath):
                    print ("saved %s as %s"% (thread.fileUrl, thread.finalFilePath))
                    replacements[thread.fileUrl] = thread.finalFilePath

            # make sure we dont use a temp filename that already exists
            while True:
                markdownTempFilename = str(uuid.uuid4()) + ".md"
                if (not os.path.exists(markdownTempFilename)):
                    break
            print ("saving new temporary markdownfile with replaced links as %s" % markdownTempFilename)
            with open(markdownTempFilename, 'w') as fin:
                with open(markdownFilename, 'r') as ini:
                    fin.write(replacemany(replacements, ini.read()))

            markdownFileTitle, markdownFileExt = os.path.splitext(markdownFilename)
            # rename markdownTempFilename to markdownFilename-localMedia-X.md which does not exists
            newMarkdownFilename = "%s-localMedia.md" % markdownFileTitle
            newMarkdownFilenameIndex = 0
            while (os.path.exists(newMarkdownFilename)):
                newMarkdownFilenameIndex += 1
                newMarkdownFilename = "%s-localMedia-%i.md" % (markdownFileTitle, newMarkdownFilenameIndex)
            print ("rename temporary markdownfile %s as %s" % (markdownTempFilename, newMarkdownFilename))
            os.rename(markdownTempFilename, newMarkdownFilename)
14b77b1df6d3015b8155039d6af48da5a868b289 | 1,242 | py | Python | DeleteBrowserHistory/runner.py | sabyasachisome/Delete-Browser-Hist | 6cb647e2609ecd6236b3d5fb2789086c3837c6ee | [
"MIT"
] | null | null | null | DeleteBrowserHistory/runner.py | sabyasachisome/Delete-Browser-Hist | 6cb647e2609ecd6236b3d5fb2789086c3837c6ee | [
"MIT"
] | null | null | null | DeleteBrowserHistory/runner.py | sabyasachisome/Delete-Browser-Hist | 6cb647e2609ecd6236b3d5fb2789086c3837c6ee | [
"MIT"
] | null | null | null | from FetchData import fetchData as fetch
from TransactData import transactions as transact
from sys import exit
if __name__=="__main__":
try:
# get connection object
configObj=fetch.getConfig()
con,cur=fetch.getConnection(configObj)
# get table and where condition
table,whereCond=fetch.getDbDetails(configObj)
# print(table,whereCond)
# get the data from table
df1=fetch.readData(con,table,whereCond)
if len(df1.index)==0:
exit('Dataframe is empty')
else:
# df1.head(2)
df2=fetch.convertColumns(df1,'last_visit_time')
# df2.head(4)
# refiltering the data, in case data pulled in df2 was not filtered, for safety
# df3=fetch.filterData(df2,configObj)
# df3.head(2)
# df3[df3['url'].str.contains('FACEBOOK',case=False, regex=True)]
# deleting records finally
recordsDeleted= transact.deleteHistory(cur,con,table,whereCond)
print(recordsDeleted," records have been deleted")
except Exception as e:
print("Error in execution")
print(e) | 34.5 | 92 | 0.588567 |
82edeefed475816cffb1ae1d250af3a218ccb8a6 | 3,109 | py | Python | common/rl/envs/taxi.py | rotem94/MCTS-T- | b15a1e3ac4210fa74d3c3d1f8f31a03b02b6eb91 | [
"MIT"
] | 8 | 2018-07-18T05:43:18.000Z | 2022-02-08T16:12:17.000Z | common/rl/envs/taxi.py | rotem94/MCTS-T- | b15a1e3ac4210fa74d3c3d1f8f31a03b02b6eb91 | [
"MIT"
] | 1 | 2022-02-06T12:16:09.000Z | 2022-02-07T09:41:19.000Z | src/rl/envs/taxi.py | tmoer/cursus | 62c4aa793205294d5b3a99d192e9b6311f4d34a6 | [
"MIT"
] | 4 | 2018-05-29T23:32:40.000Z | 2021-05-22T13:40:55.000Z | # -*- coding: utf-8 -*-
"""
Taxi Env
@author: thomas
"""
import numpy
import random
import gym
class Taxi():
    """A simple taxi gridworld environment.

    Observation (see ``get_state``): taxi (x, y) position, remaining fuel,
    passenger location (-1 while in the taxi, otherwise a landmark index)
    and passenger destination (a landmark index).

    Actions: 0/1 move +x/-x, 2/3 move +y/-y, 4 pickup, 5 drop-off.
    An episode ends when the passenger is delivered or fuel drops below 0.
    """

    def __init__(self):
        self.size = numpy.array([4,4])
        # pickup/drop-off landmarks (grid corners, roughly)
        self.landmarks = numpy.array([[0.0, 0.0], [0.0, 4.0], [3.0, 0.0], [4.0, 4.0]])
        # wall segments blocking x-movement; see hitsWall for the exact test
        self.walls = numpy.array([[1.0, 2.0], [2.0, -2.0], [3.0, 2.0]])
        self.fuel = 0
        self.fuel_loc = numpy.array([2.0, 1.0])
        self.pass_loc = 0  # Passenger location: -1 for in taxi, >= 0 for a landmark
        self.pass_dest = 0  # Passenger destination: >=0 for a landmark
        self.pos = numpy.zeros((2,))
        self.observation_space = gym.spaces.Box(0,12,(5,))
        self.action_space = gym.spaces.Discrete(6)

    def reset(self):
        """Randomise position, fuel and passenger assignment; return the state."""
        self.pos = numpy.random.randint(0,5,(2,))
        self.fuel = numpy.random.random()*7 + 5.0
        # pick distinct landmarks for passenger origin and destination
        self.lm_list = [i for i in range(len(self.landmarks))]
        random.shuffle(self.lm_list)
        self.pass_loc = self.lm_list.pop()
        self.pass_dest = random.choice(self.lm_list)
        return self.get_state()

    def get_state(self):
        """Observation vector: [x, y, fuel, pass_loc, pass_dest]."""
        return numpy.hstack([self.pos,self.fuel,self.pass_loc,self.pass_dest])

    def step(self,action):
        """Apply one action; return (state, reward, terminal, info)."""
        reward = self.takeAction(action)
        # BUGFIX: was `(self.fuel_loc is not None and self.fuel) < 0`, which
        # compared the result of `and` instead of the fuel level; it only
        # produced the right value by accident of `fuel_loc` never being None.
        out_of_fuel = self.fuel_loc is not None and self.fuel < 0
        terminal = 1 if self.isAtGoal() or out_of_fuel else 0
        return self.get_state(), reward, terminal, {}

    def takeAction(self, intAction):
        """Execute a move/pickup/drop-off action and return the reward."""
        reward = -1.0  # step cost
        self.fuel -= 1
        prev_pos = self.pos.copy()
        sign = 0  # direction of x-movement, used for the wall test
        if intAction == 0:
            self.pos[0] += 1.0
            sign = 1
        elif intAction == 1:
            self.pos[0] -= 1.0
            sign = -1
        elif intAction == 2:
            self.pos[1] += 1.0
        elif intAction == 3:
            self.pos[1] -= 1.0
        elif intAction == 4:  # Pickup
            if self.pass_loc >= 0 and self.atPoint(self.landmarks[self.pass_loc]):
                self.pass_loc = -1
            else:
                reward = -10.0  # illegal pickup
        elif intAction == 5:  # Drop off
            if self.pass_loc == -1 and self.atPoint(self.landmarks[self.pass_dest]):
                self.pass_loc = self.pass_dest
                reward = 20.0
            else:
                reward = -10.0  # illegal drop-off
        # NOTE: a refuel branch (`elif self.fuel_loc is not None and
        # intAction == 4`) was unreachable because action 4 is consumed by
        # Pickup above; it has been removed. Refuelling is not possible.
        self.pos = self.pos.clip([0, 0], self.size)
        if sign != 0 and self.hitsWall(prev_pos, self.pos, sign):
            self.pos[0] = prev_pos[0]  # Only revert the x-coord, to allow noise and such in y
        return reward

    # helpers
    def atPoint(self, point):
        """True when the taxi is (numerically) at `point`."""
        return numpy.linalg.norm(self.pos - point) < 0.1

    def isAtGoal(self):
        """True once the passenger has been dropped at the destination."""
        return self.pass_loc == self.pass_dest

    def hitsWall(self, old_pos, new_pos, sign):
        """True when an x-move from old_pos to new_pos crosses a wall entry."""
        return (((self.walls[:,0]*sign >= old_pos[0]*sign) & (self.walls[:,0]*sign < new_pos[0]*sign)) \
            & ((self.walls[:,1] > old_pos[1]) | ((self.size[1]-1)+self.walls[:,1] < old_pos[1]))).any()
a2573ed1055137ade45e2117fbff8e8296fe7314 | 127 | py | Python | weatherapp/admin.py | Ashira66/OWM_weatherapp | d8f50d8e97f255dd563182153206e29f8c15c2ed | [
"MIT"
] | null | null | null | weatherapp/admin.py | Ashira66/OWM_weatherapp | d8f50d8e97f255dd563182153206e29f8c15c2ed | [
"MIT"
] | null | null | null | weatherapp/admin.py | Ashira66/OWM_weatherapp | d8f50d8e97f255dd563182153206e29f8c15c2ed | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import weatherdata
admin.site.register(weatherdata)
# Register your models here.
| 25.4 | 32 | 0.826772 |
27f66b3652c73df35642512455591c7046793b31 | 8,434 | py | Python | libs/utils/augmentation.py | NinV/facial-landmark-detection | 45fc33ed83e3fd8793cf3a87b796940f9b6522aa | [
"MIT"
] | 1 | 2021-12-18T14:46:41.000Z | 2021-12-18T14:46:41.000Z | libs/utils/augmentation.py | NinV/facial-landmark-detection | 45fc33ed83e3fd8793cf3a87b796940f9b6522aa | [
"MIT"
] | null | null | null | libs/utils/augmentation.py | NinV/facial-landmark-detection | 45fc33ed83e3fd8793cf3a87b796940f9b6522aa | [
"MIT"
] | null | null | null | import random
import numpy as np
import cv2
MATCHED_PARTS = {
"300W": ([1, 17], [2, 16], [3, 15], [4, 14], [5, 13], [6, 12], [7, 11], [8, 10],
[18, 27], [19, 26], [20, 25], [21, 24], [22, 23],
[32, 36], [33, 35],
[37, 46], [38, 45], [39, 44], [40, 43], [41, 48], [42, 47],
[49, 55], [50, 54], [51, 53], [62, 64], [61, 65], [68, 66], [59, 57], [60, 56]),
"AFLW": ([1, 6], [2, 5], [3, 4],
[7, 12], [8, 11], [9, 10],
[13, 15],
[16, 18]),
"COFW": ([1, 2], [5, 7], [3, 4], [6, 8], [9, 10], [11, 12], [13, 15], [17, 18], [14, 16], [19, 20], [23, 24]),
"WFLW": ([0, 32], [1, 31], [2, 30], [3, 29], [4, 28], [5, 27], [6, 26], [7, 25], [8, 24], [9, 23], [10, 22],
[11, 21], [12, 20], [13, 19], [14, 18], [15, 17], # check
[33, 46], [34, 45], [35, 44], [36, 43], [37, 42], [38, 50], [39, 49], [40, 48], [41, 47], # elbrow
[60, 72], [61, 71], [62, 70], [63, 69], [64, 68], [65, 75], [66, 74], [67, 73],
[55, 59], [56, 58],
[76, 82], [77, 81], [78, 80], [87, 83], [86, 84],
[88, 92], [89, 91], [95, 93], [96, 97])}
num_classes = {"300W": 68, "AFLW": 19, "COFW": 29, "WFLW": 98}
class HorizontalFlip:
    """Mirror an image left-right, expressed as a homogeneous 3x3 matrix."""

    def __init__(self):
        pass

    @staticmethod
    def get_transformation_matrix(img_size):
        """Return the 3x3 matrix mapping x -> width - x (y unchanged).

        :param img_size: (width, height) of the image being flipped
        """
        width = img_size[0]
        flip = np.array([[-1, 0, width],
                         [0, 1, 0],
                         [0, 0, 1]])
        return flip
class SequentialTransform:
    """Compose geometric transforms and colour distortions into one pipeline.

    Geometric transforms are each applied with their own probability and
    multiplied into a single 3x3 homogeneous matrix so the image is warped
    only once.  Colour distortions are then applied independently.  When a
    HorizontalFlip was drawn, landmark rows are permuted with a precomputed
    matrix so symmetric points (left/right eye, etc.) swap places.
    """

    def __init__(self, geometric_transforms, geometric_transform_prob,
                 color_distortions, color_distortions_prob, out_size,
                 shuffle=True, color_mode='bgr', interpolation=cv2.INTER_AREA,
                 border_mode=cv2.BORDER_CONSTANT, border_value=(114, 114, 114),
                 flip_point_pairs="WFLW"):
        """
        :param geometric_transforms: objects exposing get_transformation_matrix()
        :param geometric_transform_prob: per-transform apply probability
        :param color_distortions: objects exposing random_distort()
        :param color_distortions_prob: per-distortion apply probability
        :param out_size: (width, height) of the warped output image
        :param shuffle: randomise the order of geometric transforms per call
        :param flip_point_pairs: landmark scheme used to build the flip
            permutation matrix ("WFLW" is 0-based, the others 1-based).
            NOTE(review): any other value leaves self.flip_matrix unset, so a
            later horizontal flip raises AttributeError.
        """
        self.geometric_transforms = geometric_transforms
        self.geometric_transform_prob = geometric_transform_prob
        self.color_distortions = color_distortions
        self.color_distortions_prob = color_distortions_prob
        self.out_size = out_size
        self.shuffle = shuffle
        self.color_mode = color_mode
        self.interpolation = interpolation
        self.border_mode = border_mode
        self.border_value = border_value
        if flip_point_pairs == "WFLW":
            # WFLW pair indices are already 0-based.
            self.flip_matrix = np.identity(num_classes["WFLW"])
            for i, j in MATCHED_PARTS["WFLW"]:
                self.flip_matrix[i, i] = 0
                self.flip_matrix[j, j] = 0
                self.flip_matrix[i, j] = 1
                self.flip_matrix[j, i] = 1
        elif flip_point_pairs in ("300W", "AFLW", "COFW"):
            # These tables are 1-based, hence the -1 offsets.
            self.flip_matrix = np.identity(num_classes[flip_point_pairs])
            for i, j in MATCHED_PARTS[flip_point_pairs]:
                self.flip_matrix[i - 1, i - 1] = 0
                self.flip_matrix[j - 1, j - 1] = 0
                self.flip_matrix[i - 1, j - 1] = 1
                self.flip_matrix[j - 1, i - 1] = 1
        # Set by _get_transformation_matrix when a HorizontalFlip is drawn.
        self.flip_point = False

    def _get_transformation_matrix(self, img_size):
        """Combine the randomly selected geometric transforms into one matrix."""
        if self.shuffle:
            temp = list(zip(self.geometric_transforms, self.geometric_transform_prob))
            random.shuffle(temp)
            self.geometric_transforms, self.geometric_transform_prob = zip(*temp)
        w, h = img_size
        T = np.identity(3)
        for transform, prob in zip(self.geometric_transforms, self.geometric_transform_prob):
            if random.random() < prob:
                T = np.matmul(transform.get_transformation_matrix((w, h)), T)
                if isinstance(transform, HorizontalFlip):
                    self.flip_point = True
        return T

    def transform(self, image: np.ndarray, points=None):
        """Warp the image (and optionally its landmarks), then jitter colours.

        :param image: numpy array image
        :param points: [[x1, y1], [x2, y2], ..., [xn, yn]] or None
        :return: (augmented image, transformed points or None)
        """
        h, w = image.shape[:2]
        T = self._get_transformation_matrix(img_size=(w, h))
        out = cv2.warpPerspective(image.copy(), T, self.out_size, None,
                                  self.interpolation, self.border_mode, self.border_value)
        if points is not None:
            # bug fix: np.float was removed in NumPy 1.24; the builtin float
            # gives the same float64 dtype.
            points = np.array(points, dtype=float)
            # convert to homogeneous coordinates
            if points.shape[1] == 2:
                nums = points.shape[0]
                points = np.hstack((points, np.ones((nums, 1), dtype=float)))
            points = np.matmul(T, points.T).T
            points = points[:, :2]
            if self.flip_point:
                # Re-order landmark rows so symmetric points swap sides.
                points = np.matmul(self.flip_matrix, points)
                self.flip_point = False
        for color_distortion, prob in zip(self.color_distortions, self.color_distortions_prob):
            if random.random() < prob:
                out = color_distortion.random_distort(out, self.color_mode)
        return out, points
class ColorDistortion:
    """Randomly jitter hue, saturation and exposure (value) in HSV space."""

    def __init__(self, hue=0.2, saturation=1.5, exposure=1.5):
        self.hue = hue
        self.saturation = saturation
        self.exposure = exposure

    def random_distort(self, image, mode="bgr"):
        """Return a colour-jittered copy of *image* ("bgr" or "rgb")."""
        if mode == "bgr":
            to_hsv, from_hsv = cv2.COLOR_BGR2HSV, cv2.COLOR_HSV2BGR
        elif mode == "rgb":
            to_hsv, from_hsv = cv2.COLOR_RGB2HSV, cv2.COLOR_HSV2RGB
        else:
            raise ValueError("unrecognised color mode {}".format(mode))
        # Draw all jitter amounts first (keeps RNG consumption order stable).
        delta_hue = np.random.uniform(-self.hue, self.hue)
        sat_gain = self._rand_scale(self.saturation)
        val_gain = self._rand_scale(self.exposure)
        hsv = cv2.cvtColor(image, to_hsv)
        hsv[:, :, 1] = cv2.multiply(hsv[:, :, 1], sat_gain)
        hsv[:, :, 2] = cv2.multiply(hsv[:, :, 2], val_gain)
        hsv = cv2.add(hsv, delta_hue)
        return cv2.cvtColor(hsv, from_hsv)

    @staticmethod
    def _rand_scale(s):
        """Return a factor drawn from [1, s], inverted with probability 0.5."""
        factor = np.random.uniform(1, s)
        invert = np.random.uniform(0, 1) >= 0.5
        return 1 / factor if invert else factor
class GaussianBlur:
    """Apply a fixed-kernel Gaussian blur with probability *prob*."""

    def __init__(self, prob=0.5, ksize=(5, 5)):
        self.prob = prob
        self.ksize = ksize

    def random_distort(self, image, mode="bgr"):
        """Blur *image* with probability self.prob; *mode* is unused."""
        apply_blur = random.random() < self.prob
        if not apply_blur:
            return image
        return cv2.GaussianBlur(image, self.ksize, 0)
class RandomTranslation:
    """Random translation by a fraction of the image size.

    tx_range / ty_range are (min, max) fractions in [-1, 1] of the image
    width / height respectively.
    """

    def __init__(self, tx_range, ty_range):
        self._validate_input(tx_range, ty_range)
        self.tx_range = tx_range
        self.ty_range = ty_range

    def _validate_input(self, *args):
        """Check that every range is a (min, max) pair within [-1, 1]."""
        for arg in args:
            if len(arg) != 2:
                raise ValueError("Both tx_range and ty_range must have length of 2")
            min_value = min(arg)
            max_value = max(arg)
            if min_value < -1.:
                raise ValueError("translation range must not < -1")
            if max_value > 1.:
                raise ValueError("translation range must not > 1")

    def get_transformation_matrix(self, img_size):
        """Return a random 3x3 translation matrix for a (width, height) image.

        Bug fix: the vertical offset is now drawn from ty_range; the original
        drew it from tx_range, which made ty_range inert.
        """
        iw, ih = img_size
        tx = random.uniform(*self.tx_range) * iw
        ty = random.uniform(*self.ty_range) * ih
        T = np.array([[1, 0, tx],
                      [0, 1, ty],
                      [0, 0, 1]])
        return T
class RandomScalingAndRotation:
    """Random rotation (degrees) and uniform scaling about the image centre."""

    def __init__(self, angle_range, scale_range):
        """
        :param angle_range: (min, max) rotation angle in degrees
        :param scale_range: (min, max) scale factor, both non-negative
        """
        self._validate_input(angle_range, scale_range)
        self.angle_range = angle_range
        self.scale_range = scale_range

    def _validate_input(self, *args):
        angle_range, scale_range = args
        for name, rng in (("angle_range", angle_range), ("scale_range", scale_range)):
            if len(rng) != 2:
                raise ValueError("{} must have length of 2".format(name))
        if any(value < 0 for value in scale_range):
            raise ValueError("scale_range must not < 0")

    def get_transformation_matrix(self, img_size):
        """Return a random 3x3 rotation+scale matrix about the image centre."""
        width, height = img_size
        pivot = (width / 2, height / 2)
        theta = random.uniform(*self.angle_range)
        factor = random.uniform(*self.scale_range)
        affine = cv2.getRotationMatrix2D(pivot, theta, factor)
        return np.vstack((affine, np.array([[0, 0, 1]])))
| 37.484444 | 114 | 0.54968 |
3092a7b20c06aa13ee0a1429480175ac5d032095 | 1,708 | py | Python | tests/unit/test_models/test_full_battery_models/test_lithium_ion/base_lithium_ion_half_cell_tests.py | manjunathnilugal/PyBaMM | 65d5cba534b4f163670e753714964aaa75d6a2d2 | [
"BSD-3-Clause"
] | 1 | 2021-11-10T14:37:30.000Z | 2021-11-10T14:37:30.000Z | tests/unit/test_models/test_full_battery_models/test_lithium_ion/base_lithium_ion_half_cell_tests.py | manjunathnilugal/PyBaMM | 65d5cba534b4f163670e753714964aaa75d6a2d2 | [
"BSD-3-Clause"
] | null | null | null | tests/unit/test_models/test_full_battery_models/test_lithium_ion/base_lithium_ion_half_cell_tests.py | manjunathnilugal/PyBaMM | 65d5cba534b4f163670e753714964aaa75d6a2d2 | [
"BSD-3-Clause"
] | 1 | 2020-11-04T12:32:52.000Z | 2020-11-04T12:32:52.000Z | #
# Base unit tests for lithium-ion half-cell models
# This is achieved by using the {"working electrdode": "positive"} option
#
class BaseUnitTestLithiumIonHalfCell:
    """Shared unit tests for lithium-ion half-cell model options.

    Subclasses set ``self.model`` to a model class (or ``None`` to skip).
    Each test builds the option dict for one feature and checks that the
    resulting model, run as a half cell (positive working electrode), is
    well posed.
    """

    def check_well_posedness(self, options):
        # Skip silently when no model class is configured on the subclass.
        if self.model is None:
            return
        options["working electrode"] = "positive"
        self.model(options).check_well_posedness()

    def test_well_posed_sei(self):
        self.check_well_posedness({})

    def test_well_posed_constant_utilisation(self):
        self.check_well_posedness({"interface utilisation": "constant"})

    def test_well_posed_current_driven_utilisation(self):
        self.check_well_posedness({"interface utilisation": "current-driven"})

    def test_well_posed_constant_sei(self):
        self.check_well_posedness({"SEI": "constant"})

    def test_well_posed_reaction_limited_sei(self):
        self.check_well_posedness({"SEI": "reaction limited"})

    def test_well_posed_solvent_diffusion_limited_sei(self):
        self.check_well_posedness({"SEI": "solvent-diffusion limited"})

    def test_well_posed_electron_migration_limited_sei(self):
        self.check_well_posedness({"SEI": "electron-migration limited"})

    def test_well_posed_interstitial_diffusion_limited_sei(self):
        self.check_well_posedness({"SEI": "interstitial-diffusion limited"})

    def test_well_posed_ec_reaction_limited_sei(self):
        self.check_well_posedness({"SEI": "ec reaction limited"})
| 34.857143 | 73 | 0.703162 |
55556b42557864019fdefdf7bde8084f099a321c | 2,021 | py | Python | openstack/network/v2/vpn_service.py | teresa-ho/stx-openstacksdk | 7d723da3ffe9861e6e9abcaeadc1991689f782c5 | [
"Apache-2.0"
] | null | null | null | openstack/network/v2/vpn_service.py | teresa-ho/stx-openstacksdk | 7d723da3ffe9861e6e9abcaeadc1991689f782c5 | [
"Apache-2.0"
] | null | null | null | openstack/network/v2/vpn_service.py | teresa-ho/stx-openstacksdk | 7d723da3ffe9861e6e9abcaeadc1991689f782c5 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.network import network_service
from openstack import resource2 as resource
# NOTE: The VPN service is unmaintained, need to consider remove it
class VPNService(resource.Resource):
    """Declarative mapping of the Neutron VPNaaS ``vpnservice`` resource.

    Field definitions only; CRUD behaviour comes from ``resource.Resource``
    using ``base_path`` and the capability flags below.
    """
    resource_key = 'vpnservice'
    resources_key = 'vpnservices'
    base_path = '/vpn/vpnservices'
    service = network_service.NetworkService()

    # capabilities
    allow_create = True
    allow_get = True
    allow_update = True
    allow_delete = True
    allow_list = True

    # Properties
    #: Human-readable description for the vpnservice.
    description = resource.Body('description')
    #: The external IPv4 address that is used for the VPN service.
    external_v4_ip = resource.Body('external_v4_ip')
    #: The external IPv6 address that is used for the VPN service.
    external_v6_ip = resource.Body('external_v6_ip')
    #: The administrative state of the vpnservice, which is up ``True`` or
    #: down ``False``. *Type: bool*
    is_admin_state_up = resource.Body('admin_state_up', type=bool)
    #: The vpnservice name.
    name = resource.Body('name')
    #: ID of the router into which the VPN service is inserted.
    router_id = resource.Body('router_id')
    #: The ID of the project this vpnservice is associated with.
    project_id = resource.Body('tenant_id')
    #: The vpnservice status.
    status = resource.Body('status')
    #: The ID of the subnet on which the tenant wants the vpnservice.
    subnet_id = resource.Body('subnet_id')
| 38.865385 | 75 | 0.724394 |
c8f5a380fb962e4d3a36ddca554eb3e24c5c85d2 | 494 | py | Python | code/1641.Count-Sorted-Vowel-Strings.py | Aden-Q/leetcode | ebd4804edd4f172b9981b22c18d9ff654cf20762 | [
"Apache-2.0"
] | 1 | 2019-09-22T03:08:14.000Z | 2019-09-22T03:08:14.000Z | code/1641.Count-Sorted-Vowel-Strings.py | Aden-Q/leetcode | ebd4804edd4f172b9981b22c18d9ff654cf20762 | [
"Apache-2.0"
] | null | null | null | code/1641.Count-Sorted-Vowel-Strings.py | Aden-Q/leetcode | ebd4804edd4f172b9981b22c18d9ff654cf20762 | [
"Apache-2.0"
] | null | null | null | class Solution:
def countVowelStrings(self, n: int) -> int:
d = ['a', 'e', 'i', 'o', 'u']
path = []
count = 0
def backtracking(n, start):
nonlocal count
if n == 0:
count += 1
return
for i in range(start, 5):
path.append(d[i])
backtracking(n-1, i)
path.pop()
return
backtracking(n, 0)
return count | 26 | 47 | 0.388664 |
7620946ff1f06bc90028397e494e33e11ebe790e | 65 | py | Python | workflows/Demo_DtoxS/widgets/Demo_DtoxS/__init__.py | rgschmitz1/BioDepot-workflow-builder | f74d904eeaf91ec52ec9b703d9fb38e9064e5a66 | [
"MIT"
] | 54 | 2017-01-08T17:21:49.000Z | 2021-11-02T08:46:07.000Z | workflows/Demo_DtoxS/widgets/Demo_DtoxS/__init__.py | Synthia-3/BioDepot-workflow-builder | 4ee93abe2d79465755e82a145af3b6a6e1e79fd4 | [
"MIT"
] | 22 | 2017-03-28T06:03:14.000Z | 2021-07-28T05:43:55.000Z | workflows/Demo_DtoxS/widgets/Demo_DtoxS/__init__.py | Synthia-3/BioDepot-workflow-builder | 4ee93abe2d79465755e82a145af3b6a6e1e79fd4 | [
"MIT"
] | 21 | 2017-01-26T21:12:09.000Z | 2022-01-31T21:34:59.000Z | import sysconfig
# Widget metadata consumed by the workflow builder UI.
ICON = "icon/DToxS.png"  # relative path to the widget icon
BACKGROUND = "#9dffcb"  # widget background colour (hex RGB)
| 13 | 23 | 0.723077 |
fd625dc366f2d0648269163c6e120944258050f2 | 14,075 | py | Python | pyleecan/GUI/Dxf/Ui_DXF_Slot.py | carbon-drive/pyleecan | e89d4fe97f23f6182c19127d2c6a2133614e169d | [
"Apache-2.0"
] | 1 | 2021-07-08T01:27:24.000Z | 2021-07-08T01:27:24.000Z | pyleecan/GUI/Dxf/Ui_DXF_Slot.py | ecs-kev/pyleecan | 1faedde4b24acc6361fa1fdd4e980eaec4ca3a62 | [
"Apache-2.0"
] | null | null | null | pyleecan/GUI/Dxf/Ui_DXF_Slot.py | ecs-kev/pyleecan | 1faedde4b24acc6361fa1fdd4e980eaec4ca3a62 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# File generated according to DXF_Slot.ui
# WARNING! All changes made in this file will be lost!
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
from ...GUI.Tools.WPathSelector.WPathSelector import WPathSelector
from ...GUI.Tools.MPLCanvas import MPLCanvas2
from ...GUI.Tools.FloatEdit import FloatEdit
from pyleecan.GUI.Resources import pyleecan_rc
class Ui_DXF_Slot(object):
    """Auto-generated (pyside2-uic) layout class for the "Define Slot from
    DXF" dialog.  Do not edit by hand: regenerating from DXF_Slot.ui will
    overwrite any change (see the warning in the file header)."""

    def setupUi(self, DXF_Slot):
        """Build and lay out all widgets on the *DXF_Slot* dialog."""
        if not DXF_Slot.objectName():
            DXF_Slot.setObjectName(u"DXF_Slot")
        DXF_Slot.resize(745, 551)
        icon = QIcon()
        icon.addFile(
            u":/images/images/icon/pyleecan_64.png", QSize(), QIcon.Normal, QIcon.Off
        )
        DXF_Slot.setWindowIcon(icon)
        # Left column: toolbar buttons, DXF viewer canvas and help text.
        self.horizontalLayout_3 = QHBoxLayout(DXF_Slot)
        self.horizontalLayout_3.setObjectName(u"horizontalLayout_3")
        self.verticalLayout_2 = QVBoxLayout()
        self.verticalLayout_2.setObjectName(u"verticalLayout_2")
        self.horizontalLayout = QHBoxLayout()
        self.horizontalLayout.setObjectName(u"horizontalLayout")
        self.b_reset = QPushButton(DXF_Slot)
        self.b_reset.setObjectName(u"b_reset")
        self.horizontalLayout.addWidget(self.b_reset)
        self.b_cancel = QPushButton(DXF_Slot)
        self.b_cancel.setObjectName(u"b_cancel")
        self.horizontalLayout.addWidget(self.b_cancel)
        self.b_tuto = QPushButton(DXF_Slot)
        self.b_tuto.setObjectName(u"b_tuto")
        self.b_tuto.setEnabled(False)
        self.horizontalLayout.addWidget(self.b_tuto)
        self.horizontalSpacer_2 = QSpacerItem(
            40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum
        )
        self.horizontalLayout.addItem(self.horizontalSpacer_2)
        self.verticalLayout_2.addLayout(self.horizontalLayout)
        self.w_viewer = MPLCanvas2(DXF_Slot)
        self.w_viewer.setObjectName(u"w_viewer")
        self.verticalLayout_2.addWidget(self.w_viewer)
        self.textBrowser = QTextBrowser(DXF_Slot)
        self.textBrowser.setObjectName(u"textBrowser")
        self.textBrowser.setMaximumSize(QSize(16777215, 200))
        self.verticalLayout_2.addWidget(self.textBrowser)
        self.horizontalLayout_3.addLayout(self.verticalLayout_2)
        # Right column: file selector and slot-definition parameters.
        self.widget = QWidget(DXF_Slot)
        self.widget.setObjectName(u"widget")
        self.widget.setMaximumSize(QSize(400, 16777215))
        self.verticalLayout = QVBoxLayout(self.widget)
        self.verticalLayout.setObjectName(u"verticalLayout")
        self.w_path_selector = WPathSelector(self.widget)
        self.w_path_selector.setObjectName(u"w_path_selector")
        sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(
            self.w_path_selector.sizePolicy().hasHeightForWidth()
        )
        self.w_path_selector.setSizePolicy(sizePolicy)
        self.verticalLayout.addWidget(self.w_path_selector)
        self.gridLayout = QGridLayout()
        self.gridLayout.setObjectName(u"gridLayout")
        self.in_Zs = QLabel(self.widget)
        self.in_Zs.setObjectName(u"in_Zs")
        self.gridLayout.addWidget(self.in_Zs, 0, 0, 1, 1)
        self.si_Zs = QSpinBox(self.widget)
        self.si_Zs.setObjectName(u"si_Zs")
        sizePolicy1 = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
        sizePolicy1.setHorizontalStretch(0)
        sizePolicy1.setVerticalStretch(0)
        sizePolicy1.setHeightForWidth(self.si_Zs.sizePolicy().hasHeightForWidth())
        self.si_Zs.setSizePolicy(sizePolicy1)
        self.si_Zs.setMinimum(1)
        self.si_Zs.setMaximum(1000)
        self.si_Zs.setSingleStep(0)
        self.si_Zs.setValue(36)
        self.gridLayout.addWidget(self.si_Zs, 0, 1, 1, 1)
        self.in_wind_begin_index = QLabel(self.widget)
        self.in_wind_begin_index.setObjectName(u"in_wind_begin_index")
        self.gridLayout.addWidget(self.in_wind_begin_index, 1, 0, 1, 1)
        self.si_wind_begin_index = QSpinBox(self.widget)
        self.si_wind_begin_index.setObjectName(u"si_wind_begin_index")
        self.gridLayout.addWidget(self.si_wind_begin_index, 1, 1, 1, 1)
        self.in_wind_end_index = QLabel(self.widget)
        self.in_wind_end_index.setObjectName(u"in_wind_end_index")
        self.gridLayout.addWidget(self.in_wind_end_index, 2, 0, 1, 1)
        self.si_wind_end_index = QSpinBox(self.widget)
        self.si_wind_end_index.setObjectName(u"si_wind_end_index")
        self.gridLayout.addWidget(self.si_wind_end_index, 2, 1, 1, 1)
        self.in_type_line = QLabel(self.widget)
        self.in_type_line.setObjectName(u"in_type_line")
        self.gridLayout.addWidget(self.in_type_line, 3, 0, 1, 1)
        self.c_type_line = QComboBox(self.widget)
        self.c_type_line.addItem("")
        self.c_type_line.addItem("")
        self.c_type_line.setObjectName(u"c_type_line")
        self.gridLayout.addWidget(self.c_type_line, 3, 1, 1, 1)
        self.in_axe_angle = QLabel(self.widget)
        self.in_axe_angle.setObjectName(u"in_axe_angle")
        self.gridLayout.addWidget(self.in_axe_angle, 4, 0, 1, 1)
        self.lf_axe_angle = FloatEdit(self.widget)
        self.lf_axe_angle.setObjectName(u"lf_axe_angle")
        sizePolicy2 = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
        sizePolicy2.setHorizontalStretch(0)
        sizePolicy2.setVerticalStretch(0)
        sizePolicy2.setHeightForWidth(
            self.lf_axe_angle.sizePolicy().hasHeightForWidth()
        )
        self.lf_axe_angle.setSizePolicy(sizePolicy2)
        self.gridLayout.addWidget(self.lf_axe_angle, 4, 1, 1, 1)
        self.verticalLayout.addLayout(self.gridLayout)
        # Machine center (x, y) input row.
        self.horizontalLayout_5 = QHBoxLayout()
        self.horizontalLayout_5.setObjectName(u"horizontalLayout_5")
        self.horizontalLayout_5.setSizeConstraint(QLayout.SetDefaultConstraint)
        self.in_coord_center = QLabel(self.widget)
        self.in_coord_center.setObjectName(u"in_coord_center")
        self.horizontalLayout_5.addWidget(self.in_coord_center)
        self.lf_center_x = FloatEdit(self.widget)
        self.lf_center_x.setObjectName(u"lf_center_x")
        sizePolicy3 = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
        sizePolicy3.setHorizontalStretch(0)
        sizePolicy3.setVerticalStretch(0)
        sizePolicy3.setHeightForWidth(self.lf_center_x.sizePolicy().hasHeightForWidth())
        self.lf_center_x.setSizePolicy(sizePolicy3)
        self.horizontalLayout_5.addWidget(self.lf_center_x)
        self.lf_center_y = FloatEdit(self.widget)
        self.lf_center_y.setObjectName(u"lf_center_y")
        self.lf_center_y.setEnabled(True)
        sizePolicy3.setHeightForWidth(self.lf_center_y.sizePolicy().hasHeightForWidth())
        self.lf_center_y.setSizePolicy(sizePolicy3)
        self.lf_center_y.setMaximumSize(QSize(137, 16777215))
        self.horizontalLayout_5.addWidget(self.lf_center_y)
        self.verticalLayout.addLayout(self.horizontalLayout_5)
        self.verticalSpacer = QSpacerItem(
            20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding
        )
        self.verticalLayout.addItem(self.verticalSpacer)
        # Scaling-factor input row.
        self.horizontalLayout_4 = QHBoxLayout()
        self.horizontalLayout_4.setObjectName(u"horizontalLayout_4")
        self.in_scaling = QLabel(self.widget)
        self.in_scaling.setObjectName(u"in_scaling")
        self.horizontalLayout_4.addWidget(self.in_scaling)
        self.lf_scaling = FloatEdit(self.widget)
        self.lf_scaling.setObjectName(u"lf_scaling")
        sizePolicy3.setHeightForWidth(self.lf_scaling.sizePolicy().hasHeightForWidth())
        self.lf_scaling.setSizePolicy(sizePolicy3)
        self.horizontalLayout_4.addWidget(self.lf_scaling)
        self.verticalLayout.addLayout(self.horizontalLayout_4)
        # Plot / Save action buttons.
        self.horizontalLayout_2 = QHBoxLayout()
        self.horizontalLayout_2.setObjectName(u"horizontalLayout_2")
        self.horizontalLayout_2.setSizeConstraint(QLayout.SetFixedSize)
        self.horizontalSpacer = QSpacerItem(
            40, 20, QSizePolicy.Fixed, QSizePolicy.Minimum
        )
        self.horizontalLayout_2.addItem(self.horizontalSpacer)
        self.b_plot = QPushButton(self.widget)
        self.b_plot.setObjectName(u"b_plot")
        self.horizontalLayout_2.addWidget(self.b_plot)
        self.b_save = QPushButton(self.widget)
        self.b_save.setObjectName(u"b_save")
        self.horizontalLayout_2.addWidget(self.b_save)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        self.horizontalLayout_3.addWidget(self.widget)

        self.retranslateUi(DXF_Slot)

        QMetaObject.connectSlotsByName(DXF_Slot)

    # setupUi

    def retranslateUi(self, DXF_Slot):
        """Set all user-visible (translatable) strings on the widgets."""
        DXF_Slot.setWindowTitle(
            QCoreApplication.translate("DXF_Slot", u"Define Slot from DXF", None)
        )
        self.b_reset.setText(
            QCoreApplication.translate("DXF_Slot", u"Reset View", None)
        )
        self.b_cancel.setText(
            QCoreApplication.translate("DXF_Slot", u"Cancel Selection", None)
        )
        self.b_tuto.setText(
            QCoreApplication.translate("DXF_Slot", u"Open Tutorial", None)
        )
        self.textBrowser.setHtml(
            QCoreApplication.translate(
                "DXF_Slot",
                u'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd">\n'
                '<html><head><meta name="qrichtext" content="1" /><style type="text/css">\n'
                "p, li { white-space: pre-wrap; }\n"
                "</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:7.8pt; font-weight:400; font-style:normal;\">\n"
                '<p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:12pt;">1) Select DXF file in [m] (or use scaling factor), spline won\'t be displayed</span></p>\n'
                '<p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:12pt;">2) Use mouse wheel to zoom in/out</span></p>\n'
                '<p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:12pt;">3) Click on lines and arc'
                "s to draw the contour of a single slot</span></p>\n"
                '<p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:12pt;">4) First point and last point must be on the bore radius (must match the lamination radius)</span></p>\n'
                '<p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:12pt;">5) The winding area is define by a part of the slot contour and a closing line:</span></p>\n'
                '<p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:12pt;">- The points are ordered in trigonometrical order (from bore radius to bore radius)</span></p>\n'
                '<p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" '
                'font-size:12pt;">- First point index is 0</span></p>\n'
                '<p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:12pt;">- Closing line can be either a Segment or an Arc (center 0)</span></p>\n'
                '<p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:12pt;">6) Plot to check and save</span></p></body></html>',
                None,
            )
        )
        self.in_Zs.setText(
            QCoreApplication.translate("DXF_Slot", u"Number of slot", None)
        )
        self.in_wind_begin_index.setText(
            QCoreApplication.translate("DXF_Slot", u"Index start of winding", None)
        )
        self.in_wind_end_index.setText(
            QCoreApplication.translate("DXF_Slot", u"Index end of winding", None)
        )
        self.in_type_line.setText(
            QCoreApplication.translate("DXF_Slot", u"Type closing line", None)
        )
        self.c_type_line.setItemText(
            0, QCoreApplication.translate("DXF_Slot", u"Segment", None)
        )
        self.c_type_line.setItemText(
            1, QCoreApplication.translate("DXF_Slot", u"Arc1", None)
        )
        self.in_axe_angle.setText(
            QCoreApplication.translate("DXF_Slot", u"Slot axe angle shift", None)
        )
        self.lf_axe_angle.setText(QCoreApplication.translate("DXF_Slot", u"0", None))
        self.in_coord_center.setText(
            QCoreApplication.translate("DXF_Slot", u"Machine center (x,y)", None)
        )
        self.lf_center_x.setText(QCoreApplication.translate("DXF_Slot", u"0", None))
        self.lf_center_y.setText(QCoreApplication.translate("DXF_Slot", u"0", None))
        self.in_scaling.setText(
            QCoreApplication.translate("DXF_Slot", u"Scaling factor", None)
        )
        self.lf_scaling.setText(QCoreApplication.translate("DXF_Slot", u"1", None))
        self.b_plot.setText(QCoreApplication.translate("DXF_Slot", u"Plot", None))
        self.b_save.setText(QCoreApplication.translate("DXF_Slot", u"Save", None))

    # retranslateUi
| 45.257235 | 288 | 0.676661 |
1c10f930406d0d842bde7039b462cbd6bd42d3c5 | 1,509 | py | Python | sympy/polys/benchmarks/bench_galoispolys.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | [
"BSD-3-Clause"
] | null | null | null | sympy/polys/benchmarks/bench_galoispolys.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | [
"BSD-3-Clause"
] | null | null | null | sympy/polys/benchmarks/bench_galoispolys.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | [
"BSD-3-Clause"
] | null | null | null | """Benchmarks for polynomials over Galois fields. """
from __future__ import print_function, division
from sympy.polys.galoistools import gf_from_dict, gf_factor_sqf
from sympy.polys.domains import ZZ
from sympy import pi, nextprime
def gathen_poly(n, p, K):
    """Build the von zur Gathen benchmark polynomial x**n + x + 1 over GF(p).

    Assumes n > 1 so that the three dict keys are distinct -- TODO confirm.
    """
    return gf_from_dict({n: K.one, 1: K.one, 0: K.one}, p, K)
def shoup_poly(n, p, K):
    """Build a degree-n Shoup benchmark polynomial over GF(p): f[0] = 1 and
    f[i] = (f[i-1]**2 + 1) mod p, returned as a dense coefficient list."""
    one = K.one
    coeffs = [one]
    prev = one
    for _ in range(n):
        prev = (prev * prev + one) % p
        coeffs.append(prev)
    return coeffs
def genprime(n, K):
    """Return the smallest prime greater than 2**n * pi, coerced into K."""
    return K(nextprime(int((2 ** n * pi).evalf())))
# Benchmark fixtures: Gathen polynomials over ~10- and ~20-bit primes.
p_10 = genprime(10, ZZ)
f_10 = gathen_poly(10, p_10, ZZ)
p_20 = genprime(20, ZZ)
f_20 = gathen_poly(20, p_20, ZZ)
# Each timeit_* function below is a benchmark entry point that square-free
# factors one fixture with either the Zassenhaus or the Shoup method.
def timeit_gathen_poly_f10_zassenhaus():
    gf_factor_sqf(f_10, p_10, ZZ, method="zassenhaus")
def timeit_gathen_poly_f10_shoup():
    gf_factor_sqf(f_10, p_10, ZZ, method="shoup")
def timeit_gathen_poly_f20_zassenhaus():
    gf_factor_sqf(f_20, p_20, ZZ, method="zassenhaus")
def timeit_gathen_poly_f20_shoup():
    gf_factor_sqf(f_20, p_20, ZZ, method="shoup")
# Shoup polynomial fixtures over ~8- and ~18-bit primes.
P_08 = genprime(8, ZZ)
F_10 = shoup_poly(10, P_08, ZZ)
P_18 = genprime(18, ZZ)
F_20 = shoup_poly(20, P_18, ZZ)
def timeit_shoup_poly_F10_zassenhaus():
    gf_factor_sqf(F_10, P_08, ZZ, method="zassenhaus")
def timeit_shoup_poly_F10_shoup():
    gf_factor_sqf(F_10, P_08, ZZ, method="shoup")
def timeit_shoup_poly_F20_zassenhaus():
    gf_factor_sqf(F_20, P_18, ZZ, method="zassenhaus")
def timeit_shoup_poly_F20_shoup():
    gf_factor_sqf(F_20, P_18, ZZ, method="shoup")
20f87c210ddb83e4efc2198967f977655fc0fadf | 693 | py | Python | tests/test_sampler.py | sonesuke/my-portfolio | 4fd19fdee8a0aa13194cab0df53c83218c5664e3 | [
"MIT"
] | 2 | 2021-03-14T00:14:25.000Z | 2021-09-04T16:26:02.000Z | tests/test_sampler.py | sonesuke/my-portfolio | 4fd19fdee8a0aa13194cab0df53c83218c5664e3 | [
"MIT"
] | 104 | 2021-02-21T08:11:11.000Z | 2021-09-26T03:02:27.000Z | tests/test_sampler.py | sonesuke/mypo | 4fd19fdee8a0aa13194cab0df53c83218c5664e3 | [
"MIT"
] | null | null | null | import os
import numpy.testing as npt
import pytest
from mypo import Market
from mypo.sampler import Sampler
# Marker to unconditionally skip the long-running sampler-construction test.
skip_long_tests = pytest.mark.skipif(True, reason="This test takes long time.")
TEST_DATA = os.path.join(os.path.dirname(__file__), "data", "test.bin")
MODEL_DATA = os.path.join(os.path.dirname(__file__), "data", "sample.bin")
@skip_long_tests
def test_save_load() -> None:
    """Build a Sampler from a small market slice and persist it to disk."""
    market = Market.load(TEST_DATA)
    market = market.head(10)
    sampler = Sampler(market, samples=5)
    sampler.save(MODEL_DATA)
def test_sample() -> None:
    """Load the stored sampler and check the sample mean matches the fixture."""
    sampler = Sampler.load(MODEL_DATA)
    samples = sampler.sample(100)
    npt.assert_almost_equal(samples.mean(), [0.0034562, 0.001737])
66ecbae1521b89210e27ef63969a684f4409e34a | 351 | py | Python | test/espnet2/utils/test_model_summary.py | wrist/espnet | 7e6be051bd575bd82310d7d460ab68b7394c7926 | [
"Apache-2.0"
] | null | null | null | test/espnet2/utils/test_model_summary.py | wrist/espnet | 7e6be051bd575bd82310d7d460ab68b7394c7926 | [
"Apache-2.0"
] | null | null | null | test/espnet2/utils/test_model_summary.py | wrist/espnet | 7e6be051bd575bd82310d7d460ab68b7394c7926 | [
"Apache-2.0"
] | 1 | 2021-03-10T19:25:48.000Z | 2021-03-10T19:25:48.000Z | import torch
from espnet2.utils.model_summary import model_summary
class Model(torch.nn.Module):
    """Toy three-layer linear network used to exercise model_summary."""

    def __init__(self):
        super().__init__()
        dim = 1000
        self.l1 = torch.nn.Linear(dim, dim)
        self.l2 = torch.nn.Linear(dim, dim)
        self.l3 = torch.nn.Linear(dim, dim)
def test_model_summary():
    """Smoke test: model_summary must render without error for a simple model."""
    print(model_summary(Model()))
| 21.9375 | 53 | 0.669516 |
6bbabb80e32937f2c33da9700a9205b1209aae77 | 2,394 | py | Python | src/visitpy/visit_flow/visit_flow_vpe/tests/test_flow_vpe.py | brugger1/visit | ed867b92bc54ed01d3c66ed354b231e30f0b3d0e | [
"BSD-3-Clause"
] | null | null | null | src/visitpy/visit_flow/visit_flow_vpe/tests/test_flow_vpe.py | brugger1/visit | ed867b92bc54ed01d3c66ed354b231e30f0b3d0e | [
"BSD-3-Clause"
] | null | null | null | src/visitpy/visit_flow/visit_flow_vpe/tests/test_flow_vpe.py | brugger1/visit | ed867b92bc54ed01d3c66ed354b231e30f0b3d0e | [
"BSD-3-Clause"
] | 1 | 2020-03-18T23:17:43.000Z | 2020-03-18T23:17:43.000Z | # Copyright (c) Lawrence Livermore National Security, LLC and other VisIt
# Project developers. See the top-level LICENSE file for dates and other
# details. No copyright assignment is required to contribute to VisIt.
"""
file: test_flow_vpe_npy_ops.py
author: Cyrus Harrison <cyrush@llnl.gov>
created: 3/28/2012
description:
unittest test cases for flow vpe w/ npy_ops module.
"""
import os
import unittest
import visit
from os.path import join as pjoin
from flow import *
from flow.filters import npy_ops
from visit_flow_vpe import *
# uncomment for detailed exe info
#import logging
#logging.basicConfig(level=logging.INFO)
# Resolve the tests directory and the sibling examples directory relative
# to this file so the suite works from any working directory.
tests_dir = os.path.split(__file__)[0]
examples_dir = pjoin(tests_dir,"..","examples")
class TestFlowVPE(unittest.TestCase):
    """End-to-end checks of the flow VPE inside a running VisIt session.

    Each test builds a "flow" expression from an example workspace, plots a
    "check" expression that should be identically zero, and asserts its sum
    is numerically zero.  Note: this file is Python 2 (bare ``print ""``).
    """
    def setUp(self):
        # Open the shared test database before every test.
        self.data_path = pjoin(tests_dir,"_data","rect2d.silo")
        visit.OpenDatabase(self.data_path)
        print ""
    def setup_workspace(self,file):
        """Load an example workspace and plot flow + its zero-check expression."""
        define_flow_vpe("flow",pjoin(examples_dir,file),"pyocl_ops",0,0)
        visit.AddPlot("Pseudocolor","flow")
        visit.DrawPlots()
        visit.DefineScalarExpression("check","flow - ((d + p)^2.0 + (d-p)^2.0)")
        visit.AddPlot("Pseudocolor","check")
        visit.DrawPlots()
    def test_01_npy_ops(self):
        self.setup_workspace("flow_vpe_npy_ops_example_1.py")
        # the total sum of all scalar vals of 'check' should equal zero.
        res = 1e8
        if visit.Query("Variable Sum"):
            res = visit.GetQueryOutputValue()
        self.assertTrue(res < 1.0e-8)
    def test_02_pyocl_ops(self):
        self.setup_workspace("flow_vpe_pyocl_ops_example_1.py")
        # the total sum of all scalar vals of 'check' should equal zero.
        res = 1e8
        if visit.Query("Variable Sum"):
            res = visit.GetQueryOutputValue()
        self.assertTrue(res < 1.0e-8)
    def test_03_pyocl_compile(self):
        self.setup_workspace("flow_vpe_pyocl_compile_example_1.py")
        # the total sum of all scalar vals of 'check' should equal zero.
        res = 1e8
        if visit.Query("Variable Sum"):
            res = visit.GetQueryOutputValue()
        self.assertTrue(res < 1.0e-8)
    def tearDown(self):
        # clean up
        visit.DeleteAllPlots()
        visit.CloseDatabase(self.data_path)
        visit.CloseComputeEngine()
# Allow running this suite directly as a script.
if __name__ == '__main__':
    unittest.main()
| 31.5 | 80 | 0.672515 |
711b50b8d17a3eb7bd4927df5b757d8a694c2df7 | 914 | py | Python | migrations/versions/3f9f0d212519_.py | MB2407/Book-Recommendation-Analysis | 791d487835653aeee877fff1337f0438f0a2ec4c | [
"MIT"
] | 1 | 2021-10-30T17:01:10.000Z | 2021-10-30T17:01:10.000Z | migrations/versions/3f9f0d212519_.py | MB2407/Book-Recommendation-Analysis | 791d487835653aeee877fff1337f0438f0a2ec4c | [
"MIT"
] | null | null | null | migrations/versions/3f9f0d212519_.py | MB2407/Book-Recommendation-Analysis | 791d487835653aeee877fff1337f0438f0a2ec4c | [
"MIT"
] | null | null | null | """empty message
Revision ID: 3f9f0d212519
Revises: 331300dc0a2e
Create Date: 2021-01-20 09:44:43.873635
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3f9f0d212519'
down_revision = '331300dc0a2e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('ratings',
sa.Column('rid', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('book_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['book_id'], ['books.bid'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('rid')
)
# ### end Alembic commands ###
def downgrade():
    """Drop the ``ratings`` table, reversing :func:`upgrade`."""
    table_name = 'ratings'
    op.drop_table(table_name)
| 25.388889 | 65 | 0.671772 |
48027bd17542f4a0da26b85aaf99dc9305cb053c | 1,257 | py | Python | sc2maptool/index.py | mikemhenry/sc2gameMapRepo | e73eda242dde483e8abf0acb5050720730543e0d | [
"Apache-2.0"
] | 1 | 2018-06-16T00:48:44.000Z | 2018-06-16T00:48:44.000Z | sc2maptool/index.py | mikemhenry/sc2gameMapRepo | e73eda242dde483e8abf0acb5050720730543e0d | [
"Apache-2.0"
] | 4 | 2018-06-14T03:16:52.000Z | 2018-06-15T23:50:02.000Z | sc2maptool/index.py | mikemhenry/sc2gameMapRepo | e73eda242dde483e8abf0acb5050720730543e0d | [
"Apache-2.0"
] | null | null | null |
from glob import glob
import os
from sc2maptool.mapRecord import MapRecord
from sc2maptool import constants as c
################################################################################
class IndexCache(object):
    """Mutable namespace used to memoise the parsed map index.

    Attributes (e.g. ``structure``) are attached lazily by ``getIndex()``.
    """
    def __init__(self):
        pass
cache = IndexCache()  # module-level singleton holding the cached map index
################################################################################
def getIndex(folderPath=c.MAPS_FOLDER):
    """parse the 'Maps' subfolder directory divining criteria for valid maps

    The result is memoised on the module-level ``cache`` object, so the
    directory tree is only walked on the first call per process.
    """
    try:    return cache.structure
    except AttributeError: pass # if it doesn't exist, generate and cache the map file data
    ############################################################################
    def folderSearch(path, attrList=[]):
        # Recursively collect MapRecords; subfolder names accumulate as map
        # attributes. The mutable default argument is safe here: attrList is
        # never mutated, only concatenated into a new list each recursion.
        ret = []
        for item in glob(os.path.join(path, '*')):
            if item == os.sep: continue
            itemName = os.path.basename(item)
            if os.path.isdir(item):                 ret += folderSearch(item, attrList + [itemName])
            elif itemName.endswith(c.SC2_MAP_EXT):  ret.append( MapRecord(itemName, item, attrList) )
        return ret
    ############################################################################
    cache.structure = folderSearch(folderPath)
    return cache.structure
| 39.28125 | 101 | 0.495625 |
c886b4aebdb2989515bfb4a464a7abceca4c2afc | 2,205 | py | Python | dashboard/views.py | sayyss/doraemon-dashboard | 8ed68828e294e145be2a0044d3c2b55470d70ec5 | [
"MIT"
] | null | null | null | dashboard/views.py | sayyss/doraemon-dashboard | 8ed68828e294e145be2a0044d3c2b55470d70ec5 | [
"MIT"
] | null | null | null | dashboard/views.py | sayyss/doraemon-dashboard | 8ed68828e294e145be2a0044d3c2b55470d70ec5 | [
"MIT"
] | null | null | null | import requests
from django.http import JsonResponse
from django.shortcuts import render, redirect
# Discord OAuth2 authorization endpoint for this app: requests the identify,
# email and guilds scopes and redirects back to the local /user callback view.
auth_url = "https://discord.com/api/oauth2/authorize?client_id=709321027775365150&redirect_uri=http%3A%2F%2F127.0.0.1%3A8000%2Fuser&response_type=code&scope=identify%20email%20guilds"
def index(request):
    """Render the dashboard home page, with user/guild data if logged in."""
    token = request.session.get('access_token')
    user = None
    guild = None
    if token:
        user, guild = getData(request.session['access_token'])
    return render(request, 'index.html', {'user': user, 'guild': guild})
def auth(request):
    """Start the OAuth2 flow, or go straight to the user view if already authorised."""
    token = request.session.get('access_token')
    if token:
        # Already authorised: redirect(user) resolves the `user` view below.
        return redirect(user)
    return redirect(auth_url)
def user(request):
    """OAuth2 callback: exchange the code for a token, store it, return user data."""
    token = request.session.get('access_token')
    if token:
        user, guild = getData(request.session['access_token'])
        return JsonResponse({'user': user, 'guild': guild})
        #return render(request, 'user.html', {'user': user, 'guild': guild})
    # First visit: Discord redirected here with a ?code= query parameter.
    code = request.GET.get('code')
    user, guild, access_token = exchange_code(code)
    request.session['access_token'] = access_token
    request.session['userID'] = user['id']
    return redirect(auth)
def getData(access_token):
    """Fetch the current user's profile and guild list from the Discord API."""
    user_response = requests.get("https://discord.com/api/v6/users/@me", headers={
        'Authorization': 'Bearer %s' % access_token
    })
    guild_response = requests.get("https://discord.com/api/v6/users/@me/guilds", headers={
        'Authorization': 'Bearer %s' % access_token
    })
    return user_response.json(), guild_response.json()
def exchange_code(code: str):
    """Exchange an OAuth2 authorization code for an access token.

    Returns a (user profile, guild list, access token) tuple.
    """
    # NOTE(security): the client secret is hard-coded and committed here; it
    # should be moved to configuration/environment and the secret rotated.
    data = {
        "client_id": "709321027775365150",
        "client_secret": "I_7XyaUS-4QmW5ttSd8lNNt2XXCxd5U6",
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": "http://127.0.0.1:8000/user",
        "scope": "identify email guild"
    }
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    response = requests.post('https://discord.com/api/oauth2/token', data=data, headers=headers)
    credentials = response.json()
    access_token = credentials['access_token']
    user, guild = getData(access_token)
    return user, guild, access_token
| 31.056338 | 183 | 0.671655 |
66e732a42791200c73d7c2b50e6156e441eef7c2 | 2,053 | py | Python | mopidy/internal/xdg.py | grdorin/mopidy | 76db44088c102d7ad92a3fc6a15a938e66b99b0d | [
"Apache-2.0"
] | 6,700 | 2015-01-01T03:57:59.000Z | 2022-03-30T09:31:31.000Z | mopidy/internal/xdg.py | pnijhara/mopidy | 7168787ea6c82b66e138fc2b388d78fa1c7661ba | [
"Apache-2.0"
] | 1,141 | 2015-01-02T09:48:59.000Z | 2022-03-28T22:25:30.000Z | mopidy/internal/xdg.py | pnijhara/mopidy | 7168787ea6c82b66e138fc2b388d78fa1c7661ba | [
"Apache-2.0"
] | 735 | 2015-01-01T21:15:50.000Z | 2022-03-20T16:13:44.000Z | import configparser
import os
import pathlib
def get_dirs():
    """Return a dict of the known XDG Base Directories for the current user.

    The keys ``XDG_CACHE_DIR``, ``XDG_CONFIG_DIR``, and ``XDG_DATA_DIR``
    are always present. Additional keys, such as ``XDG_MUSIC_DIR``, appear
    if the ``$XDG_CONFIG_DIR/user-dirs.dirs`` file exists and is parseable.

    See http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
    for the XDG Base Directory specification.
    """
    defaults = (
        ("XDG_CACHE_DIR", "XDG_CACHE_HOME", "~/.cache"),
        ("XDG_CONFIG_DIR", "XDG_CONFIG_HOME", "~/.config"),
        ("XDG_DATA_DIR", "XDG_DATA_HOME", "~/.local/share"),
    )
    dirs = {
        key: pathlib.Path(os.getenv(env_name, fallback)).expanduser()
        for key, env_name, fallback in defaults
    }
    dirs.update(_get_user_dirs(dirs["XDG_CONFIG_DIR"]))
    return dirs
def _get_user_dirs(xdg_config_dir):
    """Returns a dict of XDG dirs read from
    ``$XDG_CONFIG_HOME/user-dirs.dirs``.

    This is used at import time for most users of :mod:`mopidy`. By rolling our
    own implementation instead of using :meth:`glib.get_user_special_dir` we
    make it possible for many extensions to run their test suites, which are
    importing parts of :mod:`mopidy`, in a virtualenv with global site-packages
    disabled, and thus no :mod:`glib` available.
    """
    dirs_file = xdg_config_dir / "user-dirs.dirs"
    if not dirs_file.exists():
        return {}
    data = dirs_file.read_bytes()
    # The file is a flat list of KEY="value" lines; prepend a section header
    # and strip the quotes so configparser can digest it.
    data = b"[XDG_USER_DIRS]\n" + data
    data = data.replace(b"$HOME", bytes(pathlib.Path.home()))
    data = data.replace(b'"', b"")
    config = configparser.RawConfigParser()
    config.read_string(data.decode())
    result = {}
    for k, v in config.items("XDG_USER_DIRS"):
        if v is None:
            continue
        # configparser lower-cases option names; restore the XDG_* convention.
        # (Removed a dead `isinstance(k, bytes)` branch: on Python 3,
        # read_string always yields str keys.)
        result[k.upper()] = pathlib.Path(v).resolve()
    return result
| 29.753623 | 81 | 0.643936 |
0aa069e23f30e3500fe0391ba29c2ad1c19719cc | 3,392 | py | Python | models/all_models.py | rohandeshmukh91/NLP | e05e9bf989a91e451d9bbfbd0c7a34dfe152d007 | [
"MIT"
] | null | null | null | models/all_models.py | rohandeshmukh91/NLP | e05e9bf989a91e451d9bbfbd0c7a34dfe152d007 | [
"MIT"
] | 1 | 2021-06-02T00:49:53.000Z | 2021-06-02T00:49:53.000Z | models/all_models.py | rohandeshmukh91/NLP | e05e9bf989a91e451d9bbfbd0c7a34dfe152d007 | [
"MIT"
] | null | null | null |
# @TODO: Need to fix this - universal model pooling module
import gensim
from gensim.models.doc2vec import Doc2Vec, TaggedDocument
from nltk.tokenize import ToktokTokenizer
from nltk.tokenize import word_tokenize
from gensim.similarities import Similarity
from gensim.models import TfidfModel
from nltk.corpus import stopwords
class Models(object):
    """Builders for Doc2Vec and gensim TF-IDF models over a question corpus.

    NOTE(review): both builder methods iterate over ``self`` (``enumerate(self)``,
    ``for text in self``) although the corpus lives in ``self.q_list``; iterating
    a plain instance raises TypeError, so these look like bugs to confirm.
    Model artifact paths are hard-coded to a developer machine.
    """
    def __init__(self, question_list):
        # Corpus of question strings the models are trained over.
        self.q_list = question_list
    def d2v_model(self):
        """Train a Doc2Vec model over the corpus and save it to disk."""
        tagged_data = [TaggedDocument(words=word_tokenize(_d.lower()), tags=[str(i)]) for i, _d in enumerate(self)]
        max_epochs = 100
        vec_size = 20
        alpha = 0.025
        model = Doc2Vec(vector_size=vec_size,
                        alpha=alpha,
                        min_alpha=0.00025,
                        min_count=1,
                        dm=1)
        model.build_vocab(tagged_data)
        for epoch in range(max_epochs):
            model.train(tagged_data,
                        total_examples=model.corpus_count,
                        epochs=model.iter)
            # Decay the learning rate slightly after every epoch.
            model.alpha -= 0.0002
            model.min_alpha = model.alpha
        model.save("/Users/rdeshmukh003/Desktop/Rohan/Xoro/xoro-demo/models/d2v.bin")
    def gensim_tdidf_model(self):
        """Build a TF-IDF similarity index over the corpus.

        Returns (tokenized docs, dictionary, BoW corpus, TF-IDF model, index).
        """
        query_docs = [[w.lower() for w in word_tokenize(text)] for text in self]
        dictionary = gensim.corpora.Dictionary(query_docs)
        corpus = [dictionary.doc2bow(gen_doc) for gen_doc in query_docs]
        tf_idf = TfidfModel(corpus)
        gensim_model = Similarity('/Users/rdeshmukh003/Desktop/Rohan/Xoro/xoro-demo/models', tf_idf[corpus], num_features=len(dictionary))
        return query_docs, dictionary, corpus, tf_idf, gensim_model
class Preprocessing(object):
    """Lower-case, tokenise, and stopword-filter an input line.

    NOTE(review): ``__init__`` ends with ``return tokenized_input`` — an
    ``__init__`` returning a non-None value raises TypeError at instantiation.
    It also tokenises ``self`` rather than ``self.input``. Both look like bugs
    to confirm before this class is used.
    """
    def __init__(self, input_line):
        self.input = input_line.lower()
        stop_words = set(stopwords.words('english'))
        word_tokens = ToktokTokenizer().tokenize(self)
        tokenized_input = [w for w in word_tokens if w not in stop_words]
        return tokenized_input
class CalcSimilarity(object):
    """Compute similarity between a new question and the stored corpus."""
    def __init__(self, model, q_list, q_test):
        self.model = model  # trained Doc2Vec model
        self.q_list = q_list  # original question corpus
        self.new_q = q_test  # tokenised query to compare against the corpus
    def d2v_calc_sim(self):
        """Return the documents most similar to the query per the Doc2Vec model."""
        similar_docs = self.model.docvecs.most_similar(positive=[self.model.infer_vector(self.new_q)])
        return similar_docs
# d2v
# NOTE(review): the module-level block below is broken as written —
# ``model_obj`` and ``all_models`` are never defined in this module, and
# ``CalcSimilarity`` is called with one argument although __init__ takes three.
d2v_model = Doc2Vec.load("/Users/rdeshmukh003/Desktop/Rohan/Xoro/xoro-demo/models/d2v.bin")
gensim_model = model_obj.gensim_tdidf_model()
new_q = "Please describe all entity applications that are made with McDonald's applications.".split(" ")
similarity_obj = all_models.CalcSimilarity(new_q)
similar_docs = d2v_model.docvecs.most_similar(positive=[d2v_model.infer_vector(new_q)])
print(similar_docs)
# NOTE(review): the method below is indented like a class member but follows
# dedented module-level statements — a syntax error as written; it appears to
# belong inside CalcSimilarity.
    def gensim_calc_sim(self,dictionary, tf_idf, gensim_model):
        query_docs = [[w.lower() for w in word_tokenize(text)] for text in self.new_q]
        query_doc_bow = dictionary.doc2bow(query_docs)
        query_doc_tf_idf = tf_idf[query_doc_bow]
        similarity_score_list = gensim_model[query_doc_tf_idf].tolist()
        most_similar_doc_index = similarity_score_list.index(max(similarity_score_list))
        print(most_similar_doc_index)
        print(self.q_list[most_similar_doc_index])
        a_to_q = self.q_list[str(self.q_list[most_similar_doc_index])]
        print(a_to_q)
d1dec3b20d0e20f56442d8be927b0f6d14cb754f | 1,092 | py | Python | alipay/aop/api/response/AlipayOpenBpaasServiceQueryResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 213 | 2018-08-27T16:49:32.000Z | 2021-12-29T04:34:12.000Z | alipay/aop/api/response/AlipayOpenBpaasServiceQueryResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 29 | 2018-09-29T06:43:00.000Z | 2021-09-02T03:27:32.000Z | alipay/aop/api/response/AlipayOpenBpaasServiceQueryResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 59 | 2018-08-27T16:59:26.000Z | 2022-03-25T10:08:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.BPaaSServiceInfo import BPaaSServiceInfo
class AlipayOpenBpaasServiceQueryResponse(AlipayResponse):
    """Response wrapper for the BPaaS service query API.

    Holds a list of BPaaSServiceInfo items parsed from the raw response.
    """
    def __init__(self):
        super(AlipayOpenBpaasServiceQueryResponse, self).__init__()
        self._service_list = None
    @property
    def service_list(self):
        return self._service_list
    @service_list.setter
    def service_list(self, value):
        # Non-list values are ignored, matching the upstream SDK convention.
        if not isinstance(value, list):
            return
        self._service_list = [
            item if isinstance(item, BPaaSServiceInfo)
            else BPaaSServiceInfo.from_alipay_dict(item)
            for item in value
        ]
    def parse_response_content(self, response_content):
        response = super(AlipayOpenBpaasServiceQueryResponse, self).parse_response_content(response_content)
        if 'service_list' in response:
            self.service_list = response['service_list']
| 33.090909 | 108 | 0.684982 |
28e22ce492e14eb8b01cf5a0c457e9860a0cca75 | 16,402 | py | Python | helpers/event_insights_helper.py | dc74089/the-blue-alliance | 2c47b6e8b61819eda4dcda5e0011cb2d5c7d3785 | [
"MIT"
] | null | null | null | helpers/event_insights_helper.py | dc74089/the-blue-alliance | 2c47b6e8b61819eda4dcda5e0011cb2d5c7d3785 | [
"MIT"
] | null | null | null | helpers/event_insights_helper.py | dc74089/the-blue-alliance | 2c47b6e8b61819eda4dcda5e0011cb2d5c7d3785 | [
"MIT"
] | null | null | null | import logging
import traceback
from collections import defaultdict
from consts.event_type import EventType
class EventInsightsHelper(object):
    """Computes aggregate per-event insight statistics from played matches.

    NOTE(review): this module is Python 2 as written — it uses the
    ``except Exception, e:`` syntax and ``xrange``.
    """
    @classmethod
    def calculate_event_insights(cls, matches, year):
        """Dispatch to the year-specific insights calculator; None if unsupported."""
        INSIGHTS_MAP = {
            2016: cls.calculate_event_insights_2016,
            2017: cls.calculate_event_insights_2017,
        }
        if year in INSIGHTS_MAP:
            return INSIGHTS_MAP[year](matches)
        else:
            return None
    @classmethod
    def calculate_event_insights_2017(cls, matches):
        """Split matches into qual/playoff and compute 2017 insights for each."""
        qual_matches = []
        playoff_matches = []
        for match in matches:
            if match.comp_level == 'qm':
                qual_matches.append(match)
            else:
                playoff_matches.append(match)
        qual_insights = cls._calculate_event_insights_2017_helper(qual_matches)
        playoff_insights = cls._calculate_event_insights_2017_helper(playoff_matches)
        return {
            'qual': qual_insights,
            'playoff': playoff_insights,
        }
    @classmethod
    def _calculate_event_insights_2017_helper(cls, matches):
        """Accumulate 2017 stats over played matches.

        Returns None if no score breakdown was parseable, {} if no matches
        have finished, otherwise a dict of aggregate statistics.
        """
        # Auto
        mobility_points_auto = 0
        rotor_points_auto = 0
        high_goals_auto = 0
        low_goals_auto = 0
        fuel_points_auto = 0
        points_auto = 0
        mobility_counts = 0
        # Teleop
        rotor_points_teleop = 0
        high_goals_teleop = 0
        low_goals_teleop = 0
        fuel_points_teleop = 0
        takeoff_points_teleop = 0
        points_teleop = 0
        takeoff_counts = 0
        # Overall
        rotor_1_engaged_auto = 0
        rotor_2_engaged_auto = 0
        rotor_1_engaged = 0
        rotor_2_engaged = 0
        rotor_3_engaged = 0
        rotor_4_engaged = 0
        rotor_points = 0
        high_goals = 0
        low_goals = 0
        fuel_points = 0
        kpa_achieved = 0
        unicorn_matches = 0
        winning_scores = 0
        win_margins = 0
        total_scores = 0
        foul_scores = 0
        high_kpa = [0, "", ""]  # kpa, match key, match name
        high_score = [0, "", ""]  # score, match key, match name
        finished_matches = 0
        has_insights = False
        for match in matches:
            if not match.has_been_played:
                continue
            red_score = match.alliances['red']['score']
            blue_score = match.alliances['blue']['score']
            win_score = max(red_score, blue_score)
            winning_scores += win_score
            win_margins += (win_score - min(red_score, blue_score))
            total_scores += red_score + blue_score
            if win_score > high_score[0]:
                high_score = [win_score, match.key_name, match.short_name]
            for alliance_color in ['red', 'blue']:
                try:
                    alliance_breakdown = match.score_breakdown[alliance_color]
                    # High kPa
                    kpa = alliance_breakdown['autoFuelPoints'] + alliance_breakdown['teleopFuelPoints']
                    if kpa > high_kpa[0]:
                        high_kpa = [kpa, match.key_name, match.short_name]
                    # Auto
                    mobility_points_auto += alliance_breakdown['autoMobilityPoints']
                    rotor_points_auto += alliance_breakdown['autoRotorPoints']
                    fuel_points_auto += alliance_breakdown['autoFuelPoints']
                    high_goals_auto += alliance_breakdown['autoFuelHigh']
                    low_goals_auto += alliance_breakdown['autoFuelLow']
                    points_auto += alliance_breakdown['autoPoints']
                    for i in xrange(3):
                        mobility_counts += 1 if alliance_breakdown['robot{}Auto'.format(i+1)] == 'Mobility' else 0
                    # Teleop
                    rotor_points_teleop += alliance_breakdown['teleopRotorPoints']
                    fuel_points_teleop += alliance_breakdown['teleopFuelPoints']
                    high_goals_teleop += alliance_breakdown['teleopFuelHigh']
                    low_goals_teleop += alliance_breakdown['teleopFuelLow']
                    takeoff_points_teleop += alliance_breakdown['teleopTakeoffPoints']
                    points_teleop += alliance_breakdown['teleopPoints']
                    takeoff_counts += 1 if alliance_breakdown['touchpadFar'] == 'ReadyForTakeoff' else 0
                    takeoff_counts += 1 if alliance_breakdown['touchpadMiddle'] == 'ReadyForTakeoff' else 0
                    takeoff_counts += 1 if alliance_breakdown['touchpadNear'] == 'ReadyForTakeoff' else 0
                    # Overall
                    rotor_1_engaged_auto += 1 if alliance_breakdown['rotor1Auto'] else 0
                    rotor_2_engaged_auto += 1 if alliance_breakdown['rotor2Auto'] else 0
                    rotor_1_engaged += 1 if alliance_breakdown['rotor1Engaged'] else 0
                    rotor_2_engaged += 1 if alliance_breakdown['rotor2Engaged'] else 0
                    rotor_3_engaged += 1 if alliance_breakdown['rotor3Engaged'] else 0
                    rotor_4_engaged += 1 if alliance_breakdown['rotor4Engaged'] else 0
                    rotor_points += alliance_breakdown['autoRotorPoints'] + alliance_breakdown['teleopRotorPoints']
                    high_goals += alliance_breakdown['autoFuelHigh'] + alliance_breakdown['teleopFuelHigh']
                    low_goals += alliance_breakdown['autoFuelLow'] + alliance_breakdown['teleopFuelLow']
                    fuel_points += alliance_breakdown['autoFuelPoints'] + alliance_breakdown['teleopFuelPoints']
                    kpa_bonus = alliance_breakdown['kPaRankingPointAchieved'] or alliance_breakdown['kPaBonusPoints'] > 0
                    kpa_achieved += 1 if kpa_bonus else 0
                    alliance_win = alliance_color == match.winning_alliance
                    # "Unicorn": win + kPa bonus + all four rotors engaged.
                    unicorn_matches += 1 if alliance_win and kpa_bonus and alliance_breakdown['rotor4Engaged'] else 0
                    foul_scores += alliance_breakdown['foulPoints']
                    has_insights = True
                except Exception, e:
                    # NOTE(review): Python 2 except syntax; ``e`` is unused.
                    msg = "Event insights failed for {}".format(match.key.id())
                    # event.get() below should be cheap since it's backed by context cache
                    if match.event.get().event_type_enum in EventType.SEASON_EVENT_TYPES:
                        logging.warning(msg)
                        logging.warning(traceback.format_exc())
                    else:
                        logging.info(msg)
            finished_matches += 1
        if not has_insights:
            return None
        if finished_matches == 0:
            return {}
        opportunities_1x = 2 * finished_matches  # once per alliance
        opportunities_3x = 6 * finished_matches  # 3x per alliance
        event_insights = {
            # Auto
            'average_mobility_points_auto': float(mobility_points_auto) / (2 * finished_matches),
            'average_rotor_points_auto': float(rotor_points_auto) / (2 * finished_matches),
            'average_fuel_points_auto': float(fuel_points_auto) / (2 * finished_matches),
            'average_high_goals_auto': float(high_goals_auto) / (2 * finished_matches),
            'average_low_goals_auto': float(low_goals_auto) / (2 * finished_matches),
            'average_points_auto': float(points_auto) / (2 * finished_matches),
            'mobility_counts': [mobility_counts, opportunities_3x, 100.0 * float(mobility_counts) / opportunities_3x],
            # Teleop
            'average_rotor_points_teleop': float(rotor_points_teleop) / (2 * finished_matches),
            'average_fuel_points_teleop': float(fuel_points_teleop) / (2 * finished_matches),
            'average_high_goals_teleop': float(high_goals_teleop) / (2 * finished_matches),
            'average_low_goals_teleop': float(low_goals_teleop) / (2 * finished_matches),
            'average_takeoff_points_teleop': float(takeoff_points_teleop) / (2 * finished_matches),
            'average_points_teleop': float(points_teleop) / (2 * finished_matches),
            'takeoff_counts': [takeoff_counts, opportunities_3x, 100.0 * float(takeoff_counts) / opportunities_3x],
            # Overall
            'average_rotor_points': float(rotor_points) / (2 * finished_matches),
            'average_fuel_points': float(fuel_points) / (2 * finished_matches),
            'average_high_goals': float(high_goals) / (2 * finished_matches),
            'average_low_goals': float(low_goals) / (2 * finished_matches),
            'rotor_1_engaged_auto': [rotor_1_engaged_auto, opportunities_1x, 100.0 * float(rotor_1_engaged_auto) / opportunities_1x],
            'rotor_2_engaged_auto': [rotor_2_engaged_auto, opportunities_1x, 100.0 * float(rotor_2_engaged_auto) / opportunities_1x],
            'rotor_1_engaged': [rotor_1_engaged, opportunities_1x, 100.0 * float(rotor_1_engaged) / opportunities_1x],
            'rotor_2_engaged': [rotor_2_engaged, opportunities_1x, 100.0 * float(rotor_2_engaged) / opportunities_1x],
            'rotor_3_engaged': [rotor_3_engaged, opportunities_1x, 100.0 * float(rotor_3_engaged) / opportunities_1x],
            'rotor_4_engaged': [rotor_4_engaged, opportunities_1x, 100.0 * float(rotor_4_engaged) / opportunities_1x],
            'kpa_achieved': [kpa_achieved, opportunities_1x, 100.0 * float(kpa_achieved) / opportunities_1x],
            'unicorn_matches': [unicorn_matches, opportunities_1x, 100.0 * float(unicorn_matches) / opportunities_1x],
            'average_win_score': float(winning_scores) / finished_matches,
            'average_win_margin': float(win_margins) / finished_matches,
            'average_score': float(total_scores) / (2 * finished_matches),
            'average_foul_score': float(foul_scores) / (2 * finished_matches),
            'high_score': high_score,  # [score, match key, match name]
            'high_kpa': high_kpa,  # [kpa, match key, match name]
        }
        return event_insights
    @classmethod
    def calculate_event_insights_2016(cls, matches):
        """Split matches into qual/playoff and compute 2016 insights for each."""
        qual_matches = []
        playoff_matches = []
        for match in matches:
            if match.comp_level == 'qm':
                qual_matches.append(match)
            else:
                playoff_matches.append(match)
        qual_insights = cls._calculate_event_insights_2016_helper(qual_matches)
        playoff_insights = cls._calculate_event_insights_2016_helper(playoff_matches)
        return {
            'qual': qual_insights,
            'playoff': playoff_insights,
        }
    @classmethod
    def _calculate_event_insights_2016_helper(cls, matches):
        """Accumulate 2016 (Stronghold) stats over played matches.

        Returns None if no score breakdown was parseable, {} if no matches
        have finished, otherwise a dict of aggregate statistics.
        """
        # defenses
        defense_opportunities = defaultdict(int)
        defense_damaged = defaultdict(int)
        breaches = 0
        # towers
        high_goals = 0
        low_goals = 0
        challenges = 0
        scales = 0
        captures = 0
        # scores
        winning_scores = 0
        win_margins = 0
        total_scores = 0
        auto_scores = 0
        crossing_scores = 0
        boulder_scores = 0
        tower_scores = 0
        foul_scores = 0
        high_score = [0, "", ""]  # score, match key, match name
        finished_matches = 0
        has_insights = False
        for match in matches:
            if not match.has_been_played:
                continue
            red_score = match.alliances['red']['score']
            blue_score = match.alliances['blue']['score']
            win_score = max(red_score, blue_score)
            winning_scores += win_score
            win_margins += (win_score - min(red_score, blue_score))
            total_scores += red_score + blue_score
            if win_score > high_score[0]:
                high_score = [win_score, match.key_name, match.short_name]
            for alliance_color in ['red', 'blue']:
                try:
                    alliance_breakdown = match.score_breakdown[alliance_color]
                    auto_scores += alliance_breakdown['autoPoints']
                    crossing_scores += alliance_breakdown['teleopCrossingPoints']
                    boulder_scores += alliance_breakdown['teleopBoulderPoints']
                    tower_scores += alliance_breakdown['teleopChallengePoints'] + alliance_breakdown['teleopScalePoints']
                    foul_scores += alliance_breakdown['foulPoints']
                    # Position 1 is always the low bar; 2-5 vary per alliance.
                    pos1 = 'LowBar'
                    pos2 = alliance_breakdown['position2']
                    pos3 = alliance_breakdown['position3']
                    pos4 = alliance_breakdown['position4']
                    pos5 = alliance_breakdown['position5']
                    positions = [pos1, pos2, pos3, pos4, pos5]
                    for pos_idx, pos in enumerate(positions):
                        defense_opportunities[pos] += 1
                        if alliance_breakdown['position{}crossings'.format(pos_idx + 1)] == 2:
                            defense_damaged[pos] += 1
                    breaches += 1 if alliance_breakdown['teleopDefensesBreached'] else 0
                    high_goals += alliance_breakdown['autoBouldersHigh'] + alliance_breakdown['teleopBouldersHigh']
                    low_goals += alliance_breakdown['autoBouldersLow'] + alliance_breakdown['teleopBouldersLow']
                    captures += 1 if alliance_breakdown['teleopTowerCaptured'] else 0
                    for tower_face in ['towerFaceA', 'towerFaceB', 'towerFaceC']:
                        if alliance_breakdown[tower_face] == 'Challenged':
                            challenges += 1
                        elif alliance_breakdown[tower_face] == 'Scaled':
                            scales += 1
                    has_insights = True
                except Exception, e:
                    # NOTE(review): Python 2 except syntax; ``e`` is unused.
                    msg = "Event insights failed for {}".format(match.key.id())
                    # event.get() below should be cheap since it's backed by context cache
                    if match.event.get().event_type_enum in EventType.SEASON_EVENT_TYPES:
                        logging.warning(msg)
                    else:
                        logging.info(msg)
            finished_matches += 1
        if not has_insights:
            return None
        if finished_matches == 0:
            return {}
        opportunities_1x = 2 * finished_matches  # once per alliance
        opportunities_3x = 6 * finished_matches  # 3x per alliance
        event_insights = {
            'LowBar': [0, 0, 0],
            'A_ChevalDeFrise': [0, 0, 0],
            'A_Portcullis': [0, 0, 0],
            'B_Ramparts': [0, 0, 0],
            'B_Moat': [0, 0, 0],
            'C_SallyPort': [0, 0, 0],
            'C_Drawbridge': [0, 0, 0],
            'D_RoughTerrain': [0, 0, 0],
            'D_RockWall': [0, 0, 0],
            'average_high_goals': float(high_goals) / (2 * finished_matches),
            'average_low_goals': float(low_goals) / (2 * finished_matches),
            'breaches': [breaches, opportunities_1x, 100.0 * float(breaches) / opportunities_1x],  # [# success, # opportunities, %]
            'scales': [scales, opportunities_3x, 100.0 * float(scales) / opportunities_3x],
            'challenges': [challenges, opportunities_3x, 100.0 * float(challenges) / opportunities_3x],
            'captures': [captures, opportunities_1x, 100.0 * float(captures) / opportunities_1x],
            'average_win_score': float(winning_scores) / finished_matches,
            'average_win_margin': float(win_margins) / finished_matches,
            'average_score': float(total_scores) / (2 * finished_matches),
            'average_auto_score': float(auto_scores) / (2 * finished_matches),
            'average_crossing_score': float(crossing_scores) / (2 * finished_matches),
            'average_boulder_score': float(boulder_scores) / (2 * finished_matches),
            'average_tower_score': float(tower_scores) / (2 * finished_matches),
            'average_foul_score': float(foul_scores) / (2 * finished_matches),
            'high_score': high_score,  # [score, match key, match name]
        }
        for defense, opportunities in defense_opportunities.items():
            event_insights[defense] = [defense_damaged[defense], opportunities, 100.0 * float(defense_damaged[defense]) / opportunities]  # [# damaged, # opportunities, %]
        return event_insights
3f7fe6e20f1fe00fade9aeb0b2f86742ff0a94cc | 3,853 | py | Python | modules/flying_square.py | StephD/CJ8-blessed-badgers | 0e0f340efd6e45baf04bd4c25ce5c569fcfe344e | [
"MIT"
] | 2 | 2021-07-17T16:49:25.000Z | 2021-11-16T11:40:47.000Z | modules/flying_square.py | StephD/CJ8-blessed-badgers | 0e0f340efd6e45baf04bd4c25ce5c569fcfe344e | [
"MIT"
] | null | null | null | modules/flying_square.py | StephD/CJ8-blessed-badgers | 0e0f340efd6e45baf04bd4c25ce5c569fcfe344e | [
"MIT"
] | 1 | 2022-01-05T20:49:09.000Z | 2022-01-05T20:49:09.000Z | import numpy as np
from numpy import cos, pi, sin
class Square:
    """A decorative square that drifts across the start menu while spinning."""

    vertices: np.array
    center: np.array
    rotation: float
    velocity: np.array
    angular_velocity: float

    def __init__(self, vertices: np.array = None, velocity: np.array = None, angular_velocity: float = None):
        """Store geometry and motion state, randomising anything not given."""
        if vertices is None:
            vertices = np.array(
                [
                    [-2.0, 1.0],
                    [-2.0, 5.0],
                    [2.0, 5.0],
                    [2.0, 1.0],
                ]
            )
        self.vertices = vertices
        if velocity is not None:
            self.velocity = velocity
        else:
            # Random speed in [0.2, 0.3) per axis, each with a random sign.
            self.velocity = np.array(
                [
                    np.random.uniform(0.2, 0.3) * np.random.choice([-1, 1]),
                    np.random.uniform(0.2, 0.3) * np.random.choice([-1, 1]),
                ]
            )
        if angular_velocity is not None:
            self.angular_velocity = angular_velocity
        else:
            self.angular_velocity = np.random.uniform(0.02, 0.05) * np.random.choice([-1, 1])
        # Centroid of the polygon: per-axis mean of the vertices.
        self.center = self.vertices.sum(axis=0) / len(self.vertices)
        self.rotation = 0

    def rotate(self, delta_theta: float) -> None:
        """Rotate the square about its centre by *delta_theta* radians."""
        self.rotation = (self.rotation + delta_theta) % (2 * pi)
        cos_t = cos(delta_theta)
        sin_t = sin(delta_theta)
        rotation_matrix = np.array([[cos_t, -sin_t], [sin_t, cos_t]])
        local_positions = self.vertices - self.center
        self.vertices = (rotation_matrix @ local_positions.T).T + self.center

    def translate(self, delta_pos: np.array) -> None:
        """Move the centre and every vertex by *delta_pos* (in place)."""
        self.center += delta_pos
        self.vertices += delta_pos

    def contains(self, points: np.array) -> np.array:
        """Vectorised point-in-rectangle test for a 2xN *points* array.

        Based on https://math.stackexchange.com/a/190373
        """
        corner_a, corner_b, _, corner_d = self.vertices
        ab_vector = corner_a - corner_b
        ad_vector = corner_a - corner_d
        am_vector = ((-points).T + corner_a).T
        ab_proj = np.dot(am_vector.T, ab_vector)
        ad_proj = np.dot(am_vector.T, ad_vector)
        inside = (
            (ab_proj > 0)
            & (ab_proj < np.dot(ab_vector, ab_vector))
            & (ad_proj > 0)
            & (ad_proj < np.dot(ad_vector, ad_vector))
        )
        return inside.T

    def update(self, screen_dimensions: tuple[tuple[float, float], tuple[float, float]]) -> None:
        """Advance one frame: move, spin, and bounce off the screen edges."""
        (lower_x, lower_y), (upper_x, upper_y) = screen_dimensions
        self.translate(self.velocity)
        self.rotate(self.angular_velocity)
        for vertex_x, vertex_y in self.vertices:
            if vertex_y < lower_x:
                self.velocity[1] = abs(self.velocity[1])
            if vertex_y > upper_x:
                self.velocity[1] = -abs(self.velocity[1])
            if vertex_x < lower_y:
                self.velocity[0] = abs(self.velocity[0])
            if vertex_x > upper_y:
                self.velocity[0] = -abs(self.velocity[0])

    def to_be_painted(self, row: np.array, col: np.array) -> set[tuple[int, int]]:
        """Return the set of integer (x, y) cells covered by the square."""
        inside = self.contains(np.array([row, col]))
        return {(int(x), int(y)) for x, y in np.transpose(inside.nonzero())}
e35b343988f13e9ce1b56a9972c0c670cb53cc81 | 4,936 | py | Python | shuffle-employees.py | Carsmaniac/erisly-tower-shuffler | 35a593239d4ce2295f9e1266824a378b7aabeed8 | [
"MIT"
] | null | null | null | shuffle-employees.py | Carsmaniac/erisly-tower-shuffler | 35a593239d4ce2295f9e1266824a378b7aabeed8 | [
"MIT"
] | null | null | null | shuffle-employees.py | Carsmaniac/erisly-tower-shuffler | 35a593239d4ce2295f9e1266824a378b7aabeed8 | [
"MIT"
] | null | null | null | from sys import exit
from time import sleep
try:
import keyboard
except ModuleNotFoundError:
print("Could not import \"keyboard\" library (pip install keyboard)")
exit()
start_hotkey = "F7"
message_send_delay = 2
key_press_delay = 0.02
def send_message(string):
    # It's not about money, it's about sending a message
    # Types the message into whatever window has focus, presses enter, then
    # waits long enough for Discord to accept the next command.
    keyboard.write(string, delay = key_press_delay)
    sleep(key_press_delay * 2)
    keyboard.send("enter")
    sleep(message_send_delay)
def calculate_earnings(employee_list):
    """Return total hourly earnings for a list of employee records.

    Each record is [first, last, rarity, floor, ...]; an employee contributes
    floor * rarity multiplier, and the grand total is truncated to an int.
    """
    multipliers = {"L": 2, "R": 1.5, "U": 1.25, "C": 1}
    total = sum(int(emp[3]) * multipliers[emp[2]] for emp in employee_list)
    return int(total)
# Read the employees text file
try:
    with open("employees.txt", "r") as emp_file:
        text_lines = emp_file.readlines()
except FileNotFoundError:
    print("Employees file does not exist")
    exit()
try:
    # Line index 1 holds the number of tower floors to populate.
    floors = int(text_lines[1].rstrip())
except IndexError:
    print("Employees file not set up correctly")
    exit()
except ValueError:
    print("Number of floors is not a number")
    exit()
# Extract a list of employees (records start on line index 4 of the file)
employees = []
for i in range(len(text_lines) - 4):
    # Fixed: readlines() keeps the trailing newline, so a blank line is "\n",
    # never "" — compare against the stripped text instead.
    if text_lines[i + 4].strip() != "":
        employee = text_lines[i + 4].rstrip().split(" ")
        if len(employee) < 4:
            print("Not all employees are set up correctly")
            print("Each employee needs a first name, last name, rarity, and assigned floor (0 for unassigned)")
            print("e.g. Guy Day U 4")
            exit()
        else:
            # Convert assigned floor to int
            try:
                employee[3] = int(employee[3])
            except ValueError:
                print("{} {}'s assigned floor is not a number".format(employee[0], employee[1]))
                # Fixed: exit like the other parse-error paths instead of
                # continuing with a string floor and crashing later.
                exit()
            employees.append(employee)
employees.sort()
# Sort employees by rarity, rarest first
sorted_employees = []
try:
    for letter in ["L", "R", "U", "C"]:
        for i in range(len(employees)):
            if employees[i][2] == letter:
                sorted_employees.append(employees[i])
except IndexError:
    print("Not all employees have first & last names and rarities")
    exit()
# If floors != employees
if floors > len(sorted_employees):
    print("More floors than employees, some floors will remain empty")
elif floors < len(sorted_employees):
    print("More employees than floors, some employees will remain unassigned")
# Determine which floor each employee will end up on: rarest employees get the
# highest floors; anyone left over gets floor 0 (unassigned).
for i in range(len(sorted_employees)):
    sorted_employees[i].append(max(floors - i, 0))
# Calculate earnings pre-shuffle
earnings_before = calculate_earnings(sorted_employees)
# Wait until ready to begin typing
print("Ready to shuffle employees, place text cursor in Discord channel and press " + start_hotkey)
keyboard.wait(start_hotkey)
# Move employees: each record is [first, last, rarity, current floor, target floor]
print("\nAssigning all employees to optimal floors")
for i in range(len(sorted_employees)):
    this_emp = sorted_employees[i]
    if this_emp[3] == this_emp[4]:
        print("{} {} already on the correct floor, skipping".format(this_emp[0], this_emp[1]))
    elif this_emp[4] == 0:
        print("Unassigning {} {}".format(this_emp[0], this_emp[1]))
        # Fixed: the format string had three placeholders but only two
        # arguments, which raised IndexError whenever an unassign was needed.
        send_message(">tower employees unassign {} {}".format(this_emp[0], this_emp[1]))
        this_emp[3] = this_emp[4]
    else:
        swapped = False
        for j in range(len(sorted_employees)):
            if sorted_employees[j][3] == this_emp[4] and j != i:
                other_emp = sorted_employees[j]
                print("Swapping {} {} and {} {}".format(this_emp[0], this_emp[1], other_emp[0], other_emp[1]))
                send_message(">tower employees swap {} {} {} {}".format(this_emp[0], this_emp[1], other_emp[0], other_emp[1]))
                other_emp[3] = this_emp[3]
                this_emp[3] = this_emp[4]
                swapped = True  # Fixed: the flag was never set after a swap
        if not swapped:
            if this_emp[3] != this_emp[4]:
                print("Assigning {} {} to correct floor".format(this_emp[0], this_emp[1]))
                send_message(">tower employees assign {} {} {}".format(this_emp[0], this_emp[1], this_emp[4]))
                this_emp[3] = this_emp[4]
# Rewrite employees.txt to reflect changes
print("Updating employees.txt")
with open("employees.txt", "w") as emp_file:
    emp_file.write("Number of floors to populate:\n")
    emp_file.write("{}\n\n".format(floors))
    emp_file.write("Employees - firstname lastname rarity(C/U/R/L) currentfloor:\n")
    for emp in sorted_employees:
        emp.pop()  # drop the target-floor column; index 3 is now the final floor
        emp[3] = str(emp[3])
        emp_file.write(" ".join(emp) + "\n")
# Calculate tower profit
earnings_after = calculate_earnings(sorted_employees)
print("\nHourly earnings before shuffling: {} ({} per 3 hours)".format(earnings_before, earnings_before * 3))
# Fixed: guard against division by zero when the tower earns nothing.
improvement = int(100 - earnings_before / earnings_after * 100) if earnings_after else 0
print("Hourly earnings after shuffling: {} ({} per 3 hours), {}% improvement".format(earnings_after, earnings_after * 3, improvement))
input("\nPress enter to close")
| 36.835821 | 170 | 0.649109 |
1eb4ae0ae33ac7d362568a64e80f85256e178f7f | 3,097 | py | Python | lib/rucio/tests/test_daemons.py | justincc/rucio | 95d81403c835d9f43fc30d328a8e2e388617a369 | [
"Apache-2.0"
] | null | null | null | lib/rucio/tests/test_daemons.py | justincc/rucio | 95d81403c835d9f43fc30d328a8e2e388617a369 | [
"Apache-2.0"
] | null | null | null | lib/rucio/tests/test_daemons.py | justincc/rucio | 95d81403c835d9f43fc30d328a8e2e388617a369 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020-2021 CERN
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Benedikt Ziemons <benedikt.ziemons@cern.ch>, 2020
# - Martin Barisits <martin.barisits@cern.ch>, 2021
import sys
import pytest
import rucio.db.sqla.util
from rucio.common import exception
from rucio.daemons.abacus import account, collection_replica, rse
from rucio.daemons.atropos import atropos
from rucio.daemons.automatix import automatix
from rucio.daemons.badreplicas import minos, minos_temporary_expiration, necromancer
from rucio.daemons.c3po import c3po
from rucio.daemons.cache import consumer
from rucio.daemons.conveyor import finisher, fts_throttler, poller, poller_latest, receiver, stager, submitter, throttler, preparer
from rucio.daemons.follower import follower
from rucio.daemons.hermes import hermes, hermes2
from rucio.daemons.judge import cleaner, evaluator, injector, repairer
from rucio.daemons.oauthmanager import oauthmanager
from rucio.daemons.reaper import dark_reaper, light_reaper, reaper2
from rucio.daemons.replicarecoverer import suspicious_replica_recoverer
from rucio.daemons.sonar.distribution import distribution_daemon
from rucio.daemons.tracer import kronos
from rucio.daemons.transmogrifier import transmogrifier
from rucio.daemons.undertaker import undertaker
# Import the stdlib mock on Python >= 3.3; fall back to the third-party
# backport package on older interpreters.
if sys.version_info >= (3, 3):
    from unittest import mock
else:
    import mock
# Every daemon module whose run() entry point must refuse to start when
# the database schema is outdated (exercised by test_fail_on_old_database).
DAEMONS = [
    account,
    collection_replica,
    rse,
    atropos,
    automatix,
    minos,
    minos_temporary_expiration,
    necromancer,
    c3po,
    consumer,
    finisher,
    fts_throttler,
    poller,
    poller_latest,
    receiver,
    stager,
    submitter,
    throttler,
    preparer,
    follower,
    hermes,
    hermes2,
    cleaner,
    evaluator,
    injector,
    repairer,
    oauthmanager,
    dark_reaper,
    light_reaper,
    reaper2,
    suspicious_replica_recoverer,
    distribution_daemon,
    # sonar_v3_dev_daemon, -- lib/rucio/common/config.py:55: NoSectionError: No section: 'sonar'
    kronos,
    transmogrifier,
    undertaker,
]
# Human-readable pytest ids (module names) for the parametrized test below.
ids = [mod.__name__ for mod in DAEMONS]
@pytest.mark.parametrize('daemon', argvalues=DAEMONS, ids=ids)
@mock.patch('rucio.db.sqla.util.is_old_db')
def test_fail_on_old_database(mock_is_old_db, daemon):
    """ DAEMON: Test daemon failure on old database """
    # Pretend the schema is outdated; every daemon must then refuse to run.
    mock_is_old_db.return_value = True
    assert rucio.db.sqla.util.is_old_db() is True
    expected_error = 'Database was not updated, daemon won\'t start'
    with pytest.raises(exception.DatabaseException, match=expected_error):
        daemon.run()
    # The daemon itself must have consulted is_old_db, beyond our sanity call.
    assert mock_is_old_db.call_count > 1
| 30.067961 | 131 | 0.757184 |
dd134d13d1980103ed5867071666395830df2038 | 59,656 | py | Python | glance/tests/functional/v1/test_api.py | dreamhost/glance | 1d91a4dc2b74d224ea12947d672aa66a576d9d9a | [
"Apache-2.0"
] | null | null | null | glance/tests/functional/v1/test_api.py | dreamhost/glance | 1d91a4dc2b74d224ea12947d672aa66a576d9d9a | [
"Apache-2.0"
] | null | null | null | glance/tests/functional/v1/test_api.py | dreamhost/glance | 1d91a4dc2b74d224ea12947d672aa66a576d9d9a | [
"Apache-2.0"
] | null | null | null | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Functional test case that utilizes httplib2 against the API server"""
import datetime
import hashlib
import json
import tempfile
import httplib2
from glance.openstack.common import timeutils
from glance.tests import functional
from glance.tests.utils import skip_if_disabled, minimal_headers
# Small payload used as fake image data throughout the tests.
FIVE_KB = 5 * 1024
# Large declared size used to check the registry does not truncate it.
FIVE_GB = 5 * 1024 * 1024 * 1024
class TestApi(functional.FunctionalTest):
"""Functional tests using httplib2 against the API server"""
    @skip_if_disabled
    def test_get_head_simple_post(self):
        """
        We test the following sequential series of actions:
        0. GET /images
        - Verify no public images
        1. GET /images/detail
        - Verify no public images
        2. POST /images with public image named Image1
        and no custom properties
        - Verify 201 returned
        3. HEAD image
        - Verify HTTP headers have correct information we just added
        4. GET image
        - Verify all information on image we just added is correct
        5. GET /images
        - Verify the image we just added is returned
        6. GET /images/detail
        - Verify the image we just added is returned
        7. PUT image with custom properties of "distro" and "arch"
        - Verify 200 returned
        8. GET image
        - Verify updated information about image was stored
        9. PUT image
        - Remove a previously existing property.
        10. PUT image
        - Add a previously deleted property.
        """
        self.cleanup()
        self.start_servers(**self.__dict__.copy())
        # 0. GET /images
        # Verify no public images
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        self.assertEqual(content, '{"images": []}')
        # 1. GET /images/detail
        # Verify no public images
        path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        self.assertEqual(content, '{"images": []}')
        # 2. POST /images with public image named Image1
        # attribute and no custom properties. Verify a 201 Created is returned
        image_data = "*" * FIVE_KB
        headers = minimal_headers('Image1')
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'POST', headers=headers,
                                         body=image_data)
        self.assertEqual(response.status, 201)
        data = json.loads(content)
        image_id = data['image']['id']
        self.assertEqual(data['image']['checksum'],
                         hashlib.md5(image_data).hexdigest())
        self.assertEqual(data['image']['size'], FIVE_KB)
        self.assertEqual(data['image']['name'], "Image1")
        self.assertEqual(data['image']['is_public'], True)
        # 3. HEAD image
        # Verify image found now
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'HEAD')
        self.assertEqual(response.status, 200)
        self.assertEqual(response['x-image-meta-name'], "Image1")
        # 4. GET image
        # Verify all information on image we just added is correct
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        expected_image_headers = {
            'x-image-meta-id': image_id,
            'x-image-meta-name': 'Image1',
            'x-image-meta-is_public': 'True',
            'x-image-meta-status': 'active',
            'x-image-meta-disk_format': 'raw',
            'x-image-meta-container_format': 'ovf',
            'x-image-meta-size': str(FIVE_KB)}
        expected_std_headers = {
            'content-length': str(FIVE_KB),
            'content-type': 'application/octet-stream'}
        for expected_key, expected_value in expected_image_headers.items():
            self.assertEqual(response[expected_key], expected_value,
                             "For key '%s' expected header value '%s'. Got '%s'"
                             % (expected_key, expected_value,
                                response[expected_key]))
        for expected_key, expected_value in expected_std_headers.items():
            self.assertEqual(response[expected_key], expected_value,
                             "For key '%s' expected header value '%s'. Got '%s'"
                             % (expected_key,
                                expected_value,
                                response[expected_key]))
        self.assertEqual(content, "*" * FIVE_KB)
        self.assertEqual(hashlib.md5(content).hexdigest(),
                         hashlib.md5("*" * FIVE_KB).hexdigest())
        # 5. GET /images
        # Verify the image we just added is returned
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        expected_result = {"images": [
            {"container_format": "ovf",
             "disk_format": "raw",
             "id": image_id,
             "name": "Image1",
             "checksum": "c2e5db72bd7fd153f53ede5da5a06de3",
             "size": 5120}]}
        self.assertEqual(json.loads(content), expected_result)
        # 6. GET /images/detail
        # Verify image and all its metadata
        path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        expected_image = {
            "status": "active",
            "name": "Image1",
            "deleted": False,
            "container_format": "ovf",
            "disk_format": "raw",
            "id": image_id,
            "is_public": True,
            "deleted_at": None,
            "properties": {},
            "size": 5120}
        image = json.loads(content)
        for expected_key, expected_value in expected_image.items():
            self.assertEqual(expected_value, image['images'][0][expected_key],
                             "For key '%s' expected header value '%s'. Got '%s'"
                             % (expected_key,
                                expected_value,
                                image['images'][0][expected_key]))
        # 7. PUT image with custom properties of "distro" and "arch"
        # Verify 200 returned
        headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
                   'X-Image-Meta-Property-Arch': 'x86_64'}
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'PUT', headers=headers)
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(data['image']['properties']['arch'], "x86_64")
        self.assertEqual(data['image']['properties']['distro'], "Ubuntu")
        # 8. GET /images/detail
        # Verify image and all its metadata
        path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        expected_image = {
            "status": "active",
            "name": "Image1",
            "deleted": False,
            "container_format": "ovf",
            "disk_format": "raw",
            "id": image_id,
            "is_public": True,
            "deleted_at": None,
            "properties": {'distro': 'Ubuntu', 'arch': 'x86_64'},
            "size": 5120}
        image = json.loads(content)
        for expected_key, expected_value in expected_image.items():
            self.assertEqual(expected_value, image['images'][0][expected_key],
                             "For key '%s' expected header value '%s'. Got '%s'"
                             % (expected_key,
                                expected_value,
                                image['images'][0][expected_key]))
        # 9. PUT image and remove a previously existing property.
        # PUT with only 'arch' replaces the whole property set, so 'distro'
        # is dropped.
        headers = {'X-Image-Meta-Property-Arch': 'x86_64'}
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'PUT', headers=headers)
        self.assertEqual(response.status, 200)
        path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)['images'][0]
        self.assertEqual(len(data['properties']), 1)
        self.assertEqual(data['properties']['arch'], "x86_64")
        # 10. PUT image and add a previously deleted property.
        headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
                   'X-Image-Meta-Property-Arch': 'x86_64'}
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'PUT', headers=headers)
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)['images'][0]
        self.assertEqual(len(data['properties']), 2)
        self.assertEqual(data['properties']['arch'], "x86_64")
        self.assertEqual(data['properties']['distro'], "Ubuntu")
        self.assertNotEqual(data['created_at'], data['updated_at'])
        # DELETE image
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'DELETE')
        self.assertEqual(response.status, 200)
        self.stop_servers()
    @skip_if_disabled
    def test_queued_process_flow(self):
        """
        We test the process flow where a user registers an image
        with Glance but does not immediately upload an image file.
        Later, the user uploads an image file using a PUT operation.
        We track the changing of image status throughout this process.
        0. GET /images
        - Verify no public images
        1. POST /images with public image named Image1 with no location
        attribute and no image data.
        - Verify 201 returned
        2. GET /images
        - Verify one public image
        3. HEAD image
        - Verify image now in queued status
        4. PUT image with image data
        - Verify 200 returned
        5. HEAD images
        - Verify image now in active status
        6. GET /images
        - Verify one public image
        """
        self.cleanup()
        self.start_servers(**self.__dict__.copy())
        # 0. GET /images
        # Verify no public images
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        self.assertEqual(content, '{"images": []}')
        # 1. POST /images with public image named Image1
        # with no location or image data
        headers = minimal_headers('Image1')
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'POST', headers=headers)
        self.assertEqual(response.status, 201)
        data = json.loads(content)
        self.assertEqual(data['image']['checksum'], None)
        self.assertEqual(data['image']['size'], 0)
        self.assertEqual(data['image']['container_format'], 'ovf')
        self.assertEqual(data['image']['disk_format'], 'raw')
        self.assertEqual(data['image']['name'], "Image1")
        self.assertEqual(data['image']['is_public'], True)
        image_id = data['image']['id']
        # 2. GET /images
        # Verify 1 public image
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(data['images'][0]['id'], image_id)
        self.assertEqual(data['images'][0]['checksum'], None)
        self.assertEqual(data['images'][0]['size'], 0)
        self.assertEqual(data['images'][0]['container_format'], 'ovf')
        self.assertEqual(data['images'][0]['disk_format'], 'raw')
        self.assertEqual(data['images'][0]['name'], "Image1")
        # 3. HEAD image
        # Verify status is in queued (no image data uploaded yet)
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'HEAD')
        self.assertEqual(response.status, 200)
        self.assertEqual(response['x-image-meta-name'], "Image1")
        self.assertEqual(response['x-image-meta-status'], "queued")
        self.assertEqual(response['x-image-meta-size'], '0')
        self.assertEqual(response['x-image-meta-id'], image_id)
        # 4. PUT image with image data, verify 200 returned
        image_data = "*" * FIVE_KB
        headers = {'Content-Type': 'application/octet-stream'}
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'PUT', headers=headers,
                                         body=image_data)
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(data['image']['checksum'],
                         hashlib.md5(image_data).hexdigest())
        self.assertEqual(data['image']['size'], FIVE_KB)
        self.assertEqual(data['image']['name'], "Image1")
        self.assertEqual(data['image']['is_public'], True)
        # 5. HEAD image
        # Verify status transitioned to active after the data upload
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'HEAD')
        self.assertEqual(response.status, 200)
        self.assertEqual(response['x-image-meta-name'], "Image1")
        self.assertEqual(response['x-image-meta-status'], "active")
        # 6. GET /images
        # Verify 1 public image still...
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(data['images'][0]['checksum'],
                         hashlib.md5(image_data).hexdigest())
        self.assertEqual(data['images'][0]['id'], image_id)
        self.assertEqual(data['images'][0]['size'], FIVE_KB)
        self.assertEqual(data['images'][0]['container_format'], 'ovf')
        self.assertEqual(data['images'][0]['disk_format'], 'raw')
        self.assertEqual(data['images'][0]['name'], "Image1")
        # DELETE image
        path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
                                              image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'DELETE')
        self.assertEqual(response.status, 200)
        self.stop_servers()
@skip_if_disabled
def test_size_greater_2G_mysql(self):
"""
A test against the actual datastore backend for the registry
to ensure that the image size property is not truncated.
:see https://bugs.launchpad.net/glance/+bug/739433
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 1. POST /images with public image named Image1
# attribute and a size of 5G. Use the HTTP engine with an
# X-Image-Meta-Location attribute to make Glance forego
# "adding" the image data.
# Verify a 201 OK is returned
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Location': 'http://example.com/fakeimage',
'X-Image-Meta-Size': str(FIVE_GB),
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-disk_format': 'raw',
'X-image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
# 2. HEAD /images
# Verify image size is what was passed in, and not truncated
path = response.get('location')
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-size'], str(FIVE_GB))
self.assertEqual(response['x-image-meta-name'], 'Image1')
self.assertEqual(response['x-image-meta-is_public'], 'True')
self.stop_servers()
@skip_if_disabled
def test_zero_initial_size(self):
"""
A test to ensure that an image with size explicitly set to zero
has status that immediately transitions to active.
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 1. POST /images with public image named Image1
# attribute and a size of zero.
# Verify a 201 OK is returned
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Size': '0',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-disk_format': 'raw',
'X-image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
# 2. HEAD image-location
# Verify image size is zero and the status is active
path = response.get('location')
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-size'], '0')
self.assertEqual(response['x-image-meta-status'], 'active')
# 3. GET image-location
# Verify image content is empty
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(len(content), 0)
self.stop_servers()
@skip_if_disabled
def test_traceback_not_consumed(self):
"""
A test that errors coming from the POST API do not
get consumed and print the actual error message, and
not something like <traceback object at 0x1918d40>
:see https://bugs.launchpad.net/glance/+bug/755912
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# POST /images with binary data, but not setting
# Content-Type to application/octet-stream, verify a
# 400 returned and that the error is readable.
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST',
body=test_data_file.name)
self.assertEqual(response.status, 400)
expected = "Content-Type must be application/octet-stream"
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
self.stop_servers()
    @skip_if_disabled
    def test_filtered_images(self):
        """
        Set up four test images and ensure each query param filter works
        """
        self.cleanup()
        self.start_servers(**self.__dict__.copy())
        # 0. GET /images
        # Verify no public images
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        self.assertEqual(content, '{"images": []}')
        image_ids = []
        # 1. POST /images with three public images, and one private image
        # with various attributes
        headers = {'Content-Type': 'application/octet-stream',
                   'X-Image-Meta-Name': 'Image1',
                   'X-Image-Meta-Status': 'active',
                   'X-Image-Meta-Container-Format': 'ovf',
                   'X-Image-Meta-Disk-Format': 'vdi',
                   'X-Image-Meta-Size': '19',
                   'X-Image-Meta-Is-Public': 'True',
                   'X-Image-Meta-Protected': 'True',
                   'X-Image-Meta-Property-pants': 'are on'}
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'POST', headers=headers)
        self.assertEqual(response.status, 201)
        data = json.loads(content)
        self.assertEqual(data['image']['properties']['pants'], "are on")
        self.assertEqual(data['image']['is_public'], True)
        image_ids.append(data['image']['id'])
        headers = {'Content-Type': 'application/octet-stream',
                   'X-Image-Meta-Name': 'My Image!',
                   'X-Image-Meta-Status': 'active',
                   'X-Image-Meta-Container-Format': 'ovf',
                   'X-Image-Meta-Disk-Format': 'vhd',
                   'X-Image-Meta-Size': '20',
                   'X-Image-Meta-Is-Public': 'True',
                   'X-Image-Meta-Protected': 'False',
                   'X-Image-Meta-Property-pants': 'are on'}
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'POST', headers=headers)
        self.assertEqual(response.status, 201)
        data = json.loads(content)
        self.assertEqual(data['image']['properties']['pants'], "are on")
        self.assertEqual(data['image']['is_public'], True)
        image_ids.append(data['image']['id'])
        headers = {'Content-Type': 'application/octet-stream',
                   'X-Image-Meta-Name': 'My Image!',
                   'X-Image-Meta-Status': 'saving',
                   'X-Image-Meta-Container-Format': 'ami',
                   'X-Image-Meta-Disk-Format': 'ami',
                   'X-Image-Meta-Size': '21',
                   'X-Image-Meta-Is-Public': 'True',
                   'X-Image-Meta-Protected': 'False',
                   'X-Image-Meta-Property-pants': 'are off'}
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'POST', headers=headers)
        self.assertEqual(response.status, 201)
        data = json.loads(content)
        self.assertEqual(data['image']['properties']['pants'], "are off")
        self.assertEqual(data['image']['is_public'], True)
        image_ids.append(data['image']['id'])
        headers = {'Content-Type': 'application/octet-stream',
                   'X-Image-Meta-Name': 'My Private Image',
                   'X-Image-Meta-Status': 'active',
                   'X-Image-Meta-Container-Format': 'ami',
                   'X-Image-Meta-Disk-Format': 'ami',
                   'X-Image-Meta-Size': '22',
                   'X-Image-Meta-Is-Public': 'False',
                   'X-Image-Meta-Protected': 'False'}
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        http = httplib2.Http()
        response, content = http.request(path, 'POST', headers=headers)
        self.assertEqual(response.status, 201)
        data = json.loads(content)
        self.assertEqual(data['image']['is_public'], False)
        image_ids.append(data['image']['id'])
        # 2. GET /images
        # Verify three public images
        path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 3)
        # 3. GET /images with name filter
        # Verify correct images returned with name
        params = "name=My%20Image!"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 2)
        for image in data['images']:
            self.assertEqual(image['name'], "My Image!")
        # 4. GET /images with status filter
        # Verify correct images returned with status
        # (images with no uploaded data stay 'queued' regardless of the
        # status header supplied at creation time)
        params = "status=queued"
        path = "http://%s:%d/v1/images/detail?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 3)
        for image in data['images']:
            self.assertEqual(image['status'], "queued")
        params = "status=active"
        path = "http://%s:%d/v1/images/detail?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 0)
        # 5. GET /images with container_format filter
        # Verify correct images returned with container_format
        params = "container_format=ovf"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 2)
        for image in data['images']:
            self.assertEqual(image['container_format'], "ovf")
        # 6. GET /images with disk_format filter
        # Verify correct images returned with disk_format
        params = "disk_format=vdi"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 1)
        for image in data['images']:
            self.assertEqual(image['disk_format'], "vdi")
        # 7. GET /images with size_max filter
        # Verify correct images returned with size <= expected
        params = "size_max=20"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 2)
        for image in data['images']:
            self.assertTrue(image['size'] <= 20)
        # 8. GET /images with size_min filter
        # Verify correct images returned with size >= expected
        params = "size_min=20"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 2)
        for image in data['images']:
            self.assertTrue(image['size'] >= 20)
        # 9. Get /images with is_public=None filter
        # Verify correct images returned with property
        # Bug lp:803656 Support is_public in filtering
        params = "is_public=None"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 4)
        # 10. Get /images with is_public=False filter
        # Verify correct images returned with property
        # Bug lp:803656 Support is_public in filtering
        params = "is_public=False"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 1)
        for image in data['images']:
            self.assertEqual(image['name'], "My Private Image")
        # 11. Get /images with is_public=True filter
        # Verify correct images returned with property
        # Bug lp:803656 Support is_public in filtering
        params = "is_public=True"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 3)
        for image in data['images']:
            self.assertNotEqual(image['name'], "My Private Image")
        # 12. Get /images with protected=False filter
        # Verify correct images returned with property
        params = "protected=False"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 2)
        for image in data['images']:
            self.assertNotEqual(image['name'], "Image1")
        # 13. Get /images with protected=True filter
        # Verify correct images returned with property
        params = "protected=True"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 1)
        for image in data['images']:
            self.assertEqual(image['name'], "Image1")
        # 14. GET /images with property filter
        # Verify correct images returned with property
        params = "property-pants=are%20on"
        path = "http://%s:%d/v1/images/detail?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 2)
        for image in data['images']:
            self.assertEqual(image['properties']['pants'], "are on")
        # 15. GET /images with property filter and name filter
        # Verify correct images returned with property and name
        # Make sure you quote the url when using more than one param!
        params = "name=My%20Image!&property-pants=are%20on"
        path = "http://%s:%d/v1/images/detail?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 1)
        for image in data['images']:
            self.assertEqual(image['properties']['pants'], "are on")
            self.assertEqual(image['name'], "My Image!")
        # 16. GET /images with past changes-since filter
        yesterday = timeutils.isotime(timeutils.utcnow() -
                                      datetime.timedelta(1))
        params = "changes-since=%s" % yesterday
        path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
                                              params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 3)
        # one timezone west of Greenwich equates to an hour ago
        # taking care to pre-urlencode '+' as '%2B', otherwise the timezone
        # '+' is wrongly decoded as a space
        # TODO(eglynn): investigate '+' --> <SPACE> decoding, an artifact
        # of WSGI/webob dispatch?
        now = timeutils.utcnow()
        hour_ago = now.strftime('%Y-%m-%dT%H:%M:%S%%2B01:00')
        params = "changes-since=%s" % hour_ago
        path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
                                              params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 3)
        # 17. GET /images with future changes-since filter
        tomorrow = timeutils.isotime(timeutils.utcnow() +
                                     datetime.timedelta(1))
        params = "changes-since=%s" % tomorrow
        path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
                                              params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 0)
        # one timezone east of Greenwich equates to an hour from now
        now = timeutils.utcnow()
        hour_hence = now.strftime('%Y-%m-%dT%H:%M:%S-01:00')
        params = "changes-since=%s" % hour_hence
        path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
                                              params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 200)
        data = json.loads(content)
        self.assertEqual(len(data['images']), 0)
        # 18. GET /images with invalid (negative) size_min filter
        # Verify 400 returned with a readable error
        params = "size_min=-1"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 400)
        self.assertTrue("filter size_min got -1" in content)
        # 19. GET /images with invalid (negative) size_max filter
        # Verify 400 returned with a readable error
        params = "size_max=-1"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 400)
        self.assertTrue("filter size_max got -1" in content)
        # 20. GET /images with invalid (negative) min_ram filter
        # Verify 400 returned with a readable error
        params = "min_ram=-1"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 400)
        self.assertTrue("Bad value passed to filter min_ram got -1" in content)
        # 21. GET /images with non-boolean protected filter
        # Verify 400 returned with a readable error
        params = "protected=imalittleteapot"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 400)
        self.assertTrue("protected got imalittleteapot" in content)
        # 22. GET /images with non-boolean is_public filter
        # Verify 400 returned with a readable error
        params = "is_public=imalittleteapot"
        path = "http://%s:%d/v1/images?%s" % (
                "127.0.0.1", self.api_port, params)
        response, content = http.request(path, 'GET')
        self.assertEqual(response.status, 400)
        self.assertTrue("is_public got imalittleteapot" in content)
        self.stop_servers()
@skip_if_disabled
def test_limited_images(self):
    """
    Ensure marker and limit query params work
    """
    self.cleanup()
    self.start_servers(**self.__dict__.copy())

    base_url = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
    http = httplib2.Http()

    # 0. GET /images
    # Verify no public images
    response, content = http.request(base_url, 'GET')
    self.assertEqual(response.status, 200)
    self.assertEqual(content, '{"images": []}')

    # 1. POST /images with three public images with various attributes
    image_ids = []
    for image_name in ('Image1', 'Image2', 'Image3'):
        headers = minimal_headers(image_name)
        http = httplib2.Http()
        response, content = http.request(base_url, 'POST', headers=headers)
        self.assertEqual(response.status, 201)
        image_ids.append(json.loads(content)['image']['id'])

    # 2. GET /images with all images
    response, content = http.request(base_url, 'GET')
    self.assertEqual(response.status, 200)
    images = json.loads(content)['images']
    self.assertEqual(len(images), 3)

    # 3. GET /images with limit of 2
    # Verify only two images were returned
    path = "%s?limit=2" % base_url
    response, content = http.request(path, 'GET')
    self.assertEqual(response.status, 200)
    data = json.loads(content)['images']
    self.assertEqual(len(data), 2)
    self.assertEqual(data[0]['id'], images[0]['id'])
    self.assertEqual(data[1]['id'], images[1]['id'])

    # 4. GET /images with marker
    # Verify only the two images after the marker were returned
    path = "%s?marker=%s" % (base_url, images[0]['id'])
    response, content = http.request(path, 'GET')
    self.assertEqual(response.status, 200)
    data = json.loads(content)['images']
    self.assertEqual(len(data), 2)
    self.assertEqual(data[0]['id'], images[1]['id'])
    self.assertEqual(data[1]['id'], images[2]['id'])

    # 5. GET /images with marker and limit
    # Verify only one image was returned with the correct id
    path = "%s?limit=1&marker=%s" % (base_url, images[1]['id'])
    response, content = http.request(path, 'GET')
    self.assertEqual(response.status, 200)
    data = json.loads(content)['images']
    self.assertEqual(len(data), 1)
    self.assertEqual(data[0]['id'], images[2]['id'])

    # 6. GET /images with marker and limit (repeat of step 5; the
    # original comment claimed /images/detail but the same /images
    # path is requested)
    path = "%s?limit=1&marker=%s" % (base_url, images[1]['id'])
    response, content = http.request(path, 'GET')
    self.assertEqual(response.status, 200)
    data = json.loads(content)['images']
    self.assertEqual(len(data), 1)
    self.assertEqual(data[0]['id'], images[2]['id'])

    # DELETE images
    for image_id in image_ids:
        path = "%s/%s" % (base_url, image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'DELETE')
        self.assertEqual(response.status, 200)

    self.stop_servers()
@skip_if_disabled
def test_ordered_images(self):
    """
    Set up three test images and ensure each query param filter works
    """
    self.cleanup()
    self.start_servers(**self.__dict__.copy())

    base_url = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)

    # 0. GET /images
    # Verify no public images
    http = httplib2.Http()
    response, content = http.request(base_url, 'GET')
    self.assertEqual(response.status, 200)
    self.assertEqual(content, '{"images": []}')

    # 1. POST /images with three public images with various attributes
    image_specs = [
        # (name, status, container_format, disk_format, size)
        ('Image1', 'active', 'ovf', 'vdi', '19'),
        ('ASDF', 'active', 'bare', 'iso', '2'),
        ('XYZ', 'saving', 'ami', 'ami', '5'),
    ]
    image_ids = []
    for name, status, container_fmt, disk_fmt, size in image_specs:
        headers = {'Content-Type': 'application/octet-stream',
                   'X-Image-Meta-Name': name,
                   'X-Image-Meta-Status': status,
                   'X-Image-Meta-Container-Format': container_fmt,
                   'X-Image-Meta-Disk-Format': disk_fmt,
                   'X-Image-Meta-Size': size,
                   'X-Image-Meta-Is-Public': 'True'}
        http = httplib2.Http()
        response, content = http.request(base_url, 'POST', headers=headers)
        self.assertEqual(response.status, 201)
        image_ids.append(json.loads(content)['image']['id'])

    def fetch_ids(params=None):
        # GET /images with the given query string and return the ids
        # of the returned images, preserving server ordering.
        url = base_url if params is None else "%s?%s" % (base_url, params)
        http = httplib2.Http()
        response, content = http.request(url, 'GET')
        self.assertEqual(response.status, 200)
        return [image['id'] for image in json.loads(content)['images']]

    # 2. GET /images with no query params
    # Verify three public images sorted by created_at desc
    self.assertEqual(fetch_ids(),
                     [image_ids[2], image_ids[1], image_ids[0]])

    # 3. GET /images sorted by name asc
    self.assertEqual(fetch_ids('sort_key=name&sort_dir=asc'),
                     [image_ids[1], image_ids[0], image_ids[2]])

    # 4. GET /images sorted by size desc
    self.assertEqual(fetch_ids('sort_key=size&sort_dir=desc'),
                     [image_ids[0], image_ids[2], image_ids[1]])

    # 5. GET /images sorted by size desc with a marker
    self.assertEqual(
        fetch_ids('sort_key=size&sort_dir=desc&marker=%s' % image_ids[0]),
        [image_ids[2], image_ids[1]])

    # 6. GET /images sorted by name asc with a marker
    # 'XYZ' sorts last by name, so nothing follows the marker.
    self.assertEqual(
        fetch_ids('sort_key=name&sort_dir=asc&marker=%s' % image_ids[2]),
        [])

    # DELETE images
    for image_id in image_ids:
        path = "%s/%s" % (base_url, image_id)
        http = httplib2.Http()
        response, content = http.request(path, 'DELETE')
        self.assertEqual(response.status, 200)

    self.stop_servers()
@skip_if_disabled
def test_duplicate_image_upload(self):
    """
    Upload initial image, then attempt to upload duplicate image
    """
    self.cleanup()
    self.start_servers(**self.__dict__.copy())

    base_url = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)

    # 0. GET /images
    # Verify no public images
    http = httplib2.Http()
    response, content = http.request(base_url, 'GET')
    self.assertEqual(response.status, 200)
    self.assertEqual(content, '{"images": []}')

    # 1. POST /images with public image named Image1
    first_headers = {'Content-Type': 'application/octet-stream',
                     'X-Image-Meta-Name': 'Image1',
                     'X-Image-Meta-Status': 'active',
                     'X-Image-Meta-Container-Format': 'ovf',
                     'X-Image-Meta-Disk-Format': 'vdi',
                     'X-Image-Meta-Size': '19',
                     'X-Image-Meta-Is-Public': 'True'}
    http = httplib2.Http()
    response, content = http.request(base_url, 'POST',
                                     headers=first_headers)
    self.assertEqual(response.status, 201)
    image = json.loads(content)['image']

    # 2. POST /images with public image named Image1, and ID: 1
    # Re-using the id of the first image must be rejected as a conflict.
    duplicate_headers = {'Content-Type': 'application/octet-stream',
                         'X-Image-Meta-Name': 'Image1 Update',
                         'X-Image-Meta-Status': 'active',
                         'X-Image-Meta-Container-Format': 'ovf',
                         'X-Image-Meta-Disk-Format': 'vdi',
                         'X-Image-Meta-Size': '19',
                         'X-Image-Meta-Id': image['id'],
                         'X-Image-Meta-Is-Public': 'True'}
    http = httplib2.Http()
    response, content = http.request(base_url, 'POST',
                                     headers=duplicate_headers)
    self.assertEqual(response.status, 409)

    self.stop_servers()
@skip_if_disabled
def test_delete_not_existing(self):
    """
    Verify a DELETE on a nonexistent image id returns 404.

    0. GET /images
       - Verify no public images
    1. DELETE /images/1
       - Verify 404
    """
    self.cleanup()
    self.start_servers(**self.__dict__.copy())
    # NOTE: removed unused locals api_port/registry_port; self.api_port
    # is used directly below.
    # 0. GET /images
    # Verify no public images
    path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
    http = httplib2.Http()
    response, content = http.request(path, 'GET')
    self.assertEqual(response.status, 200)
    self.assertEqual(content, '{"images": []}')
    # 1. DELETE /images/1
    # Verify 404 returned
    path = "http://%s:%d/v1/images/1" % ("127.0.0.1", self.api_port)
    http = httplib2.Http()
    response, content = http.request(path, 'DELETE')
    self.assertEqual(response.status, 404)
    self.stop_servers()
@skip_if_disabled
def test_unsupported_default_store(self):
"""
We test that a mis-configured default_store causes the API server
to fail to start.
"""
self.cleanup()
# Point default_store at a backend name that does not exist so that
# store initialization fails during server startup.
self.default_store = 'shouldnotexist'
# ensure failure exit code is available to assert on
# -- on slower machines this needs a few seconds or
# the unit test will fail
self.api_server.server_control_options += ' --await-child=3'
# ensure that the API server fails to launch
# NOTE(review): 255 is presumably the server-control script's
# startup-failure exit code -- confirm against start_server().
self.start_server(self.api_server,
expect_launch=False,
expected_exitcode=255,
**self.__dict__.copy())
def _do_test_post_image_content_missing_format(self, format):
    """
    We test that missing container/disk format fails with 400 "Bad Request"

    :param format: format header suffix to omit, either
                   'container_format' or 'disk_format'
                   (parameter name kept for interface compatibility,
                   although it shadows the builtin)
    :see https://bugs.launchpad.net/glance/+bug/933702
    """
    self.cleanup()
    self.start_servers(**self.__dict__.copy())

    path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)

    # POST /images without given format being specified
    headers = minimal_headers('Image1')
    del headers['X-Image-Meta-' + format]
    with tempfile.NamedTemporaryFile() as test_data_file:
        test_data_file.write("XXX")
        test_data_file.flush()
        http = httplib2.Http()
        response, content = http.request(path, 'POST',
                                         headers=headers,
                                         body=test_data_file.name)
    self.assertEqual(response.status, 400)
    # e.g. 'container_format' -> 'container'; renamed the local from
    # 'type' to avoid shadowing the builtin.
    fmt_type = format.replace('_format', '')
    expected = "Details: Invalid %s format 'None' for image" % fmt_type
    self.assertTrue(expected in content,
                    "Could not find '%s' in '%s'" % (expected, content))
    self.stop_servers()
@skip_if_disabled
def _do_test_post_image_content_missing_diskformat(self):
# NOTE(review): despite the '_diskformat' name, this omits
# 'container_format'; the name and argument appear swapped with the
# '_disk_format' variant below -- confirm intent before renaming.
self._do_test_post_image_content_missing_format('container_format')
@skip_if_disabled
def _do_test_post_image_content_missing_disk_format(self):
# Omit the disk_format header on POST and expect 400 "Bad Request".
self._do_test_post_image_content_missing_format('disk_format')
def _do_test_put_image_content_missing_format(self, format):
    """
    We test that missing container/disk format only fails with
    400 "Bad Request" when the image content is PUT (i.e. not
    on the original POST of a queued image).

    :param format: format header suffix to omit, either
                   'container_format' or 'disk_format'
                   (parameter name kept for interface compatibility,
                   although it shadows the builtin)
    :see https://bugs.launchpad.net/glance/+bug/937216
    """
    self.cleanup()
    self.start_servers(**self.__dict__.copy())

    # POST queued image
    path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
    headers = {
        'X-Image-Meta-Name': 'Image1',
        'X-Image-Meta-Is-Public': 'True',
    }
    http = httplib2.Http()
    response, content = http.request(path, 'POST', headers=headers)
    self.assertEqual(response.status, 201)
    data = json.loads(content)
    image_id = data['image']['id']

    # PUT image content images without given format being specified
    path = ("http://%s:%d/v1/images/%s" %
            ("127.0.0.1", self.api_port, image_id))
    headers = minimal_headers('Image1')
    del headers['X-Image-Meta-' + format]
    with tempfile.NamedTemporaryFile() as test_data_file:
        test_data_file.write("XXX")
        test_data_file.flush()
        http = httplib2.Http()
        response, content = http.request(path, 'PUT',
                                         headers=headers,
                                         body=test_data_file.name)
    self.assertEqual(response.status, 400)
    # e.g. 'container_format' -> 'container'; renamed the local from
    # 'type' to avoid shadowing the builtin.
    fmt_type = format.replace('_format', '')
    expected = "Details: Invalid %s format 'None' for image" % fmt_type
    self.assertTrue(expected in content,
                    "Could not find '%s' in '%s'" % (expected, content))
    self.stop_servers()
@skip_if_disabled
def _do_test_put_image_content_missing_diskformat(self):
# NOTE(review): despite the '_diskformat' name, this omits
# 'container_format'; the name and argument appear swapped with the
# '_disk_format' variant below -- confirm intent before renaming.
self._do_test_put_image_content_missing_format('container_format')
@skip_if_disabled
def _do_test_put_image_content_missing_disk_format(self):
# Omit the disk_format header on PUT and expect 400 "Bad Request".
self._do_test_put_image_content_missing_format('disk_format')
@skip_if_disabled
def test_ownership(self):
"""
Exercise image-owner semantics under the fake auth middleware:
an admin token may create/update an image with an arbitrary owner,
a non-admin token cannot, and only admins see the owner via HEAD.
"""
self.cleanup()
# 'fakeauth' makes X-Auth-Token values of the form
# 'user:tenant:role' drive identity and privileges for both servers.
self.api_server.deployment_flavor = 'fakeauth'
self.registry_server.deployment_flavor = 'fakeauth'
self.start_servers(**self.__dict__.copy())
# Add an image with admin privileges and ensure the owner
# can be set to something other than what was used to authenticate
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
create_headers = {
'X-Image-Meta-Name': 'MyImage',
'X-Image-Meta-disk_format': 'raw',
'X-Image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Owner': 'tenant2',
}
create_headers.update(auth_headers)
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=create_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'HEAD', headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant2', response['x-image-meta-owner'])
# Now add an image without admin privileges and ensure the owner
# cannot be set to something other than what was used to authenticate
auth_headers = {
'X-Auth-Token': 'user1:tenant1:role1',
}
# NOTE: create_headers still carries the image metadata (including
# the attempted Owner: tenant2) from above; only the token changes.
create_headers.update(auth_headers)
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=create_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
# We have to be admin to see the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
# NOTE(review): this update of create_headers appears unused -- the
# following request sends auth_headers, not create_headers.
create_headers.update(auth_headers)
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'HEAD', headers=auth_headers)
self.assertEqual(response.status, 200)
# Owner stays the authenticated tenant, not the requested tenant2.
self.assertEqual('tenant1', response['x-image-meta-owner'])
# Make sure the non-privileged user can't update their owner either
update_headers = {
'X-Image-Meta-Name': 'MyImage2',
'X-Image-Meta-Owner': 'tenant2',
'X-Auth-Token': 'user1:tenant1:role1',
}
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=update_headers)
# The PUT itself succeeds; the owner change is silently ignored.
self.assertEqual(response.status, 200)
# We have to be admin to see the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'HEAD', headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant1', response['x-image-meta-owner'])
# An admin user should be able to update the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant3:admin',
}
update_headers = {
'X-Image-Meta-Name': 'MyImage2',
'X-Image-Meta-Owner': 'tenant2',
}
update_headers.update(auth_headers)
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=update_headers)
self.assertEqual(response.status, 200)
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'HEAD', headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant2', response['x-image-meta-owner'])
self.stop_servers()
| 42.279235 | 79 | 0.570286 |
157dc1ea46db2872b614448fa3db0f741101e40c | 21,924 | py | Python | cinder/opts.py | cloudification-io/cinder | 23d76e01f2b4f3771b57fb287084a4884238b827 | [
"Apache-2.0"
] | null | null | null | cinder/opts.py | cloudification-io/cinder | 23d76e01f2b4f3771b57fb287084a4884238b827 | [
"Apache-2.0"
] | null | null | null | cinder/opts.py | cloudification-io/cinder | 23d76e01f2b4f3771b57fb287084a4884238b827 | [
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###################################################################
# WARNING!
#
# Do not edit this file directly. This file should be generated by
# running the command "tox -e genopts" any time a config option
# has been added, changed, or removed.
###################################################################
import itertools
from keystoneauth1 import loading
from cinder import objects # noqa
objects.register_all()
from cinder.api import common as cinder_api_common
from cinder.api.middleware import auth as cinder_api_middleware_auth
from cinder.api.views import versions as cinder_api_views_versions
from cinder.backup import api as cinder_backup_api
from cinder.backup import chunkeddriver as cinder_backup_chunkeddriver
from cinder.backup import driver as cinder_backup_driver
from cinder.backup.drivers import ceph as cinder_backup_drivers_ceph
from cinder.backup.drivers import gcs as cinder_backup_drivers_gcs
from cinder.backup.drivers import glusterfs as cinder_backup_drivers_glusterfs
from cinder.backup.drivers import nfs as cinder_backup_drivers_nfs
from cinder.backup.drivers import posix as cinder_backup_drivers_posix
from cinder.backup.drivers import s3 as cinder_backup_drivers_s3
from cinder.backup.drivers import swift as cinder_backup_drivers_swift
from cinder.backup import manager as cinder_backup_manager
from cinder.cmd import backup as cinder_cmd_backup
from cinder.cmd import volume as cinder_cmd_volume
from cinder.common import config as cinder_common_config
import cinder.compute
from cinder.compute import nova as cinder_compute_nova
from cinder import context as cinder_context
from cinder import coordination as cinder_coordination
from cinder.db import api as cinder_db_api
from cinder.db import base as cinder_db_base
from cinder.image import glance as cinder_image_glance
from cinder.image import image_utils as cinder_image_imageutils
from cinder.keymgr import conf_key_mgr as cinder_keymgr_confkeymgr
from cinder.message import api as cinder_message_api
from cinder import quota as cinder_quota
from cinder.scheduler import driver as cinder_scheduler_driver
from cinder.scheduler import host_manager as cinder_scheduler_hostmanager
from cinder.scheduler import manager as cinder_scheduler_manager
from cinder.scheduler import scheduler_options as \
cinder_scheduler_scheduleroptions
from cinder.scheduler.weights import capacity as \
cinder_scheduler_weights_capacity
from cinder.scheduler.weights import volume_number as \
cinder_scheduler_weights_volumenumber
from cinder import service as cinder_service
from cinder import service_auth as cinder_serviceauth
from cinder import ssh_utils as cinder_sshutils
from cinder.transfer import api as cinder_transfer_api
from cinder.volume import api as cinder_volume_api
from cinder.volume import driver as cinder_volume_driver
from cinder.volume.drivers.ceph import rbd_iscsi as \
cinder_volume_drivers_ceph_rbdiscsi
from cinder.volume.drivers.datera import datera_iscsi as \
cinder_volume_drivers_datera_dateraiscsi
from cinder.volume.drivers.dell_emc.powerflex import driver as \
cinder_volume_drivers_dell_emc_powerflex_driver
from cinder.volume.drivers.dell_emc.powermax import common as \
cinder_volume_drivers_dell_emc_powermax_common
from cinder.volume.drivers.dell_emc.powerstore import driver as \
cinder_volume_drivers_dell_emc_powerstore_driver
from cinder.volume.drivers.dell_emc.powervault import common as \
cinder_volume_drivers_dell_emc_powervault_common
from cinder.volume.drivers.dell_emc.sc import storagecenter_common as \
cinder_volume_drivers_dell_emc_sc_storagecentercommon
from cinder.volume.drivers.dell_emc.unity import driver as \
cinder_volume_drivers_dell_emc_unity_driver
from cinder.volume.drivers.dell_emc.vnx import common as \
cinder_volume_drivers_dell_emc_vnx_common
from cinder.volume.drivers.dell_emc import xtremio as \
cinder_volume_drivers_dell_emc_xtremio
from cinder.volume.drivers.fujitsu.eternus_dx import eternus_dx_common as \
cinder_volume_drivers_fujitsu_eternus_dx_eternusdxcommon
from cinder.volume.drivers.fusionstorage import dsware as \
cinder_volume_drivers_fusionstorage_dsware
from cinder.volume.drivers.hitachi import hbsd_common as \
cinder_volume_drivers_hitachi_hbsdcommon
from cinder.volume.drivers.hitachi import hbsd_fc as \
cinder_volume_drivers_hitachi_hbsdfc
from cinder.volume.drivers.hitachi import hbsd_rest as \
cinder_volume_drivers_hitachi_hbsdrest
from cinder.volume.drivers.hpe import hpe_3par_common as \
cinder_volume_drivers_hpe_hpe3parcommon
from cinder.volume.drivers.huawei import common as \
cinder_volume_drivers_huawei_common
from cinder.volume.drivers.ibm import flashsystem_common as \
cinder_volume_drivers_ibm_flashsystemcommon
from cinder.volume.drivers.ibm import flashsystem_iscsi as \
cinder_volume_drivers_ibm_flashsystemiscsi
from cinder.volume.drivers.ibm import gpfs as cinder_volume_drivers_ibm_gpfs
from cinder.volume.drivers.ibm.ibm_storage import ds8k_proxy as \
cinder_volume_drivers_ibm_ibm_storage_ds8kproxy
from cinder.volume.drivers.ibm.ibm_storage import ibm_storage as \
cinder_volume_drivers_ibm_ibm_storage_ibmstorage
from cinder.volume.drivers.ibm.storwize_svc import storwize_svc_common as \
cinder_volume_drivers_ibm_storwize_svc_storwizesvccommon
from cinder.volume.drivers.ibm.storwize_svc import storwize_svc_fc as \
cinder_volume_drivers_ibm_storwize_svc_storwizesvcfc
from cinder.volume.drivers.ibm.storwize_svc import storwize_svc_iscsi as \
cinder_volume_drivers_ibm_storwize_svc_storwizesvciscsi
from cinder.volume.drivers import infinidat as cinder_volume_drivers_infinidat
from cinder.volume.drivers.infortrend.raidcmd_cli import common_cli as \
cinder_volume_drivers_infortrend_raidcmd_cli_commoncli
from cinder.volume.drivers.inspur.as13000 import as13000_driver as \
cinder_volume_drivers_inspur_as13000_as13000driver
from cinder.volume.drivers.inspur.instorage import instorage_common as \
cinder_volume_drivers_inspur_instorage_instoragecommon
from cinder.volume.drivers.inspur.instorage import instorage_iscsi as \
cinder_volume_drivers_inspur_instorage_instorageiscsi
from cinder.volume.drivers.kaminario import kaminario_common as \
cinder_volume_drivers_kaminario_kaminariocommon
from cinder.volume.drivers.kioxia import kumoscale as \
cinder_volume_drivers_kioxia_kumoscale
from cinder.volume.drivers.lenovo import lenovo_common as \
cinder_volume_drivers_lenovo_lenovocommon
from cinder.volume.drivers import linstordrv as \
cinder_volume_drivers_linstordrv
from cinder.volume.drivers import lvm as cinder_volume_drivers_lvm
from cinder.volume.drivers.macrosan import driver as \
cinder_volume_drivers_macrosan_driver
from cinder.volume.drivers.netapp import options as \
cinder_volume_drivers_netapp_options
from cinder.volume.drivers.nexenta import options as \
cinder_volume_drivers_nexenta_options
from cinder.volume.drivers import nfs as cinder_volume_drivers_nfs
from cinder.volume.drivers import nimble as cinder_volume_drivers_nimble
from cinder.volume.drivers.open_e import options as \
cinder_volume_drivers_open_e_options
from cinder.volume.drivers.prophetstor import options as \
cinder_volume_drivers_prophetstor_options
from cinder.volume.drivers import pure as cinder_volume_drivers_pure
from cinder.volume.drivers import qnap as cinder_volume_drivers_qnap
from cinder.volume.drivers import quobyte as cinder_volume_drivers_quobyte
from cinder.volume.drivers import rbd as cinder_volume_drivers_rbd
from cinder.volume.drivers import remotefs as cinder_volume_drivers_remotefs
from cinder.volume.drivers.san.hp import hpmsa_common as \
cinder_volume_drivers_san_hp_hpmsacommon
from cinder.volume.drivers.san import san as cinder_volume_drivers_san_san
from cinder.volume.drivers.sandstone import sds_driver as \
cinder_volume_drivers_sandstone_sdsdriver
from cinder.volume.drivers import solidfire as cinder_volume_drivers_solidfire
from cinder.volume.drivers import storpool as cinder_volume_drivers_storpool
from cinder.volume.drivers.stx import common as \
cinder_volume_drivers_stx_common
from cinder.volume.drivers.synology import synology_common as \
cinder_volume_drivers_synology_synologycommon
from cinder.volume.drivers.toyou.acs5000 import acs5000_common as \
cinder_volume_drivers_toyou_acs5000_acs5000common
from cinder.volume.drivers.toyou.acs5000 import acs5000_iscsi as \
cinder_volume_drivers_toyou_acs5000_acs5000iscsi
from cinder.volume.drivers.veritas_access import veritas_iscsi as \
cinder_volume_drivers_veritas_access_veritasiscsi
from cinder.volume.drivers.vmware import vmdk as \
cinder_volume_drivers_vmware_vmdk
from cinder.volume.drivers import vzstorage as cinder_volume_drivers_vzstorage
from cinder.volume.drivers.windows import iscsi as \
cinder_volume_drivers_windows_iscsi
from cinder.volume.drivers.windows import smbfs as \
cinder_volume_drivers_windows_smbfs
from cinder.volume.drivers import zadara as cinder_volume_drivers_zadara
from cinder.volume import manager as cinder_volume_manager
from cinder.volume.targets import spdknvmf as cinder_volume_targets_spdknvmf
from cinder.wsgi import eventlet_server as cinder_wsgi_eventletserver
from cinder.zonemanager.drivers.brocade import brcd_fabric_opts as \
cinder_zonemanager_drivers_brocade_brcdfabricopts
from cinder.zonemanager.drivers.brocade import brcd_fc_zone_driver as \
cinder_zonemanager_drivers_brocade_brcdfczonedriver
from cinder.zonemanager.drivers.cisco import cisco_fabric_opts as \
cinder_zonemanager_drivers_cisco_ciscofabricopts
from cinder.zonemanager.drivers.cisco import cisco_fc_zone_driver as \
cinder_zonemanager_drivers_cisco_ciscofczonedriver
from cinder.zonemanager import fc_zone_manager as \
cinder_zonemanager_fczonemanager
def list_opts():
return [
('backend',
itertools.chain(
[cinder_cmd_volume.host_opt],
)),
('brcd_fabric_example',
itertools.chain(
cinder_zonemanager_drivers_brocade_brcdfabricopts.
brcd_zone_opts,
)),
('cisco_fabric_example',
itertools.chain(
cinder_zonemanager_drivers_cisco_ciscofabricopts.
cisco_zone_opts,
)),
('coordination',
itertools.chain(
cinder_coordination.coordination_opts,
)),
('DEFAULT',
itertools.chain(
cinder_api_common.api_common_opts,
[cinder_api_middleware_auth.use_forwarded_for_opt],
cinder_api_views_versions.versions_opts,
cinder_backup_api.backup_opts,
cinder_backup_chunkeddriver.backup_opts,
cinder_backup_driver.backup_opts,
cinder_backup_drivers_ceph.service_opts,
cinder_backup_drivers_gcs.gcsbackup_service_opts,
cinder_backup_drivers_glusterfs.glusterfsbackup_service_opts,
cinder_backup_drivers_nfs.nfsbackup_service_opts,
cinder_backup_drivers_posix.posixbackup_service_opts,
cinder_backup_drivers_s3.s3backup_service_opts,
cinder_backup_drivers_swift.swiftbackup_service_opts,
cinder_backup_manager.backup_manager_opts,
cinder_cmd_backup.backup_cmd_opts,
[cinder_cmd_volume.cluster_opt],
cinder_common_config.api_opts,
cinder_common_config.core_opts,
cinder_common_config.auth_opts,
cinder_common_config.backup_opts,
cinder_common_config.image_opts,
cinder_common_config.global_opts,
cinder_common_config.compression_opts,
cinder.compute.compute_opts,
cinder_context.context_opts,
cinder_db_api.db_opts,
cinder_db_api.backup_opts,
[cinder_db_base.db_driver_opt],
cinder_image_glance.image_opts,
cinder_image_glance.glance_core_properties_opts,
cinder_image_imageutils.image_opts,
cinder_message_api.messages_opts,
cinder_quota.quota_opts,
cinder_scheduler_driver.scheduler_driver_opts,
cinder_scheduler_hostmanager.host_manager_opts,
cinder_scheduler_manager.scheduler_manager_opts,
[cinder_scheduler_scheduleroptions.
scheduler_json_config_location_opt],
cinder_scheduler_weights_capacity.capacity_weight_opts,
cinder_scheduler_weights_volumenumber.
volume_number_weight_opts,
cinder_service.service_opts,
cinder_sshutils.ssh_opts,
cinder_transfer_api.volume_transfer_opts,
[cinder_volume_api.allow_force_upload_opt],
[cinder_volume_api.volume_host_opt],
[cinder_volume_api.volume_same_az_opt],
[cinder_volume_api.az_cache_time_opt],
cinder_volume_driver.volume_opts,
cinder_volume_driver.iser_opts,
cinder_volume_driver.nvmet_opts,
cinder_volume_driver.scst_opts,
cinder_volume_driver.backup_opts,
cinder_volume_driver.image_opts,
cinder_volume_drivers_datera_dateraiscsi.d_opts,
cinder_volume_drivers_fusionstorage_dsware.volume_opts,
cinder_volume_drivers_infortrend_raidcmd_cli_commoncli.
infortrend_opts,
cinder_volume_drivers_inspur_as13000_as13000driver.
inspur_as13000_opts,
cinder_volume_drivers_inspur_instorage_instoragecommon.
instorage_mcs_opts,
cinder_volume_drivers_inspur_instorage_instorageiscsi.
instorage_mcs_iscsi_opts,
cinder_volume_drivers_kioxia_kumoscale.KUMOSCALE_OPTS,
cinder_volume_drivers_open_e_options.jdss_connection_opts,
cinder_volume_drivers_open_e_options.jdss_iscsi_opts,
cinder_volume_drivers_open_e_options.jdss_volume_opts,
cinder_volume_drivers_sandstone_sdsdriver.sds_opts,
cinder_volume_drivers_toyou_acs5000_acs5000common.
acs5000c_opts,
cinder_volume_drivers_toyou_acs5000_acs5000iscsi.
acs5000_iscsi_opts,
cinder_volume_drivers_veritas_access_veritasiscsi.VA_VOL_OPTS,
cinder_volume_manager.volume_manager_opts,
cinder_wsgi_eventletserver.socket_opts,
)),
('fc-zone-manager',
itertools.chain(
cinder_zonemanager_drivers_brocade_brcdfczonedriver.brcd_opts,
cinder_zonemanager_drivers_cisco_ciscofczonedriver.cisco_opts,
cinder_zonemanager_fczonemanager.zone_manager_opts,
)),
('key_manager',
itertools.chain(
cinder_keymgr_confkeymgr.key_mgr_opts,
)),
('service_user',
itertools.chain(
cinder_serviceauth.service_user_opts,
loading.get_auth_plugin_conf_options('v3password'),
loading.get_session_conf_options(),
)),
('backend_defaults',
itertools.chain(
cinder_volume_driver.volume_opts,
cinder_volume_driver.iser_opts,
cinder_volume_driver.nvmet_opts,
cinder_volume_driver.scst_opts,
cinder_volume_driver.image_opts,
cinder_volume_driver.fqdn_opts,
cinder_volume_drivers_ceph_rbdiscsi.RBD_ISCSI_OPTS,
cinder_volume_drivers_dell_emc_powerflex_driver.
powerflex_opts,
cinder_volume_drivers_dell_emc_powermax_common.powermax_opts,
cinder_volume_drivers_dell_emc_powerstore_driver.
POWERSTORE_OPTS,
cinder_volume_drivers_dell_emc_powervault_common.common_opts,
cinder_volume_drivers_dell_emc_powervault_common.iscsi_opts,
cinder_volume_drivers_dell_emc_sc_storagecentercommon.
common_opts,
cinder_volume_drivers_dell_emc_unity_driver.UNITY_OPTS,
cinder_volume_drivers_dell_emc_vnx_common.VNX_OPTS,
cinder_volume_drivers_dell_emc_xtremio.XTREMIO_OPTS,
cinder_volume_drivers_fujitsu_eternus_dx_eternusdxcommon.
FJ_ETERNUS_DX_OPT_opts,
cinder_volume_drivers_hitachi_hbsdcommon.COMMON_VOLUME_OPTS,
cinder_volume_drivers_hitachi_hbsdfc.FC_VOLUME_OPTS,
cinder_volume_drivers_hitachi_hbsdrest.REST_VOLUME_OPTS,
cinder_volume_drivers_hpe_hpe3parcommon.hpe3par_opts,
cinder_volume_drivers_huawei_common.huawei_opts,
cinder_volume_drivers_ibm_flashsystemcommon.flashsystem_opts,
cinder_volume_drivers_ibm_flashsystemiscsi.
flashsystem_iscsi_opts,
cinder_volume_drivers_ibm_gpfs.gpfs_opts,
cinder_volume_drivers_ibm_gpfs.gpfs_remote_ssh_opts,
cinder_volume_drivers_ibm_ibm_storage_ds8kproxy.ds8k_opts,
cinder_volume_drivers_ibm_ibm_storage_ibmstorage.driver_opts,
cinder_volume_drivers_ibm_storwize_svc_storwizesvccommon.
storwize_svc_opts,
cinder_volume_drivers_ibm_storwize_svc_storwizesvcfc.
storwize_svc_fc_opts,
cinder_volume_drivers_ibm_storwize_svc_storwizesvciscsi.
storwize_svc_iscsi_opts,
cinder_volume_drivers_infinidat.infinidat_opts,
cinder_volume_drivers_kaminario_kaminariocommon.
kaminario_opts,
cinder_volume_drivers_lenovo_lenovocommon.common_opts,
cinder_volume_drivers_lenovo_lenovocommon.iscsi_opts,
cinder_volume_drivers_linstordrv.linstor_opts,
cinder_volume_drivers_lvm.volume_opts,
cinder_volume_drivers_macrosan_driver.config.macrosan_opts,
cinder_volume_drivers_netapp_options.netapp_proxy_opts,
cinder_volume_drivers_netapp_options.netapp_connection_opts,
cinder_volume_drivers_netapp_options.netapp_transport_opts,
cinder_volume_drivers_netapp_options.netapp_basicauth_opts,
cinder_volume_drivers_netapp_options.netapp_cluster_opts,
cinder_volume_drivers_netapp_options.netapp_provisioning_opts,
cinder_volume_drivers_netapp_options.netapp_img_cache_opts,
cinder_volume_drivers_netapp_options.netapp_nfs_extra_opts,
cinder_volume_drivers_netapp_options.netapp_san_opts,
cinder_volume_drivers_netapp_options.netapp_replication_opts,
cinder_volume_drivers_netapp_options.netapp_support_opts,
cinder_volume_drivers_nexenta_options.NEXENTA_CONNECTION_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_ISCSI_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_DATASET_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_NFS_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_RRMGR_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_EDGE_OPTS,
cinder_volume_drivers_nfs.nfs_opts,
cinder_volume_drivers_nimble.nimble_opts,
cinder_volume_drivers_prophetstor_options.DPL_OPTS,
cinder_volume_drivers_pure.PURE_OPTS,
cinder_volume_drivers_qnap.qnap_opts,
cinder_volume_drivers_quobyte.volume_opts,
cinder_volume_drivers_rbd.RBD_OPTS,
cinder_volume_drivers_remotefs.nas_opts,
cinder_volume_drivers_remotefs.volume_opts,
cinder_volume_drivers_san_hp_hpmsacommon.common_opts,
cinder_volume_drivers_san_hp_hpmsacommon.iscsi_opts,
cinder_volume_drivers_san_san.san_opts,
cinder_volume_drivers_solidfire.sf_opts,
cinder_volume_drivers_storpool.storpool_opts,
cinder_volume_drivers_stx_common.common_opts,
cinder_volume_drivers_stx_common.iscsi_opts,
cinder_volume_drivers_synology_synologycommon.cinder_opts,
cinder_volume_drivers_vmware_vmdk.vmdk_opts,
cinder_volume_drivers_vzstorage.vzstorage_opts,
cinder_volume_drivers_windows_iscsi.windows_opts,
cinder_volume_drivers_windows_smbfs.volume_opts,
cinder_volume_drivers_zadara.zadara_opts,
cinder_volume_manager.volume_backend_opts,
cinder_volume_targets_spdknvmf.spdk_opts,
)),
('nova',
itertools.chain(
cinder_compute_nova.nova_opts,
cinder_compute_nova.nova_session_opts,
cinder_compute_nova.nova_auth_opts,
)),
]
| 53.473171 | 78 | 0.751368 |
0a88c7c716816d4c2be37cf6fdc2770e478a9e5c | 1,907 | py | Python | pyhdtoolkit/optics/ripken.py | fsoubelet/PyhDToolk | aef9d5f1fac2d4c4307d50ba5c53f6a5eb635a66 | [
"MIT"
] | 5 | 2020-05-28T09:16:01.000Z | 2021-12-27T18:59:15.000Z | pyhdtoolkit/optics/ripken.py | fsoubelet/PyhDToolk | aef9d5f1fac2d4c4307d50ba5c53f6a5eb635a66 | [
"MIT"
] | 71 | 2020-02-20T20:32:43.000Z | 2022-03-24T17:04:28.000Z | pyhdtoolkit/optics/ripken.py | fsoubelet/PyhDToolk | aef9d5f1fac2d4c4307d50ba5c53f6a5eb635a66 | [
"MIT"
] | 2 | 2021-09-28T16:01:06.000Z | 2022-03-16T19:04:23.000Z | from typing import Union
import numpy as np
from loguru import logger
# ----- Setup Utilites ----- #
def lebedev_beam_size(
    beta1_: Union[float, np.ndarray], beta2_: Union[float, np.ndarray], geom_emit_x: float, geom_emit_y: float
) -> Union[float, np.ndarray]:
    """
    Beam size from the Ripken-Mais Twiss parameters, following Lebedev & Bogacz.
    This implements Eq. (A.3.1) of FERMILAB-PUB-10-383-AD, available at
    https://arxiv.org/ftp/arxiv/papers/1207/1207.5526.pdf

    Args:
        beta1_ (Union[float, np.ndarray]): value(s) for the beta1x or beta1y Ripken parameter.
        beta2_ (Union[float, np.ndarray]): value(s) for the beta2x or beta2y Ripken parameter.
        geom_emit_x (float): geometric emittance of the horizontal plane.
        geom_emit_y (float): geometric emittance of the vertical plane.

    Returns:
        The beam size (horizontal or vertical), computed as
        sqrt(geom_emit_x * beta1_ + geom_emit_y * beta2_).
    """
    logger.trace("Computing beam size according to Lebedev formula: sqrt(epsx * b1_^2 + epsy * b2_^2)")
    variance = geom_emit_x * beta1_ + geom_emit_y * beta2_
    return np.sqrt(variance)
def _beam_size(coordinates_distribution: np.ndarray, method: str = "std") -> float:
"""
Compute beam size from particle coordinates.
Args:
coordinates_distribution (np.ndarray): ensemble of coordinates of the particle distributon.
method (str): the method of calculation to use, either 'std' (using the standard deviation as the
beam size) or 'rms' (root mean square).
Returns:
The computed beam size.
"""
if method == "std":
return coordinates_distribution.std()
elif method == "rms":
return np.sqrt(np.mean(np.square(coordinates_distribution)))
raise NotImplementedError(f"Invalid method provided")
| 38.918367 | 110 | 0.687992 |
f6de2dc89f109e2092b22980ff4d67063804c0df | 385 | py | Python | backend/api/views.py | fernand0aguilar/redesigned-invention | 89219f5fc14194ff0891fc52ded0908010b585c5 | [
"MIT"
] | null | null | null | backend/api/views.py | fernand0aguilar/redesigned-invention | 89219f5fc14194ff0891fc52ded0908010b585c5 | [
"MIT"
] | 9 | 2021-03-09T21:20:34.000Z | 2021-10-06T06:24:00.000Z | backend/api/views.py | fernand0aguilar/redesigned-invention | 89219f5fc14194ff0891fc52ded0908010b585c5 | [
"MIT"
] | null | null | null | from rest_framework import generics
from .models import Votacao
from .serializers import VotacaoSerializer
# Create your views here.
# DRF generic view: GET lists every Votacao record, POST creates a new one.
class ListVotacao(generics.ListCreateAPIView):
    # full table as the base queryset; serialized with VotacaoSerializer
    queryset = Votacao.objects.all()
    serializer_class = VotacaoSerializer
# NOTE(review): despite the "Detail" name this subclasses ListCreateAPIView
# (list/create), not RetrieveAPIView -- presumably unintended; confirm before
# wiring this to a detail route.
class DetailVote(generics.ListCreateAPIView):
    queryset = Votacao.objects.all()
    serializer_class = VotacaoSerializer
| 27.5 | 46 | 0.802597 |
f4a1b3e010f0a44283f49e5930d0fb6972275941 | 7,839 | py | Python | examples/twisted/wamp/work/newapi/test_newapi11.py | admcnally/python | 7880cfc15821c1e942c54862b7fe470e7446c27e | [
"MIT"
] | 11 | 2016-09-14T21:58:37.000Z | 2019-01-28T21:56:14.000Z | examples/twisted/wamp/work/newapi/test_newapi11.py | admcnally/python | 7880cfc15821c1e942c54862b7fe470e7446c27e | [
"MIT"
] | 1 | 2018-10-19T09:21:26.000Z | 2018-10-19T09:21:26.000Z | examples/twisted/wamp/work/newapi/test_newapi11.py | admcnally/python | 7880cfc15821c1e942c54862b7fe470e7446c27e | [
"MIT"
] | 1 | 2019-12-26T21:33:12.000Z | 2019-12-26T21:33:12.000Z | @coroutine
def main(reactor, session):
    # Example 1: "main" style -- make one RPC call on an already-joined session.
    # the session is joined and ready
    result = yield session.call(u'com.example.add2', 2, 3)
    print('result={}'.format(result))
    # as we exit, this signals we are done with the session! the session
    # can be recycled
if __name__ == '__main__':
    client = Client(main=main)
    react(client.run)
@coroutine
def setup(reactor, session):
    # Example 2: "setup" style -- register a procedure, then keep the session alive.
    # the session is joined and ready also!
    def add2(a, b):
        return a + b
    yield session.register(u'com.example.add2', add2)
    print('procedure registered')
    # as we exit, this signals we are ready! the session must be kept.
if __name__ == '__main__':
    client = Client(setup=setup)
    react(client.run)
@coroutine
def client_main(reactor, client):
    # Example 3: fully nested callbacks -- client -> transport -> session.
    @coroutine
    def transport_main(reactor, transport):
        @coroutine
        def session_main(reactor, session):
            result = yield session.call(u'com.example.add2', 2, 3)
            print('result={}'.format(result))
        # returns when the session_main has finished (!), the session
        # calls leave() or the underlying transport closes
        # NOTE(review): other examples call transport.join(realm); here it is
        # handed (session_main, transport) -- confirm the intended signature.
        yield transport.join(session_main, transport)
    # returns when the transport_main won't reconnect
    yield client.connect(transport_main)
if __name__ == '__main__':
    client = Client(client_main=client_main)
    react(client.run)
@coroutine
def session_main(reactor, session):
    # Example 4: caller variant of the "session_main" style.
    result = yield session.call(u'com.example.add2', 2, 3)
    print('result={}'.format(result))
if __name__ == '__main__':
    client = Client(session_main=session_main)
    react(client.run)
@coroutine
def session_main(reactor, session):
    # Example 5: callee variant -- register a procedure and stay up by
    # returning a future that never fires.
    def add2(a, b):
        return a + b
    yield session.register(u'com.example.add2', add2)
    print('procedure registered')
    txaio.return_value(txaio.create_future())
if __name__ == '__main__':
    client = Client(session_main=session_main)
    react(client.run)
@coroutine
def main1(reactor, session, details):
    # Example 6: on_join hook -- called with a joined, ready-to-use session.
    result = yield session.call(u'com.example.add2', 2, 3)
    print('result={}'.format(result))
    yield session.leave()
if __name__ == '__main__':
    # hooking into on_join is the highest-level API -
    # the user callback will fire with a joined session ready to use
    # both the transport auto-reconnection logic and the session creation
    # defaults in Client are reused
    client = Client(on_join=main1)
    react(client.run)
@coroutine
def main1(reactor, transport, details):
    # Example 7: on_open hook -- called with a connected transport.
    # transport.join() yields a joined session object when successful
    session = yield transport.join(details.config.realm)
    # the session is joined and can be used
    result = yield session.call(u'com.example.add2', 2, 3)
    print('result={}'.format(result))
    yield session.leave()
if __name__ == '__main__':
    # hooking into on_connect is a medium-level API -
    # the user callback will fire with a connected transport which
    # can be used to create new sessions from. the auto-reconnection
    # logic in Client is reused. user code can reuse a transport while
    # joining/leaving multiple times. with a multiplexing capable transport,
    # user code may even create multiple concurrent sessions.
    # NOTE(review): the comment says "on_connect" but the keyword used is
    # on_open -- confirm which hook name the API actually exposes.
    client = Client(on_open=main1)
    react(client.run)
@coroutine
def main1(reactor, client, details):
    # Example 8: on_create hook -- drive transport/session creation by hand.
    # client.open() yields a connected transport when successful
    transport = yield client.open()
    # create a session running over the transport
    # NOTE(review): `config` is not defined in this scope (the parameter is
    # `details`) -- this example would raise NameError as written.
    session = yield transport.join(config.realm)
    result = yield session.call(u'com.example.add2', 2, 3)
    print('result={}'.format(result))
    yield session.leave()
    yield transport.close()
if __name__ == '__main__':
    # hooking into on_create is a low-level API - the user callback
    # will fire with a created client, and the user code can
    # control the whole transport and session creation, connection and
    # reconnection process.
    client = Client(on_create=main1)
    react(client.run)
@coroutine
def main1(reactor, client, config):
    # Example 9: same as Example 8, but receiving the config directly.
    transport = yield client.open()
    session = yield transport.join(config.realm)
    result = yield session.call(u'com.example.add2', 2, 3)
    print('result={}'.format(result))
    yield session.leave()
    yield transport.close()
if __name__ == '__main__':
    # hooking into on_create is a low-level API - the user callback
    # will fire with a created client, and the user code can
    # control the whole transport and session creation, connection and
    # reconnection process.
    client = Client(on_create=main1)
    react(client.run)
@coroutine
def main1(reactor, client, config):
    # Example 10: hand-rolled reconnect/backoff loop around open()/join().
    while True:
        delay = client.next_delay()
        if delay:
            yield sleep(delay)
        else:
            break
    try:
        # client.open() yields a connected WAMP transport
        # NOTE(review): `with yield expr as name` is not valid Python syntax
        # (it needs parentheses: `with (yield expr) as name`) -- sketch only.
        with yield client.open() as transport:
            try:
                with yield transport.join(config.realm) as session:
                    result = yield session.call(u'com.example.add2', 2, 3)
                    print('result={}'.format(result))
            except Exception as e:
                pass
    except Exception as e:
        pass
if __name__ == '__main__':
    # hooking into on_create is a low-level API - the user callback
    # will fire with a created client, and the user code can
    # control the whole transport and session creation, connection and
    # reconnection process.
    client = Client(on_create=main1)
    react(client.run)
@coroutine
def main2(reactor, connection):
    # Example 11: session freeze/resume across two transports of one connection.
    # create a new transport from the connection
    transport = yield connection.open()
    # create a new session running on the transport
    session = yield transport.join(connection.config.realm)
    # now register a procedure
    def add2(a, b):
        return a + b
    yield session.register(u'com.example.add2', add2)
    # and call the procedure
    result = yield session.call(u'com.example.add2', 2, 3)
    print('result={}'.format(result))
    # now leave the realm, which frees the underlying transport
    # but freeze the session
    yield session.leave(freeze=True)
    # .. sleep, but not too long, otherwise router finally kills the session.
    yield sleep(60)
    # create a second, new transport from the connection
    # this might be a 2nd TCP connection or a 2nd logical WAMP transport running
    # over a single, multiplexed connection
    transport2 = yield connection.open()
    # now resume the session on the new transport. using the session token mechanism,
    # the router will resume the session and deliver buffered events/calls to the
    # resumed session
    yield session.resume(transport2)
    # create a 2nd session running over the 1st transport
    # NOTE(review): unlike every other join() here this one is not yielded --
    # presumably a missing `yield`; confirm.
    session2 = transport.join(connection.config.realm)
    # call the procedure registered on the (resumed) session running on transport2
    result = yield session.call(u'com.example.add2', 2, 3)
    print('result={}'.format(result))
    # if the transport supports multiplexing, multiple session can run
    # concurrently over the underlying transport
    if transport.is_multiplexed:
        session3 = yield transport.join(connection.config.realm)
    # now finally leave sessions ..
    yield session.leave()
    yield session2.leave()
    # .. and close the transports
    yield transport.close()
    yield transport2.close()
if __name__ == '__main__':
    transports = [
        {
            'type': 'rawsocket',
            'serializer': 'msgpack',
            'endpoint': {
                'type': 'unix',
                'path': '/tmp/cb1.sock'
            }
        }
    ]
    config = Config(realm=u'myrealm1')
    # NOTE(review): this passes `main` to Connection, but this file defines
    # main2 above -- likely meant Connection(main2, ...).
    connection = Connection(main, transports=transports, config=config)
    react(connection.start)
| 29.806084 | 85 | 0.668708 |
218e824ed0638c8c890d7adf3d9c7ed743aa95cf | 769 | py | Python | src/config/dbInit.py | rafaellevissa/codeshow | 4dfe67aea7de9a0d49f69fc60e02b1017e6a43c4 | [
"Beerware"
] | 1 | 2020-08-07T18:17:44.000Z | 2020-08-07T18:17:44.000Z | src/config/dbInit.py | rafaellevissa/codeshow | 4dfe67aea7de9a0d49f69fc60e02b1017e6a43c4 | [
"Beerware"
] | null | null | null | src/config/dbInit.py | rafaellevissa/codeshow | 4dfe67aea7de9a0d49f69fc60e02b1017e6a43c4 | [
"Beerware"
] | null | null | null | from config.db import Db
class DbInit(object):
    """Drops and re-creates the `account` database and its `users` table."""

    # NOTE: instantiated at import time and shared by every DbInit instance --
    # presumably intentional (one shared connection factory); confirm.
    database = Db()

    def iniciar(self):
        """(Re)initialize the database.

        Drops the `account` database if present, recreates it, and creates the
        `users` table. Returns a JSON-like status string; on failure it
        returns an error string instead of raising.
        """
        db = self.database.getDb()
        MAIN_DB = db.cursor()
        try:
            MAIN_DB.execute("drop database if exists account")
            MAIN_DB.execute("create database account")
            MAIN_DB.execute("use account")
            MAIN_DB.execute('CREATE TABLE `users`(`id` int AUTO_INCREMENT PRIMARY KEY, `nome` varchar(20), `email` varchar(250), `cidade` varchar(100),`uf` varchar(3))')
            db.commit()
            return "[{ \"message\":\"Database create new account table done\"}]"
        except Exception:
            # Fix: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; a leftover debug print(db) was also removed.
            return "[{ \"message\":\"Erro ao iniciar o banco\"}]"
| 38.45 | 181 | 0.53316 |
5f06cb6cc24ac0a6008d5ca54686d5918747e577 | 1,245 | py | Python | tests/unit/collectible_test.py | JosephAkim/PetNFT | b2d16d6eda8482aae43e52f752de2fa7af1ed75b | [
"MIT"
] | null | null | null | tests/unit/collectible_test.py | JosephAkim/PetNFT | b2d16d6eda8482aae43e52f752de2fa7af1ed75b | [
"MIT"
] | null | null | null | tests/unit/collectible_test.py | JosephAkim/PetNFT | b2d16d6eda8482aae43e52f752de2fa7af1ed75b | [
"MIT"
] | null | null | null | import pytest
from brownie import network, AdvancedCollectible
from scripts.Useful_scripts import (
get_account,
get_contract,
LOCAL_BLOCKCHAIN_ENVIRONMENTS,
)
def test_can_create_advanced_collectible(
    get_keyhash,
    chainlink_fee,
):
    """Deploy AdvancedCollectible, fund it with LINK, request a collectible,
    replay the VRF callback, and check the token counter advanced."""
    # Arrange: only meaningful against a local mocked chain.
    if network.show_active() not in LOCAL_BLOCKCHAIN_ENVIRONMENTS:
        pytest.skip("Only for local testing")
    collectible = AdvancedCollectible.deploy(
        get_contract("vrf_coordinator").address,
        get_contract("link_token").address,
        get_keyhash,
        {"from": get_account()},
    )
    # Fund the contract with three fees' worth of LINK for the randomness request.
    get_contract("link_token").transfer(
        collectible.address, chainlink_fee * 3, {"from": get_account()}
    )
    # Act
    mint_tx = collectible.createCollectible(
        "None", {"from": get_account()}
    )
    request_id = mint_tx.events["requestedCollectible"]["requestId"]
    assert isinstance(mint_tx.txid, str)
    # Simulate the VRF coordinator answering the request with randomness 777.
    get_contract("vrf_coordinator").callBackWithRandomness(
        request_id, 777, collectible.address, {"from": get_account()}
    )
    # Assert
    assert collectible.tokenCounter() > 0
    assert isinstance(collectible.tokenCounter(), int)
| 32.763158 | 80 | 0.717269 |
f077e907c7235e5708afd9c4b1bdaa1c4bb3952f | 5,085 | py | Python | mouse_tracking/python_code/Batch_OFT.py | li-shen-amy/lick_detect | 7e82025aff0691c7f36bbea38a97349468bc3c4e | [
"MIT"
] | 1 | 2021-02-26T00:02:24.000Z | 2021-02-26T00:02:24.000Z | mouse_tracking/python_code/Batch_OFT.py | li-shen-amy/lick_detect | 7e82025aff0691c7f36bbea38a97349468bc3c4e | [
"MIT"
] | 1 | 2022-03-31T08:11:19.000Z | 2022-03-31T08:11:19.000Z | mouse_tracking/python_code/Batch_OFT.py | li-shen-amy/behavior | 7e82025aff0691c7f36bbea38a97349468bc3c4e | [
"MIT"
] | null | null | null | '''
# Requirement: Python 3, opencv-python, tkinter
# Usage instruction:
Select downsampled video file (recommend 480p x 480p for faster speed)
Once finished, video with tracking trace and excel sheet appear in source folder for video
For excel file: three columns, first is x, second is y coord, third is pixel distance traveled between frames
For the third column, value is always incorrect, so change to zero.
Plot x and y columns for trace of motion graph (line graph)
Plot motion column for speed trace. Convert y-axis to inches per frame by measure how many pixels per inch in video (and convert speed to secs using 30 fps conversion).
Delete the original excel file for a given video if you want to re-run the program analysis for the same video (otherwise it will just append data to the original excel file, making it really long).
'''
import cv2
import numpy as np
#import matplotlib.pyplot as plt
import os
from math import hypot
from tkinter import Tk
from tkinter.filedialog import askopenfilenames
# Batch open-field-test (OFT) tracker: for each selected video, segment the
# mouse silhouette, track its centroid frame by frame, overlay the trace on an
# output video, and append per-frame position/speed rows to a CSV.
root1 = Tk()
filez = askopenfilenames(parent = root1, title = 'Choose file')
Total_distance=0
# NOTE(review): Total_distance is never reset inside the loop, so the value
# printed after each video is cumulative across all processed videos.
for fullFileName in root1.tk.splitlist(filez):
    filename = fullFileName
    (root, ext) =os.path.splitext(filename)
    print(root)
    duration = 1  # second
    freq = 440  # Hz
    #mouse_cascade = cv2.CascadeClassifier('mouse_body_cascade.xml')
    cap = cv2.VideoCapture(filename)
    Moving_track = [(0,0)]
    font = cv2.FONT_HERSHEY_SIMPLEX
    # export video setting
    width = int(cap.get(3))
    height = int(cap.get(4))
    fourcc = cv2.VideoWriter_fourcc('m','p','4','v')
    out = cv2.VideoWriter(root+'_out_half_widthCenterOFT.mp4',fourcc,30,(width,height))
    # NOTE(review): out.release() only runs once, after the whole for-loop, so
    # every video except the last leaves its writer unreleased.
    Peak_speed = 0
    while not cap.isOpened():
        cap = cv2.VideoCapture(filename)
        cv2.waitKey(1000)
        #os.system('play --no-show-progress --null --channels 1 synth %s sine %f' % (duration, freq))
        print("Can't load the file")
        break
    a=[(0,0)]
    i=0
    Counting = 0
    record = False
    while True:
        i+=1
        ret, img_raw =cap.read() #start capture images from webcam
        if ret == False:
            break
        img_gray = cv2.cvtColor(img_raw,cv2.COLOR_BGR2GRAY)
        # analysis window and central "counting" region, in pixels
        # (assumes a 480x480 downsampled video, per the module docstring)
        y_start = 1
        y_stop = 480
        x_start = 1
        x_stop = 480
        x_start_region4Counting = 120
        x_stop_region4Counting = 360
        y_start_region4Counting = 120
        y_stop_region4Counting = 360
        # dark-animal-on-light-background segmentation: blur, inverse
        # threshold at 30, then erode away small specks
        blur = cv2.GaussianBlur(img_gray,(5,5),0)
        retval,img_bi = cv2.threshold(blur,30,255,cv2.THRESH_BINARY_INV)
        kernel = np.ones((7, 7), np.uint8)
        img_bi = cv2.erode(img_bi,kernel)
        # NOTE(review): two-value unpacking matches the OpenCV 4.x
        # findContours signature (3.x returned three values) -- confirm the
        # pinned OpenCV version.
        contours,hierarchy = cv2.findContours(img_bi.copy(),cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
        # only proceed if at least one contour was found
        if len(contours) > 0:
            # find the largest contour in the mask, then use
            # it to compute the minimum enclosing circle and
            # centroid
            c = max(contours, key=cv2.contourArea)
            ((x, y), radius) = cv2.minEnclosingCircle(c)
            try:
                M = cv2.moments(c)
                center = ((M["m10"] / M["m00"])+(y_start), (M["m01"] / M["m00"])+(x_start))
                if radius >10:
                    prev_center= center
                else:
                    center = prev_center
            except ZeroDivisionError:
                center = prev_center
                print("ZeroDivisionError")
        else:
            center = prev_center
            print('not detected')
        # NOTE(review): prev_center is only ever assigned in the radius>10
        # branch above; if the very first frame yields no valid detection the
        # fallbacks raise NameError.
        d_dist = hypot(Moving_track[-1][0]-center[0],Moving_track[-1][1]-center[1])
        Total_distance = Total_distance + d_dist
        # pixels/frame scaled by 0.04 s/frame and 0.075 units/pixel --
        # presumably a physical-units speed; TODO confirm the calibration
        Speed = (d_dist/0.04)*0.075
        temp = "{:0>.2f}".format(Speed)
        Moving_track.append(center)
        points = np.array(Moving_track)
        cv2.polylines(img_raw,np.int32([points[1:]]),0,(0,0,255))
        # True while the centroid sits inside the central 240x240 region
        Counting_temp = (float(center[0]) > x_start_region4Counting) & (float(center[0]) < x_stop_region4Counting) & (float(center[1]) > y_start_region4Counting) & (float(center[1]) < y_stop_region4Counting)
        Counting+=Counting_temp
        percentage_in_region4Counting = Counting/i
        temp_percent = "{:0>.2f}".format(percentage_in_region4Counting)
        # CSV row: x, y, speed, running center-region fraction, in-region flag
        line = str(center[0])+','+str(center[1])+','+str(Speed)+','+str(temp_percent)+','+str(Counting_temp)+'\n'
        cv2.putText(img_raw,str(Counting_temp),(50,50),font,1,(255,0,0),2,cv2.LINE_AA)
        cv2.rectangle(img_raw,(x_start_region4Counting,y_start_region4Counting),(x_stop_region4Counting,y_stop_region4Counting),(255,0,0))
        cv2.imshow(r'img',img_raw)
        out.write(img_raw)
        # append mode: re-running on the same video keeps extending the CSV
        # (the module docstring tells users to delete the old file first)
        with open(root+r'_trackTrace.csv','a') as f:
            f.write(line)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    cap.release()
    print(fullFileName)
    print(temp_percent)
    print(Total_distance)
print("Processing Done!")
cv2.destroyAllWindows()
out.release()
| 38.522727 | 215 | 0.629105 |
f85e15178a415d6d3645b8eafd13c1acb8fba609 | 61,376 | py | Python | riboSeed/riboSpec.py | nickp60/riboSeed | 636eaf78a1bbe4517c43ddb120e5ca5bb2b97212 | [
"MIT"
] | 7 | 2017-02-04T14:33:36.000Z | 2021-02-14T21:03:33.000Z | riboSeed/riboSpec.py | nickp60/riboSeed | 636eaf78a1bbe4517c43ddb120e5ca5bb2b97212 | [
"MIT"
] | 11 | 2017-03-13T15:23:20.000Z | 2020-06-30T17:34:56.000Z | riboSeed/riboSpec.py | nickp60/riboSeed | 636eaf78a1bbe4517c43ddb120e5ca5bb2b97212 | [
"MIT"
] | 3 | 2017-08-10T12:14:22.000Z | 2022-03-27T14:35:12.000Z | #!/usr/bin/env python3
#-*- coding: utf-8 -*-
# Copyright 2017, National University of Ireland and The James Hutton Institute
# Author: Nicholas Waters
#
# This code is part of the riboSeed package, and is governed by its licence.
# Please see the LICENSE file that should have been included as part of
# this package.
"""
Created on Wed Jan 17 14:44:30 2018
We have developed a scheme by which to predict the number of rDNAs in a
genome based on an assembly graph created by spades.
"""
DEBUG = False  # flip to True (with line_profiler installed) to enable profiling
######
# The following makes profiling some of the operations easier.
# This will be removed before riboSeed v1.0.0
####
if DEBUG:
    from line_profiler import LineProfiler
    def do_profile(follow=[]):
        # Decorator factory: line-profile the wrapped function plus any extra
        # functions listed in `follow`, printing stats after every call.
        # (The mutable `follow=[]` default is safe here: it is only iterated.)
        def inner(func):
            def profiled_func(*args, **kwargs):
                try:
                    profiler = LineProfiler()
                    profiler.add_function(func)
                    for f in follow:
                        profiler.add_function(f)
                    profiler.enable_by_count()
                    return func(*args, **kwargs)
                finally:
                    # stats are printed even if the wrapped call raises
                    profiler.print_stats()
            return profiled_func
        return inner
else:
    def do_profile(follow=[]):
        "Helpful if you accidentally leave in production!"
        # No-op variant: same decorator interface, zero profiling overhead.
        def inner(func):
            def nothing(*args, **kwargs):
                return func(*args, **kwargs)
            return nothing
        return inner
# DEBUG = True
# PLOT = False
# PLOT = True
import sys
import os
import re
import random
import math
import glob
import subprocess
import argparse
import multiprocessing
try:
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot, patches
PLOT = True
except Exception as e: # likely an ImportError, but not taking chances
sys.stderr.write(e)
sys.stderr.write("\nlooks like you have some issue with matplotlib. " +
"Classic matplotlib, amirite? Plotting is disabled\n")
PLOT = False
import networkx as nx
from .shared_methods import set_up_logging, make_barrnap_cmd
class FastgNode(object):
    """A single node (edge record) parsed from a SPAdes .fastg header."""

    def __init__(self, name=None, length=None, cov=None,
                 reverse_complimented=None, neighbor_list=None, raw=None):
        # numeric node id, sequence length, k-mer coverage, RC flag,
        # neighboring FastgNode objects, and the raw header string
        self.name = name
        self.length = length
        self.cov = cov
        self.neighbor_list = neighbor_list
        self.reverse_complimented = reverse_complimented
        self.raw = raw

    def __str__(self):
        """Multi-line, human-readable summary of this node."""
        if self.neighbor_list is None:
            neighbors = "None"
        else:
            neighbors = ",".join(str(n.name) for n in self.neighbor_list)
        length = "None" if self.length is None else self.length
        coverage = "None" if self.cov is None else self.cov
        raw = "None" if self.raw is None else self.raw
        return ("Node: {0}\nNeighbors: {1}\nLength: {2}\nCoverage: {3}\n"
                "Reverse_Complimented?: {4}\nRaw: {5} ").format(
                    self.name, neighbors, length, coverage,
                    self.reverse_complimented, raw)
def get_args(test_args=None):  # pragma: no cover
    """Build and parse the `ribo spec` command-line arguments.

    Args:
        test_args (list, optional): argument list for testing; when None,
            arguments are read from sys.argv[2:] (i.e. after `ribo spec`).

    Returns:
        argparse.Namespace: the parsed arguments.
    """
    parser = argparse.ArgumentParser(
        prog="ribo spec",
        description="Given either an assembly graph or a mapping file " +
        "and reference, determine whether the number of rDNAs appears " +
        "to match the reference",
        add_help=False)  # to allow for custom help
    parser.prog = "ribo spec"
    requiredNamed = parser.add_argument_group('required named arguments')
    requiredNamed.add_argument(
        "-o", "--output", dest='output', action="store",
        help="output directory; " +
        "default: %(default)s",
        type=str, required=True)
    requiredNamed.add_argument(
        "-g", "--assembly_graph",
        dest='assembly_graph',
        action="store", default='', type=str,
        # metavar="assembly_graph.fastg/SPAdes_dir",
        help="fastg assembly graph from SPAdes or a SPAdes output directory." +
        " If the latter, riboSpec will be run on both the final assembly " +
        "graph, and all the intermediate graphs for each k-mer.",
        required=True)
    optional = parser.add_argument_group('optional arguments')
    optional.add_argument("--plot_graphs", dest='plot_graphs',
                          help="draw the network graphs ",
                          action="store_true")
    optional.add_argument("-v", "--verbosity", dest='verbosity',
                          action="store",
                          default=2, type=int, choices=[1, 2, 3, 4, 5],
                          help="Logger writes debug to file in output dir; " +
                          "this sets verbosity level sent to stderr. " +
                          " 1 = debug(), 2 = info(), 3 = warning(), " +
                          "4 = error() and 5 = critical(); " +
                          "default: %(default)s")
    # Fix: type=int added to the four length/threshold options below. Without
    # it, argparse left user-supplied values as strings while the defaults
    # were ints, breaking numeric comparisons downstream.
    optional.add_argument("-m", "--min_contig_len", dest="min_contig_len",
                          action="store", default=75, type=int,
                          help="Contigs under this length will be collapsed;" +
                          " default: %(default)s")
    optional.add_argument("-a", "--min_anchor_length", dest="min_anchor_length",
                          action="store", default=500, type=int,
                          help="Paths must contain at least one node this " +
                          "long as an anchor;" +
                          " default: %(default)s")
    optional.add_argument("-f", "--medium_length_threshold",
                          dest="medium_length_threshold",
                          action="store", default=400, type=int,
                          help="Paths are simplified when contigs are greater than " +
                          "the --min_contig_length, but still short. These " +
                          "medium-length contigs may be assembly artificts or " +
                          "otherwise irrelevent. IF you dont want this filtering " +
                          "applied, set to the same value as --min_contig_len;" +
                          " default: %(default)s")
    optional.add_argument("-t", "--threshold", dest="threshold",
                          action="store", default=1500, type=int,
                          help="paths must be at least this long (bp) to be " +
                          "considered; default: %(default)s")
    optional.add_argument("-b", "--barrnap_length_threshold", dest="barrnap_length_threshold",
                          action="store", default=.75,
                          type=float,
                          help="This gets passed to barrnap's --lencutoff " +
                          "argument, for determining what we should treat " +
                          "as a legitimate hmm hit; default: %(default)s")
    optional.add_argument("--barrnap_exe", dest="barrnap_exe",
                          action="store", default="barrnap",
                          help="Path to barrnap executable;" +
                          " default: %(default)s")
    optional.add_argument("-c", "--cores", dest='cores', action="store",
                          default=None, type=int,
                          help="cores to be used" +
                          "; default: %(default)s")
    optional.add_argument("-x", "--MAKE_ADJACENCY_MATRIX",
                          dest='MAKE_ADJACENCY_MATRIX', action="store_true",
                          help="generate and plot an adjacency matrix" +
                          "; default: %(default)s")
    # # had to make this explicitly to call it a faux optional arg
    optional.add_argument("-h", "--help",
                          action="help", default=argparse.SUPPRESS,
                          help="Displays this help message")
    if test_args is None:
        args = parser.parse_args(sys.argv[2:])
    else:
        args = parser.parse_args(test_args)
    return args
def extract_node_len_cov_rc(node_name):
    """Split a fastg node name into (id, length, coverage, reverse-complement).

    A trailing apostrophe marks the reverse-complement record; it is stripped
    before the EDGE_<id>_length_<len>_cov_<cov> fields are parsed out.
    """
    is_rc = node_name.endswith("'")
    if is_rc:
        node_name = node_name[:-1]
    match = re.search(
        r'EDGE_(?P<node>\d*?)_length_(?P<length>\d*?)_cov_(?P<cov>[\d|\.]*)',
        node_name)
    return (match.group("node"), int(match.group("length")),
            float(match.group("cov")), is_rc)
def make_Node(name):
    """Construct a FastgNode from a raw fastg header string."""
    node_id, seq_len, coverage, is_rc = extract_node_len_cov_rc(name)
    return FastgNode(name=int(node_id),
                     length=seq_len,
                     cov=coverage,
                     reverse_complimented=is_rc,
                     raw=name)
def make_adjacency_matrix(g):
    """Build an adjacency matrix from a dict of node -> [neighbors] pairs.

    Nodes are ordered by sorted key; cell [i][j] is 1 when an edge runs from
    node i to node j, and 2 for a self-loop (i == j).

    Args:
        g (dict): mapping of node name to a list of neighbor names; each
            neighbor must itself be a key of ``g``.

    Returns:
        list: a len(g) x len(g) list-of-lists of ints.
    """
    keys = sorted(g.keys())
    # Precompute key -> position once; the previous keys.index() calls cost
    # O(n) per edge endpoint (O(n*e) overall) for no benefit.
    position = {key: idx for idx, key in enumerate(keys)}
    size = len(keys)
    M = [[0] * size for _ in range(size)]
    for node, neighbors in g.items():
        i = position[node]
        for neighbor in neighbors:
            j = position[neighbor]
            M[i][j] = 2 if i == j else 1
    return M
def plot_adjacency_matrix(G, node_order=None, partitions=[], colors=[], outpath=None): #pragma: no cover
    """
    Save a greyscale image of adjacency matrix ``G`` to ``outpath``.

    - G is an adjacency matrix
    - node_order (optional) is a list of nodes, where each node in G
      appears exactly once
      (NOTE(review): node_order is accepted but never used in the body)
    - partitions is a list of node lists, where each node in G appears
      in exactly one node list
    - colors is a list of strings indicating what color each
      partition should be
    If partitions is specified, the same number of colors needs to be
    specified.
    (The mutable [] defaults are only iterated, never mutated, so they are
    safe here.)
    """
    #Plot adjacency matrix in toned-down black and white
    fig = pyplot.figure(figsize=(4, 4))  # in inches
    pyplot.imshow(G,
                  cmap="Greys",
                  interpolation="none")
    # The rest is just if you have sorted nodes by a partition and want to
    # highlight the module boundaries
    assert len(partitions) == len(colors)
    ax = pyplot.gca()
    for partition, color in zip(partitions, colors):
        current_idx = 0
        for module in partition:
            # outline each module as a square along the diagonal
            ax.add_patch(patches.Rectangle((current_idx, current_idx),
                                           len(module),  # Width
                                           len(module),  # Height
                                           facecolor="none",
                                           edgecolor=color,
                                           # NOTE(review): linewidth is the string
                                           # "1"; recent matplotlib expects a number
                                           # -- confirm against the pinned version
                                           linewidth="1"))
            current_idx += len(module)
    fig.savefig(outpath)
def parse_fastg(f):
    """Parse the headers of a fastg file.

    Returns a tuple of (node object list, {node: neighbors} dict, DiGraph).
    Note that at this stage no edges are added to the DiGraph.
    """
    header_pairs = []
    with open(f, "r") as inf:
        for line in inf:
            if not line.startswith(">"):
                continue
            n_colons = line.count(":")
            if n_colons > 1:
                # malformed header: warn and skip it
                sys.stderr.write("multiple ':'s found in line, and can only " +
                                 "be used to separate nodes from neighbor " +
                                 "list\n")
            elif n_colons == 0:
                # orphaned or terminal node: header has no neighbor list
                header_pairs.append([line.strip()[1:-1], []])
            else:
                # lose the '>' at the beginning and the ';' at the end
                node, neigh = line.strip()[1:-1].split(":")
                header_pairs.append([node, neigh.split(",")])
    g = {name: neighbors for name, neighbors in header_pairs}
    # make objects for each node and each of its neighbors
    node_list = []
    for name, neighbors in header_pairs:
        new_node = make_Node(name)
        if neighbors is None:
            new_node.neighbor_list = []
        else:
            new_node.neighbor_list = [make_Node(x) for x in neighbors]
        node_list.append(new_node)
    # build the networkx object (nodes only; edges come later)
    DG = nx.DiGraph()
    for N in node_list:
        DG.add_node(N.name, cov=N.cov, length=N.length, raw=N.raw)
    return (node_list, g, DG)
def plot_G(
        G,
        nodes5,
        nodes16,
        nodes23,
        outpath, outpath2): # pragma: nocover
    """Plot the assembly graph and its line graph, colored by rRNA gene.

    Nodes (and, in the line graph, edges) touching 5S/16S/23S nodes are
    colored red/blue/green respectively; everything else is grey.  Saves
    the graph plot to outpath and the line-graph plot to outpath2.
    """
    fig = pyplot.figure(figsize=(10, 10)) # in inches
    pos = nx.layout.spring_layout(G, iterations=5)
    # scale node marker sizes to the 10-40 range based on contig length
    node_sizes_raw = [h['length'] for g, h in G.nodes.data()]
    maxsize = max(node_sizes_raw)
    minsize = min(node_sizes_raw)
    sizediff = maxsize - minsize
    # NOTE(review): raises ZeroDivisionError if all nodes share one length
    node_sizes = [10 + (30 * ((x - minsize)/sizediff)) for x in node_sizes_raw]
    N = G.number_of_nodes()
    M = G.number_of_edges()
    _G = nx.line_graph(G)
    _N = _G.number_of_nodes()
    _M = _G.number_of_edges()
    # print(N)
    # print(M)
    # print(_N)
    # print(_M)
    # edge_colors = range(2, M + 2)
    # edge_alphas = [(5 + i) / (M + 4) for i in range(M)]
    node_colors = ["lightgrey" for x in range(N)]
    for i, (g, h) in enumerate(G.nodes.data()):
        if g in nodes5:
            node_colors[i] = "red"
        if g in nodes16:
            node_colors[i] = "blue"
        if g in nodes23:
            node_colors[i] = "green"
    # line-graph nodes are edge tuples; color an edge if either endpoint
    # belongs to an rRNA node set
    _edge_colors = ["lightgrey" for x in range(_M)]
    for i, (to, frm, vals) in enumerate(_G.edges.data()):
        x = [element for tupl in (to, frm) for element in tupl]
        if len(set(x).intersection(nodes5)) > 0:
            _edge_colors[i] = "red"
        if len(set(x).intersection(nodes16)) > 0:
            _edge_colors[i] = "blue"
        if len(set(x).intersection(nodes23)) > 0:
            _edge_colors[i] = "green"
    # node_colors
    nx.draw(G, with_labels=True, linewidths=0, node_size=node_sizes,
            alpha=0.7, font_size=2, arrows=True,
            node_color=node_colors, edge_color="darkgrey", width=.2)
    ax = pyplot.gca()
    ax.set_axis_off()
    fig.savefig(outpath)
    pyplot.close(fig)
    # second figure: the line graph of G
    fig = pyplot.figure(figsize=(10, 10)) # in inches
    nx.draw(nx.line_graph(G), with_labels=True, linewidths=0, node_size=node_sizes,
            alpha=0.7, font_size=2, arrows=False,
            node_color="black", edge_color=_edge_colors, width=.2)
    fig.savefig(outpath2)
    pyplot.close(fig)
# def neighborhood_by_n(G, node, n):
# path_lengths = nx.single_source_dijkstra_path_length(G, node)
# return [node for node, length in path_lengths.items()
# if length == n]
def neighborhood_by_length(G, source, cutoff=20000, ignored_nodes=None):
    """Partition nodes reachable from source by shortest-path length.

    dijkstra_predecessor_and_distance rejects any path >= the cutoff,
    whereas we need to retain the ones where the cutoff occurs within the
    path too.  So we take ALL shortest paths from the source (weighted by
    node length, since fastg edges have no meaningful length) and bin each
    path's nodes as either "interior" (within the cutoff) or "border"
    (the node at which the cumulative length crosses the cutoff).

    Args:
        G: networkx graph whose nodes carry a 'length' attribute
        source: node to measure from
        cutoff (int): path-length threshold in bases
        ignored_nodes (list): paths through any of these nodes are skipped

    Returns:
        tuple: (set of interior node names, set of border node names)
    """
    # fix: the old default was a mutable [] shared across calls
    if ignored_nodes is None:
        ignored_nodes = []
    interior_nodes = [source]
    border_nodes = []
    nodesDict = dict(G.nodes(data=True))
    # Weight paths by *node* length rather than an edge weight.
    # fix: the previous version wrapped only the (lazy) lambda definition in
    # try/except AttributeError, so the networkx<2.4 fallback could never
    # trigger -- the attribute is first touched inside dijkstra.  Probe the
    # attribute eagerly instead.
    try:
        # networkx >= 2: Graph.nodes is an indexable NodeView
        node_attrs = G.nodes
        node_attrs[source]
    except (TypeError, AttributeError):
        # networkx < 2: Graph.nodes is a method; attribute dicts live in .node
        node_attrs = G.node
    get_node_weight = lambda u, v, d: node_attrs[v].get('length', 0)
    targets_path = nx.single_source_dijkstra(G, source, weight=get_node_weight)[1]
    # bin each path's nodes into interior vs border
    ignored = set(ignored_nodes)
    for target, path_to in targets_path.items():
        path_len = 0
        # ignore paths that travel through the "bad places"
        if ignored.intersection(path_to):
            continue
        for i, node in enumerate(path_to):
            if i > 0:  # the source itself does not count toward the length
                path_len = path_len + nodesDict[node]['length']
                if path_len > cutoff:
                    border_nodes.append(node)
                    break
                else:
                    interior_nodes.append(node)
    return (set(interior_nodes), set(border_nodes))
def make_gff_list(gffpath):
    """Read a gff file into a list of tab-split rows, skipping the header line."""
    with open(gffpath, 'r') as g:
        all_lines = g.readlines()
    # the first line is the gff header; every other line is a feature row
    return [line.strip().split("\t") for line in all_lines[1:]]
def find_rRNA_from_gffs(gff_list, partial=False, logger=None):
    """Collect the node names carrying each rRNA gene from barrnap gff rows.

    For gff rows whose product field (column 9) names an rDNA gene
    (16S, 23S, 5S), extract the node name -- the first element returned by
    extract_node_len_cov_rc -- from the sequence id in column 1.  Rows
    marked "partial" are skipped unless partial=True.

    Returns:
        tuple: (unique 16S nodes, unique 23S nodes, unique 5S nodes)
    """
    hits = {"16S": [], "23S": [], "5S": []}
    for row in gff_list:
        product = row[8]
        if "partial" in product and not partial:
            continue
        for gene in ("16S", "23S", "5S"):
            if gene in product:
                hits[gene].append(int(extract_node_len_cov_rc(row[0])[0]))
    return (list(set(hits["16S"])),
            list(set(hits["23S"])),
            list(set(hits["5S"])))
def get_depth_of_big_nodes(G, threshold=5000):
    """Return depths, length-weighted mean depth, and quartiles.

    Getting the simple mean ignores the length of the contigs, so each
    node's coverage is weighted by its length.  Quartiles are computed
    from a list in which each node's coverage is repeated once per
    (rounded-up) kilobase of contig length.
    """
    depths = []
    lengths = []
    for name, data in dict(G.nodes(data=True)).items():
        if data['length'] > threshold:
            depths.append(data['cov'])
            lengths.append(data['length'])
    weighted_sum = 0
    per_kb_depths = []  # coverage repeated once per kb, for quartile stats
    for depth, length in zip(depths, lengths):
        weighted_sum += depth * length
        per_kb_depths.extend([depth for _ in range(math.ceil(length / 1000))])
    totlen = sum(lengths)
    ave = weighted_sum / totlen
    quarts = [percentile(per_kb_depths, q) for q in [.25, .5, .75]]
    return (depths, ave, quarts)
def make_silly_boxplot(vals, outpath, names=None, title="", yline=None): #pragma: nocover
    """Draw boxplot(s) of vals with jittered scatter overlay; save to outpath.

    vals may be a flat list of numbers (one box) or a list of lists
    (one box per sublist, optionally labeled via names).
    yline can either be a number (single horizontal line) or a tuple:
    (25%, 50%, 75% quartiles), drawn as dashed/solid/dashed lines.
    """
    fig = pyplot.figure(figsize=(6, 6)) # in inches
    ax = pyplot.axes()
    if isinstance(vals[0], list):
        # multiple datasets: one box per sublist
        pyplot.boxplot(vals, 0, 'rs', 1)
        if names is not None:
            assert len(names) == len(vals), \
                "number of names does not equal number of sets!"
            ax.set_xticklabels(names)
        # overlay individual points with a little jitter so they don't stack
        for i, data in enumerate(vals) :
            for d in data:
                pyplot.scatter(x=i + 1 + random.uniform(-.1, .2),
                               y=d + random.uniform(-.2, .2),
                               alpha=.3)
    else:
        pyplot.boxplot(vals, 0, 'rs', 1)
        for d in vals:
            pyplot.scatter(x=1 + random.uniform(-.1, .1),
                           y=d + random.uniform(-.2, .2),
                           alpha=.3)
    if yline is not None:
        if isinstance(yline, tuple):
            # print(yline)
            assert len(yline)== 3,\
                "yline must either be a single number or a 3-lenght tuple"
            # NOTE(review): the trailing commas below make each statement a
            # 1-tuple expression; harmless but presumably unintentional
            pyplot.axhline(yline[0], color="green", linestyle="--"),
            pyplot.axhline(yline[1], color="green"),
            pyplot.axhline(yline[2], color="green", linestyle="--"),
        else:
            pyplot.axhline(yline, color="green"),
    pyplot.title(title)
    fig.savefig(outpath)
    pyplot.close(fig)
def percentile(N, percent, key=lambda x:x):
    """
    Find the percentile of a list of values.
    # borrowed from http://code.activestate.com/recipes/511478-finding-the-percentile-of-the-values/
    @parameter N - is a list of values.
    @parameter percent - a float value from 0.0 to 1.0.
    @parameter key - optional key function to compute value from each element of N.
    @return - the percentile of the values, or None if N is empty
    """
    if not N:
        return None
    # fix: sort a copy -- the previous version called N.sort(), silently
    # mutating the caller's list
    N = sorted(N)
    k = (len(N) - 1) * percent
    f = math.floor(k)
    c = math.ceil(k)
    if f == c:
        # the rank lands exactly on an element
        return key(N[int(k)])
    # otherwise interpolate linearly between the two nearest ranks
    d0 = key(N[int(f)]) * (c - k)
    d1 = key(N[int(c)]) * (k - f)
    return d0 + d1
def get_matching_node(name, rc, node_list):
    """Return the node from node_list matching both name and orientation.

    This seems redundant, but rebuilding graphs requires it: a neighbor
    entry carries only a name and a strand, while the full node list holds
    the neighbor information.

    Raises:
        ValueError: when no node matches both name and orientation.
    """
    match = next(
        (candidate for candidate in node_list
         if candidate.name == name
         and candidate.reverse_complimented == rc),
        None)
    if match is None:
        raise ValueError("matching node %s not found in list!" % name)
    return(match)
def node_name_and_strand_in_graph(node, G):
    """Return True if the graph already contains this node, checking both
    the node name and its orientation (strand)."""
    if node.name not in G.nodes():
        return False
    for name, attrs in G.nodes(data=True):
        if name == node.name and \
           attrs["reverse_complimented"] == node.reverse_complimented:
            return True
    return False
def populate_subgraph_from_source(g, root, node_list, counter, length=0, cutoff=1000, debug=False):
    """Recursively grow subgraph g forward from root along neighbor links.

    For each neighbor of root, the full node object is looked up in
    node_list (a neighbor entry carries only name + strand); the node and
    an edge from root are added to g, and the recursion continues until
    the accumulated node length reaches the cutoff.

    Args:
        g: networkx DiGraph being populated in place
        root: node object whose neighbors are examined
        node_list: full list of node objects (with neighbor info)
        counter (int): recursion depth, for debug printing
        length (int): cumulative length of the path so far
        cutoff (int): stop extending once this length is reached
        debug (bool): print a trace of the recursion
    """
    # starting from the root node, examine each of its neighbors
    nneighs = len(root.neighbor_list)
    for i, neigh in enumerate(root.neighbor_list):
        # counter is used for debugging the recursion depth
        if debug:
            print(
                "populating recursion depth %i parent %s: neighbor %s (%i of %i)" % \
                (counter, root.name, neigh.name, i + 1, nneighs ))
        try:
            # we might get an error if there is only a one directional node, or because
            # we are only working with a subset of the full node list.
            full_neigh = get_matching_node(name=neigh.name,
                                           rc=neigh.reverse_complimented,
                                           node_list=node_list)
        except ValueError as e:
            sys.stderr.write(str(e) + "\n")
            # NOTE(review): break (not continue) abandons the REMAINING
            # neighbors too after one failed lookup -- confirm intentional
            break
        if full_neigh.name in g.nodes():
            # if already in the graph, just add the edge
            g.add_edge(root.name, full_neigh.name)
            if length >= cutoff:
                break
        else:
            # if found, add that node and the appropriate edges
            g.add_node(full_neigh.name,
                       cov=full_neigh.cov,
                       length=full_neigh.length,
                       reverse_complimented=full_neigh.reverse_complimented,
                       raw=full_neigh.raw)
            g.add_edge(root.name, full_neigh.name)
            # rinse and repeat, if node is not in the graph already
            populate_subgraph_from_source(
                g=g,
                root=full_neigh,
                node_list=node_list,
                counter=counter + 1,
                cutoff=cutoff,
                length= length +full_neigh.length,
                debug=debug)
def reverse_populate_subgraph_from_source(g, root, node_list, counter, length=0, cutoff=1000, debug=False):
    """Recursively grow subgraph g from root against the neighbor links.

    Scans node_list for any node that lists root among ITS neighbors
    (matching both name and strand), then adds that node and an edge
    root->node -- i.e. the direction is deliberately built opposite to
    how the neighbor relationship is stored.  Recursion stops once the
    accumulated node length reaches the cutoff.

    Args:
        g: networkx DiGraph being populated in place
        root: node object to trace back from
        node_list: full list of node objects (with neighbor info)
        counter (int): recursion depth, for debug printing
        length (int): cumulative length of the path so far
        cutoff (int): stop extending once this length is reached
        debug (bool): print a trace of the recursion
    """
    # use this for debugging particular nodes
    # if root.name == 85 or root.name == 84:
    #     debug=True
    #     print(root)
    # look through all nodes
    for node in node_list:
        # print(node.name)
        # if g.has_edge(root.name, node.name):
        #     continue
        # and each of that node's neighbors
        for neigh in node.neighbor_list:
            # that list our root as its neighbor (with right orientation)
            if neigh.name == root.name and \
               neigh.reverse_complimented == root.reverse_complimented:
                # if the node is already in the graph, we add an edge, but not
                # a node, and we dont populate deeper from there
                if node.name in g.nodes():
                    if debug:
                        print("adding edge" , root.name, node.name, "but not recursing")
                    g.add_edge(root.name, node.name)
                else:
                    # check for length here rather than above because we want to retain medium sized bubbles.
                    if length >= cutoff:
                        break
                    if debug:
                        print("adding node", node.name, ", from node", root.name, " rev recursion depth %i" % counter)
                    g.add_node(node.name, cov=node.cov, length=node.length, raw=node.raw)
                    # we want to build the directionality opposite what it is
                    # currently, so we make the nodes going from the root to the node
                    g.add_edge(root.name, node.name)
                    # rinse and repeat
                    reverse_populate_subgraph_from_source(
                        g=g,
                        root=node,
                        node_list=node_list,
                        counter=counter + 1,
                        length=length + node.length,
                        cutoff=cutoff,
                        debug=debug)
def make_rRNAs_dict(gff_list, gff_list_partial):
    """Group rRNA node hits by gene into solid vs partial lists.

    Runs find_rRNA_from_gffs on the strict gff (solid hits) and the
    relaxed gff (partial hits), then removes from each partial list any
    node already present in the corresponding solid list.

    Returns:
        dict: {gene: {"partial": [...], "solid": [...]}} for 16S/23S/5S.
    """
    genes = ("16S", "23S", "5S")
    solid = dict(zip(genes, find_rRNA_from_gffs(gff_list, partial=False)))
    partial = dict(zip(genes, find_rRNA_from_gffs(gff_list_partial, partial=True)))
    rrnas = {}
    for gene in genes:
        rrnas[gene] = {
            "partial": [x for x in partial[gene] if x not in solid[gene]],
            "solid": solid[gene],
        }
    return rrnas
def check_rrnas_dict(rrnas, logger=None):
    """ this detects which rRNAs we have a solid location for, and
    therefore which ones to check for path

    Args:
        rrnas (dict): {gene: {"solid": [...], "partial": [...]}} mapping
        logger: optional logger; problems are reported through it when given

    Returns:
        returns tuple of two Bools, for whether to check 16S, 23S, or both
    """
    def _error(msg):
        # fix: the previous version called logger.error unconditionally and
        # crashed with AttributeError when logger was left as None
        if logger is not None:
            logger.error(msg)
    RUN_16S, RUN_23S = True, True
    if len(rrnas["16S"]["solid"]) > 1:
        # fix: messages previously concatenated without spaces ("bestoption")
        _error(
            "it appears that there are multiple distinct 16S rDNAs in the " +
            "assembly graph; this tracing algorithm is not the best " +
            "option. Please review the graph manually to determine " +
            "probable number of rDNAs")
        # raise ValueError
    elif len(rrnas["16S"]["solid"]) < 1:
        _error("Barrnap failed to detect any full 16S in the assembly graph")
        # raise ValueError
        RUN_16S = False
    if len(rrnas["23S"]["solid"]) > 1:
        _error(
            "it appears that there are multiple distinct 23S rDNAs in the " +
            "assembly graph; this tracing algorithm is not the best " +
            "option. Please review the graph manually to determine " +
            "probable number of rDNAs")
        # raise ValueError
    elif len(rrnas["23S"]["solid"]) < 1:
        _error("Barrnap failed to detect any 23S in the assembly graph")
        # raise ValueError
        RUN_23S = False
    return (RUN_16S, RUN_23S)
def remove_duplicate_nested_lists(l):
    """Drop duplicate inner lists from a list of integer lists.

    Result ordering is arbitrary (set-based deduplication).
    """
    return [list(t) for t in set(map(tuple, l))]
def remove_similar_lists(lst, lengths_lst, medium_threshold = 200):
    """ removes likely assembly errors near repeats by deconvoluting medium length contigs
    We have already filtered out short contigs, but there are still some
    that cause problems with the graph tracing.

    Args:
        lst (list): paths, each a list of node names
        lengths_lst (list): parallel list of the node lengths of each path
        medium_threshold (int): node lengths at or below this are masked
            out before comparing paths

    Returns:
        list: paths with length-duplicate paths to the same endpoint
        reduced to one representative each
    """
    # masked_lengths_list parallels lengths_lst but with lengths <= the
    # threshold dropped, so that later duplicate detection ignores tiny nodes
    deduplicated = [] # recipient structure
    masked_lengths_list = []
    for lengths in lengths_lst:
        masked_lengths = [x for x in lengths if x > medium_threshold ]
        masked_lengths_list.append(masked_lengths)
    # now, identify those lists sharing final nodes.
    path_ends = set([x[-1] for x in lst]) # final nodes
    for n in path_ends:
        # here are all the paths sharing this end point
        sublist_nodes = [] # cant use list comp cause I need the indexes
        sublist_lengths = []
        for i, l in enumerate(lst):
            if l[-1] == n:
                sublist_nodes.append(l)
                sublist_lengths.append(masked_lengths_list[i])
        # Within this sublist, make lists of the unique paths
        # (though these should be all distinct) and unique path lengths
        # (as these could contain duplicates). Then, we check if the lengths
        # of list of unique lengths and number of paths are the same.
        # If they are the same, we add all the paths to the returned list.
        # these should always be unique
        # NOTE(review): uniq_paths_to_end is computed but never used below
        uniq_paths_to_end = set(tuple(x) for x in sublist_nodes)
        uniq_lengths_of_paths_to_end = set(tuple(x) for x in sublist_lengths)
        if len(uniq_lengths_of_paths_to_end) != len(sublist_nodes):
            # we can tell we have duplicate paths, but we dont know how many.
            # There could be two duplicate paths and another distinct path to
            # the node, we go unique path by unique path.
            for uniq_lengths in uniq_lengths_of_paths_to_end:
                # for each of these unique length lists, we should be
                # returning a representative path
                # This marks whether we have found it yet.
                # This could probably be refactored with a well-placed "break"
                selected_representative = False
                for subpath, sublengths in zip(sublist_nodes, sublist_lengths):
                    # if this sublength has a duplicate, add the corresponding
                    # path of only the first one to be returned
                    if tuple(sublengths) == uniq_lengths and not selected_representative:
                        deduplicated.append(subpath)
                        selected_representative = True
        else:
            deduplicated.extend(sublist_nodes)
    return deduplicated
def add_temp_edges(node_list, G):
    """Add a provisional edge from each node's neighbors back to the node.

    The edges are temporary: they get weighted later, and their
    directionality is redone if needed.
    """
    edge_pairs = [(neigh.name, node.name)
                  for node in node_list
                  for neigh in node.neighbor_list]
    for source, target in edge_pairs:
        G.add_edge(source, target)
def find_collapsable_partial_rRNA_nodes(
        rrnas, G, nodes_data, threshold=200, logger=None):
    """ return list of nodes where partial rrna loci neighbor full-length loci
    We only collapse short nodes, by default 200, which should capture all
    the 5S/tRNA nodes.  The graph G is amended in place: for each collapsed
    partial node, bidirectional edges are added bridging the solid node to
    the partial node's other neighbors.

    Args:
        rrnas (dict): {gene: {"solid": [...], "partial": [...]}} mapping
        G: networkx graph; bridging edges are added in place
        nodes_data (dict): node attribute dicts keyed by node name
        threshold (int): partial nodes longer than this are retained
        logger: optional logger for debug tracing

    Returns:
        list: names of the partial nodes marked for collapsing (a node may
        appear once per bridged edge)
    """
    collapsed = []
    for k, vals in rrnas.items():
        # fix: the previous version called logger.debug unconditionally and
        # crashed with AttributeError when logger was left as None
        if logger is not None:
            logger.debug("checking for collapsable %s nodes" % k)
        these_collapsed = []
        if len(vals["partial"]) == 0:
            continue
        # check if partial node neighbors true node
        for part in vals["partial"]:
            # if its a big node with just a bit of a rRNA on an end, we retain it
            if nodes_data[part]["length"] > threshold:
                continue
            if len(vals['solid']) == 0:
                # TODO collapse partials into one node if they are close enough
                pass
            for solid in vals["solid"]:
                if part in G.neighbors(solid):
                    for d in G.edges(part):
                        if d[1] != solid and d[1] not in G.neighbors(solid):
                            # amend graph to reflect the collapse; we dont
                            # remove nodes, but we add the new edges
                            # (make a bi-directional graph for now)
                            G.add_edge(solid, d[1])
                            G.add_edge(d[1], solid)
                            these_collapsed.append(part)
        collapsed.extend(these_collapsed)
    return collapsed
def make_path_without_simple_bubbles(path, g, nodes_data, threshold=20000, logger=None):
    """ return a path after filtering out nodes that could cause a bubble.
    Integrated prophages or integrated plasmids can cause bubbles in the
    assembly graph, where the integrases at each end form a repeat. We set a
    heuristic threshold of 20kb which should include most of these but exclude
    larger bubbles that may be relevant.
    To do this, we go along each node. This is a directional graph at this
    point; if a node's neighbors have the node as THEIR neighbor, we got a
    bubble!  If there are two nodes separating them, a bubble will not be
    detected, hence the "simple" in the method name.

    Args:
        path (list): node names, starting at the rDNA root node
        g: directed graph with a .neighbors() method
        nodes_data (dict): node attribute dicts keyed by node name
        threshold (int): bubbles through nodes longer than this are kept
        logger: optional logger for debug tracing

    Returns:
        list: the path with simple-bubble nodes removed
    """
    filtered_path = [path[0]]
    for i, n in enumerate(path):
        # we ignore bubbles at the rRNA. The assembly there is always a mess
        if i == 0:
            continue
        node = nodes_data[n]
        bubble_found = False
        for neigh in g.neighbors(n):
            full_neigh = nodes_data[neigh]
            # a reciprocal edge through a short enough node marks a bubble
            if (
                    n in g.neighbors(neigh) and
                    full_neigh["length"] <= threshold
            ):
                # fix: the previous version called logger.debug
                # unconditionally, crashing when logger was None
                if logger is not None:
                    logger.debug("found possible bubble at node %s", node)
                bubble_found = True
        if not bubble_found:
            filtered_path.append(n)
    return filtered_path
@do_profile(follow=[make_silly_boxplot])
def process_assembly_graph(args, fastg, output_root, PLOT, which_k, logger):
    """Trace and count rDNA-flanking paths through one assembly graph.

    Pipeline for a single fastg: parse the graph; run barrnap twice
    (strict, then relaxed thresholds) to locate full and partial rRNA
    nodes; add provisional edges and gather depth statistics; mark
    collapsable/short nodes; prune the graph to the neighborhood of the
    16S/23S nodes; then, for each solid 16S/23S node, recursively build a
    directional subgraph and count the distinct filtered paths into/out of
    the rDNA operon.  One result row per subgraph is appended to
    riboSpec_results.tab under output_root.

    Args:
        args: argparse namespace (output, cores, thresholds, exe paths, ...)
        fastg (str): path to the assembly graph to process
        output_root (str): directory for result files and plots
        PLOT (bool): master switch for generating figures
        which_k: k-mer label written to the results table
        logger: logger instance

    Returns:
        tuple of two lists: per-subgraph filtered path counts for the 16S
        and 23S subgraphs respectively.
    """
    # make a list of node objects, a adjacency matrix M, and a DiGRaph object G
    logger.info("Reading assembly graph: %s", fastg)
    node_list, G_dict, G = parse_fastg(f=fastg)
    if PLOT and args.MAKE_ADJACENCY_MATRIX:
        M = make_adjacency_matrix(G_dict)
        plot_adjacency_matrix(
            M, node_order=None, partitions=[], colors=[],
            outpath=os.path.join(output_root, "full_adjacency_matrix.pdf"))
        logger.info("Writing out adjacency graph")
        # NOTE(review): writes under args.output while the plot goes under
        # output_root -- confirm these are intended to differ
        with open(os.path.join(args.output, "tab.txt"), "w") as o:
            for line in M:
                o.write("\t".join([str(x) for x in line]) + "\n")
    # alt nodes
    # NOTE(review): 'dic' appears unused below -- confirm before removing
    dic = {int(x.name): [int(y.name) for y in x.neighbor_list] for x in node_list}
    # run barrnap to find our rDNAs; first time through is to find "full" -
    # length genes, and the second run with the relaxed thresholds is for
    # finding partial genes
    barrnap_gff = os.path.join(output_root, "strict_barrnapped.gff")
    barrnap_gff_partial = os.path.join(output_root, "partial_barrnapped.gff")
    barrnap_cmd = make_barrnap_cmd(
        infasta=fastg,
        outgff=barrnap_gff,
        exe=args.barrnap_exe,
        threads=args.cores,
        thresh=args.barrnap_length_threshold,
        evalue=1e-06,
        kingdom="bac")
    barrnap_cmd_partial = make_barrnap_cmd(
        infasta=fastg,
        outgff=barrnap_gff_partial,
        exe=args.barrnap_exe,
        threads=args.cores,
        thresh=0.1,
        evalue=10,
        kingdom="bac")
    for cmd in [barrnap_cmd, barrnap_cmd_partial]:
        logger.info("running barrnap cmd: %s", cmd)
        subprocess.run(cmd,
                       shell=sys.platform != "win32",
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE,
                       check=True)
    # determine which ones are our 16s, 23s, and 5s nodes
    gff_list = make_gff_list(barrnap_gff)
    # logger.debug(gff_list)
    gff_list_partial = make_gff_list(barrnap_gff_partial)
    # logger.debug(gff_list_partial)
    # dict of {gene: {partial: [nodes]; solid: [nodes]}}
    rrnas = make_rRNAs_dict(gff_list, gff_list_partial)
    logger.debug(rrnas)
    RUN_16S, RUN_23S = check_rrnas_dict(rrnas, logger=logger)
    # this holds the {data} of the nodes keyed by their name
    nodes_data = dict(G.nodes(data=True))
    # add temporary edges; later we will weight them and redo the directionality if needed
    add_temp_edges(node_list, G)
    nodes_data = dict(G.nodes(data=True))
    # get the depths of the big contigs
    depths_of_all_nodes, ave_depth_all_node, qs_all = get_depth_of_big_nodes(
        G, threshold=0)
    depths_of_big_nodes, ave_depth_big_node, qs_big = get_depth_of_big_nodes(
        G, threshold=4000)
    if PLOT:
        make_silly_boxplot(
            vals=[depths_of_all_nodes, depths_of_big_nodes],
            names=["All nodes", "Nodes > 4kb"],
            title="Depths of Nodes in Assembly Graph",
            outpath=os.path.join(output_root, "average_node_depths.pdf")
        )
    collapsed = find_collapsable_partial_rRNA_nodes(rrnas, G, nodes_data=nodes_data, logger=logger)
    logger.info("marked %i partial nodes for collapsing:", len(collapsed))
    logger.debug(collapsed)
    # remove short nodes
    short_nodes = []
    logger.info("Removing nodes shorter than '-m' arg")
    for node in G.nodes():
        if nodes_data[node]["length"] < args.min_contig_len:
            short_nodes.append(node)
    logger.info("marked %i short nodes for ignoring:", len(short_nodes))
    logger.info(short_nodes)
    collapsed.extend(short_nodes)
    ######## Reduce this graph to all nodes within 20kb if a 16S region
    interior_nodes = []
    border_nodes = []
    for i in rrnas["16S"]["solid"]:
        interior, border = neighborhood_by_length(
            G,
            i,
            cutoff=args.threshold,
            ignored_nodes=[])
        logger.debug("16S node %i (close) neighbors:", i)
        logger.debug(interior)
        logger.debug("16S node %i (border) neighbors", i)
        logger.debug(border)
        interior_nodes.extend(interior)
        border_nodes.extend(border)
    if rrnas["16S"]["solid"] != rrnas["23S"]["solid"]:
        for i in rrnas["23S"]["solid"]:
            interior, border = neighborhood_by_length(
                G,
                i,
                cutoff=args.threshold,
                ignored_nodes=rrnas["16S"]["solid"])
            logger.debug("23S node %i (close) neighbors:", i)
            logger.debug(interior)
            logger.debug("23S node %i (border) neighbors", i)
            logger.debug(border)
            interior_nodes.extend(interior)
            border_nodes.extend(border)
    valid_nodes = set(interior_nodes).union(set(border_nodes))
    bad_nodes = set(G.nodes).symmetric_difference(valid_nodes)
    logger.debug("removing %i of %i nodes that aren't near rDNA", len(bad_nodes), len(G.nodes))
    # logger.debug(bad_nodes)
    for node in bad_nodes:
        G.remove_node(node)
    if PLOT and args.plot_graphs and len(G) != 0 :
        plot_G(
            G,
            rrnas["5S"]["solid"],
            rrnas["16S"]["solid"],
            rrnas["23S"]["solid"],
            outpath=os.path.join(args.output, "test_post_reduction_G.pdf"),
            outpath2=os.path.join(args.output, "test_post_reduction_G_linegraph.pdf"),
        )
    ####### identify paths between the 16S and 23S, if its less than say 500bp
    # connector_paths = []
    # for node16 in rrnas["16S"]["solid"]:
    #     for node23 in rrnas["23S"]["solid"]:
    #         connector_paths.extend(
    #             nx.all_simple_paths(G, node16, node23, cutoff=500)
    #         )
    # print("number of connector paths: %i" %len(connector_paths))
    #####################################################################################
    """
    So at this point, we have a poor implementation of the directionality of
    the graph, which we will need in a minute for path finding. What we
    will do here is make a brand new subgraph for both the 5' and 3' regions
    of the operpn.
    lets try to add some edges to our graph First we need to identify our
    center.  For simple instaces (which is all we handle for now), there are
    two scenarios:
    1) 1 clearly defined cluster, where the three genes are all on one contig.
    2) 1 clearly defined contig for each gene.
    In all cases, we set the center of the graph to be the end of the 16S
    gene.  We assume that they are both pointing in the same direction.
    If that is not the case, we have a problem.
                        . center/break
                        .  *
           ========|16S| ==|23S|=|5S|=================
           <-----------   ----------------------->
    in both these cases, we take the same approach.  Look for paths from
    (reverse complimented) 16S and from 23S
    """
    subset_node_list = []
    for n in node_list:
        if n.name in G.nodes():
            subset_node_list.append(n)
    subgraphs = {}
    ANY_TANDEM = "N"
    rRNA_counts_for_plotting = ([], [])  # paths to the 16S and 23S rRNAs
    for region in ["16S", "23S"]:
        counter = 0
        for idx, region_node in enumerate(rrnas[region]['solid']):
            subgraph_name = "{}_{}".format(region, counter)
            counter = counter + 1
            if (
                    (region == "16S" and not RUN_16S) or
                    (region == "23S" and not RUN_23S)
            ):
                logger.info("Skipping proccessing %s nodes", region)
                subgraphs[subgraph_name] = {
                    "raw_paths": [],
                    "filtered_paths": [],
                    "graph": None
                }
                continue
            REV = False
            # lets check the gff to see if the 16S is a positive or a negative hit:
            node = nodes_data[region_node]
            for line in gff_list:
                # identify the line with matching region and full_node_name
                if region in line[8] and node['raw'] in line[0].split(":")[0]:
                    if line[6] == "-":
                        REV = True
                    break
            # whos on first?
            # all this does is to make sure we take the oposite path from
            # the 16S nodes
            # NOTE(review): reverse_compliment_now appears unused below
            reverse_compliment_now = REV if region == "16S" else not REV
            # retrieve the starting node (16S or 23S) from the list of nodes,
            # making sure we get the one with the correct orientation
            init_node = None
            for N in subset_node_list:
                if N.name == region_node and N.reverse_complimented == REV:
                    init_node = N
            if init_node is None:
                raise ValueError("no initial node found to initiate recursive subtree construction")
            logger.debug("Initial node")
            logger.debug(init_node)
            # add this initial node to a brand new DiGraph
            g = nx.DiGraph()
            g.add_node(
                init_node.name,
                cov=init_node.cov,
                length=init_node.length,
                reverse_complimented=init_node.reverse_complimented,
                raw=init_node.raw)
            # here is the path tracing algorithm.
            # We get all paths leading to the 16S, or all the paths away from 23S
            logger.info("recursivly populating %s subgraph from initial node",
                        subgraph_name)
            if region == "16S":
                populate_subgraph_from_source(
                    g=g,
                    root=init_node,
                    length=0,
                    cutoff=args.threshold,
                    node_list=subset_node_list,
                    counter=1)
            else:
                reverse_populate_subgraph_from_source(
                    g=g,
                    root=init_node,
                    node_list=subset_node_list,
                    cutoff=args.threshold,
                    counter=1)
            logger.debug("nodes in %s subgraph", subgraph_name)
            logger.debug(g.nodes())
            subgraph_nodes_data = dict(g.nodes(data=True))
            if PLOT and args.plot_graphs:
                logger.info("plotting reconstructed tree from %s node %s",
                            subgraph_name, init_node.name)
                plot_G(
                    g,
                    rrnas["5S"]["solid"],
                    rrnas["16S"]["solid"],
                    rrnas["23S"]["solid"],
                    outpath=os.path.join(args.output, "subgraph_%s.pdf" % subgraph_name),
                    outpath2=os.path.join(
                        args.output, "line_subgraph_%s.pdf" % subgraph_name),
                )
            if PLOT:
                plot_adjacency_matrix(
                    make_adjacency_matrix(nx.convert.to_dict_of_dicts(g)), node_order=None,
                    partitions=[], colors=[],
                    outpath=os.path.join(
                        args.output,
                        "%s_subgraph_adjacency_matrix.pdf" % subgraph_name))
            # count the paths going out from the 16S/23S
            logger.debug("tips will have an out-degree of 0 on this reduced graph")
            logger.debug([g.out_degree(node) for node in g.nodes()])
            tips = [node for node in g.nodes() if g.out_degree(node) == 0]
            logger.debug("tips of subgraph:")
            logger.debug(tips)
            out_paths_region_raw = []
            for tip in tips:
                out_paths_region_raw.extend(nx.all_simple_paths(g, region_node, tip))
            logger.info("number of raw paths to %s: %i",
                        subgraph_name, len(out_paths_region_raw))
            for i in out_paths_region_raw:
                logger.debug(i)
            # # we want to be able to trace the fate of of each possible path through the various filtering stages, we lets make it into a dict here
            # exit_paths_dict = {k, {"raw": v} for k,v in enumerate(out_paths_region_raw)}
            """
            remember the collaping stuff we figured out before?  Lets remove
            those collapsable nodes from the paths now, and get rid of
            redundant paths.  We also take this opportunity to filter out
            paths that include tips not at the tip. It happens, somehow...
            """
            out_paths_region_sans_collapsed = []
            for path in out_paths_region_raw:
                new_path = []
                internal_tip = False
                for i in path:
                    if i not in collapsed:
                        new_path.append(i)
                # here we only add it the path doesnt contain tips (excluding the last item)
                for tip in tips:
                    if tip in path[0: len(path) - 1]:
                        internal_tip = True
                    else:
                        pass
                if not internal_tip:
                    out_paths_region_sans_collapsed.append(new_path)
                else:
                    logger.debug(
                        "removed path for containg collapsable node: %s",
                        " ".join([str(x) for x in new_path]))
            # filter out duplicated paths:
            out_paths_region_sans_collapsed_naive = remove_duplicate_nested_lists(
                out_paths_region_sans_collapsed)
            logger.info("number of de-duplicated paths to %s: %i",
                        subgraph_name, len(out_paths_region_sans_collapsed_naive))
            for i in out_paths_region_sans_collapsed_naive:
                logger.debug(i)
            # make list of the lengths of the nodes in the paths
            # we use this to filter out needlessly complex paths through a bunch of tiny nodes
            path_node_lengths = []
            for i, l in enumerate(out_paths_region_sans_collapsed_naive):
                lengths = [nodes_data[x]["length"] for x in l]
                path_node_lengths.append(lengths)
            out_paths_region_sans_collapsed = remove_similar_lists(
                out_paths_region_sans_collapsed_naive,
                path_node_lengths,
                medium_threshold = args.medium_length_threshold)
            logger.info("number of dissimilar paths to %s: %i",
                        subgraph_name, len(out_paths_region_sans_collapsed))
            for i in out_paths_region_sans_collapsed:
                logger.debug(i)
            # now we have filtered, and some paths might not be long enough.
            # Others, if the graph is cyclical, will be too long. here, we trim and filter!
            out_paths_region_re_length_filtered = []
            for path in out_paths_region_sans_collapsed:
                filtered_path = [path[0]]
                filtered_length = 0
                anchor_found = False
                for i, node in enumerate(path):
                    # skip first node, our root (either 16S or 23S)
                    if i > 0:
                        node_length = nodes_data[node]["length"]
                        if node_length > args.min_anchor_length:
                            anchor_found = True
                        filtered_length = filtered_length + node_length
                        filtered_path.append(node)
                        if filtered_length > args.threshold:
                            break
                # if path does indeed pass the threshold, and an
                # "anchor" node has been found
                if filtered_length > 1000 and anchor_found:
                    out_paths_region_re_length_filtered.append(filtered_path)
            out_paths_region_re_length_filtered = remove_duplicate_nested_lists(
                out_paths_region_re_length_filtered)
            # filter out bubble nodes
            out_paths_region = []
            for p in out_paths_region_re_length_filtered:
                out_paths_region.append(
                    make_path_without_simple_bubbles(
                        path=p,
                        g=g,
                        nodes_data=subgraph_nodes_data,
                        threshold=20000, logger=logger)
                )
            # filter out duplicated paths, again:
            out_paths_region = remove_duplicate_nested_lists(out_paths_region)
            logger.debug(
                "Removed %i paths that contained collapsable nodes, etc" % \
                (len(out_paths_region_raw) - len(out_paths_region)))
            logger.info("%s Paths:\n", subgraph_name)
            for i in out_paths_region:
                logger.info(i)
            # normalize each node's depth by how many retained paths use it
            all_region_path_nodes = [x for y in out_paths_region for x in y]
            set_region_path_nodes_normalized_depth = {}
            for i in set(all_region_path_nodes):
                set_region_path_nodes_normalized_depth[i] = nodes_data[i]['cov'] / all_region_path_nodes.count(i)
            if PLOT and len(set_region_path_nodes_normalized_depth.values()) > 1:
                make_silly_boxplot(
                    vals=[x for x in set_region_path_nodes_normalized_depth.values()],
                    outpath=os.path.join(output_root, "normalized_depths_of_%s_nodes.pdf" % subgraph_name),
                    title= "Normalized depths of nodes connected to %s" % subgraph_name,
                    yline=(qs_big[0], ave_depth_big_node, qs_big[2])
                )
            logger.debug("determining the depths of the %s paths" % subgraph_name)
            all_region_path_depths = []
            for i, path in enumerate(out_paths_region):
                sublist = []
                for node in path:
                    sublist.append(set_region_path_nodes_normalized_depth[node])
                all_region_path_depths.append(sublist)
            if PLOT and len(all_region_path_depths) > 1:
                make_silly_boxplot(
                    vals=all_region_path_depths,
                    outpath=os.path.join(output_root, "normalized_depths_of_%s_exiting_paths.pdf" % subgraph_name),
                    title= "Depths of paths connected to %s" % subgraph_name,
                    yline=(qs_big[0], ave_depth_big_node, qs_big[2])
                )
            subgraphs[subgraph_name] = {
                "raw_paths": out_paths_region_raw,
                "filtered_paths": out_paths_region,
                "graph": g
            }
            n_upstream = len(subgraphs[subgraph_name]["filtered_paths"])
            # detect tandem repeats:
            if region == "16S":
                # if the any 23S nodes are in the graph, we may have a tandem repeat
                for path in subgraphs[subgraph_name]["filtered_paths"]:
                    # we ignore the first one, which is the initial node
                    if (
                            len(set(rrnas["23S"]["solid"]).intersection(set(path[1: ]))) > 0 or
                            len(set(rrnas["23S"]["partial"]).intersection(set(path[1: ]))) > 0
                    ):
                        ANY_TANDEM = "Y"
            else:
                assert region == "23S", "invalid region"
                # if the any 16S nodes are in the graph, we may have a tandem repeat
                for path in subgraphs[subgraph_name]["filtered_paths"]:
                    # we ignore the first one, which is the initial node
                    if (
                            len(set(rrnas["16S"]["solid"]).intersection(set(path[1: ]))) > 0 or
                            len(set(rrnas["16S"]["partial"]).intersection(set(path[1: ]))) > 0
                    ):
                        ANY_TANDEM = "Y"
            # interpret results
            # n_upstream = len(subgraphs[subgraph_name]["filtered_paths"])
            # n_downstream = len(subgraphs[subgraph_name]["filtered_paths"])
            # conclusive = "N"
            # logger.info("Paths leading to rDNA operon: %i", n_upstream)
            # logger.info("Paths exiting rDNA operon: %i", n_downstream)
            # if n_upstream == n_downstream:
            #     logger.info("This indicates that there are at least %i rDNAs present",
            #                 n_upstream)
            #     conclusive = "Y"
            # else:
            #     logger.info("inconclusive results")
            # write out results to tidy file. we are appending in case we have
            # multiple files. The columns are file_path, k, which_subgraph, n_paths_out,
            # conclusive_or_not, any_tandem_repeats
            if region == "16S":
                rRNA_counts_for_plotting[0].append(n_upstream)
            else:
                rRNA_counts_for_plotting[1].append(n_upstream)
            outfile = os.path.join(output_root, "riboSpec_results.tab")
            # NOTE(review): the format string pulls fastg/which_k/etc from
            # locals(); fragile if any of those names are renamed
            with open(outfile, "a") as o:
                o.write(
                    "{fastg}\t{which_k}\t{subgraph_name}\t{n_upstream}\t\t{ANY_TANDEM}\n".format(
                        **locals()))
    return rRNA_counts_for_plotting
def get_fastgs_from_spades_dir(d, logger=None):
    """ get the paths to the assembly graphs in a spades output dir.
    There will be one for the main output in the root of the directory,
    and then one in each of the subdirectories for the different kmer
    assemblies (ie ./root/K21/assembly_graph.fastg).
    Returns the path to the main assembly graph and a dict of subgraphs,
    {"final": "/path/to/assembly_graph.fastg",
     "21": "/path/to/K21/assembly_graph.fastg", ...}
    Exits with status 1 if the main assembly graph is missing.
    """
    main_g = os.path.join(d, "assembly_graph.fastg")
    if not os.path.exists(main_g):
        # Bug fix: logger.error used to be called unconditionally, which
        # raised AttributeError when the default logger=None was in effect.
        msg = ("assembly_graph.fastg not found in " +
               "%s. -f arg must be a SPAdes dir " % d)
        if logger is not None:
            logger.error(msg)
        else:
            sys.stderr.write(msg + "\n")
        sys.exit(1)
    # this is silly, but compute time is cheap and
    # me-figuring-out-how-to-easily-get-k-from-filepath time is not cheap.
    # SPAdes only uses odd k values, so probe K1 .. K151.
    max_k = 151
    ks = []
    for k in range(1, max_k + 1, 2):
        if len(glob.glob(os.path.join(d, "K" + str(k), ""))) != 0:
            ks.append(str(k))
    ks_dict = {"final": main_g}
    for k in ks:
        k_path = os.path.join(d, "K" + k, "assembly_graph.fastg")
        assert os.path.exists(k_path), "%s not found" % k_path
        ks_dict[k] = k_path
    return ks_dict
def main(args, logger=None):
    """Entry point: create the output dir, configure logging, run the rRNA
    operon path analysis for one fastg (or every fastg inside a SPAdes
    output dir), then echo the tidy results file and optionally plot.

    ``args`` must provide: output, verbosity, cores, assembly_graph.
    Exits with status 1 if the output directory already exists.
    """
    output_root = os.path.abspath(os.path.expanduser(args.output))
    try:
        os.makedirs(output_root, exist_ok=False)
    except OSError:
        print("Output directory already exists; exiting...")
        sys.exit(1)
    log_path = os.path.join(output_root, "riboSpec.log")
    if logger is None:
        logger = set_up_logging(verbosity=args.verbosity,
                                outfile=log_path,
                                name=__name__)
    logger.info("Usage:\n%s\n", " ".join([x for x in sys.argv]))
    logger.debug("All settings used:")
    for k, v in sorted(vars(args).items()):
        logger.debug("%s: %s", k, str(v))
    if args.cores is None:
        # default to all available cores
        args.cores = multiprocessing.cpu_count()
    # check if handling a single output
    nfastgs = 1
    if os.path.isdir(args.assembly_graph):
        fastg_dict = get_fastgs_from_spades_dir(args.assembly_graph, logger=logger)
        nfastgs = len(fastg_dict)
        logger.info("Determining rRNA operon paths for each of the following fastgs:")
        # these two for loops are not combined cause I wanna see the
        # input files from the get go
        for k,v in fastg_dict.items():
            logger.info(v)
        # (16S counts, 23S counts) accumulated across all k-mer graphs
        rRNA_counts_for_plotting = ([], [])
        for k,v in fastg_dict.items():
            sub_rRNA_counts_for_plotting = process_assembly_graph(
                args=args,
                fastg=v,
                output_root=output_root,
                PLOT=PLOT,
                which_k=k,
                logger=logger)
            rRNA_counts_for_plotting[0].extend(sub_rRNA_counts_for_plotting[0])
            rRNA_counts_for_plotting[1].extend(sub_rRNA_counts_for_plotting[1])
    else:
        rRNA_counts_for_plotting = process_assembly_graph(
            args=args,
            fastg=args.assembly_graph,
            output_root=output_root,
            PLOT=PLOT,
            which_k="final",
            logger=logger)
    # log results and send to stdout
    with open(os.path.join(output_root, "riboSpec_results.tab"), "r") as of:
        for line in of:
            sys.stdout.write(line) # we want the newline here
            logger.info(line.strip())
    if PLOT:
        make_silly_boxplot(
            vals=rRNA_counts_for_plotting,
            outpath=os.path.join(output_root, "rRNA_counts.pdf"),
            title= "rRNA counts estimated from %s assembly graph(s)" % nfastgs,
            names=["16S", "23S"]
        )
| 42.153846 | 517 | 0.573156 |
44c7ff483df3bb819d0dbb07c2033ece4962e6ba | 1,094 | py | Python | fuel_agent/fuel_agent/utils/fs_utils.py | Axam/nsx-web | 4f60d71c05e08740cbdf19b6c9bb0c4cb1e29ad5 | [
"Apache-2.0"
] | 1 | 2021-04-06T16:13:35.000Z | 2021-04-06T16:13:35.000Z | fuel_agent/fuel_agent/utils/fs_utils.py | Axam/nsx-web | 4f60d71c05e08740cbdf19b6c9bb0c4cb1e29ad5 | [
"Apache-2.0"
] | null | null | null | fuel_agent/fuel_agent/utils/fs_utils.py | Axam/nsx-web | 4f60d71c05e08740cbdf19b6c9bb0c4cb1e29ad5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fuel_agent.utils import utils
def make_fs(fs_type, fs_options, fs_label, dev):
    """Create a filesystem (or swap area) of *fs_type* on block device *dev*.

    fs_options and fs_label are whitespace-separated option strings that are
    split and passed through to the mkfs/mkswap command line.
    """
    # NOTE(agordeev): notice the different flag to force the fs creating
    # ext* uses -F flag, xfs/mkswap uses -f flag.
    cmd_line = []
    cmd_name = 'mkswap'
    # Bug fix: this used 'is not', an identity comparison that only worked
    # by accident of CPython string interning; '!=' compares by value.
    if fs_type != 'swap':
        cmd_name = 'mkfs.%s' % fs_type
    cmd_line.append(cmd_name)
    for opt in (fs_options, fs_label):
        # drop empty tokens produced by repeated/leading/trailing spaces
        cmd_line.extend([s for s in opt.split(' ') if s])
    cmd_line.append(dev)
    utils.execute(*cmd_line)
| 36.466667 | 74 | 0.702925 |
a6be99653be7115d869ba8eb4b4e73e90195195d | 233 | py | Python | sphinx_graphiql/__init__.py | dbuksha-wis/sphinx-graphiql | 842f9c478836aaed25ac59e91dda1c1fcbec5747 | [
"MIT"
] | null | null | null | sphinx_graphiql/__init__.py | dbuksha-wis/sphinx-graphiql | 842f9c478836aaed25ac59e91dda1c1fcbec5747 | [
"MIT"
] | null | null | null | sphinx_graphiql/__init__.py | dbuksha-wis/sphinx-graphiql | 842f9c478836aaed25ac59e91dda1c1fcbec5747 | [
"MIT"
] | 1 | 2020-01-23T08:57:12.000Z | 2020-01-23T08:57:12.000Z | from __future__ import absolute_import
from .sphinx_graphiql import SphinxGraphiQL
def setup(app):
    """Sphinx extension entry point: register the ``graphiql`` directive."""
    app.add_directive('graphiql', SphinxGraphiQL)
    metadata = {
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
    return metadata
| 23.3 | 49 | 0.746781 |
0034d74c08b2f7f6d81f5c503c75e4b810173ddc | 202 | py | Python | cadnano/views/gridview/tools/__init__.py | sherwoodyao/cadnano2.5 | ce6ff019b88ee7728de947bd86b35861cf57848d | [
"BSD-3-Clause"
] | 69 | 2015-01-13T02:54:40.000Z | 2022-03-27T14:25:51.000Z | cadnano/views/gridview/tools/__init__.py | scholer/cadnano2.5 | ce6ff019b88ee7728de947bd86b35861cf57848d | [
"BSD-3-Clause"
] | 127 | 2015-01-01T06:26:34.000Z | 2022-03-02T12:48:05.000Z | cadnano/views/gridview/tools/__init__.py | scholer/cadnano2.5 | ce6ff019b88ee7728de947bd86b35861cf57848d | [
"BSD-3-Clause"
] | 48 | 2015-01-22T19:57:49.000Z | 2022-03-27T14:27:53.000Z | # -*- coding: utf-8 -*-
from cadnano.util import to_dot_path
# Compute this package's dotted module path once, then derive the
# fully-qualified class paths for the grid-view tools from it.
gtp = to_dot_path(__file__)
CreateGridToolT = gtp + '.creategridtool.CreateGridTool'
SelectGridToolT = gtp + '.selectgridtool.SelectGridTool' | 40.4 | 56 | 0.772277 |
34c2918fe663823e8e4c891d3998298fbdec385b | 1,534 | py | Python | tasks.py | agateriver/django-make-app | 1c390b78284ee727c645e560c323b53f05629fd4 | [
"MIT"
] | null | null | null | tasks.py | agateriver/django-make-app | 1c390b78284ee727c645e560c323b53f05629fd4 | [
"MIT"
] | null | null | null | tasks.py | agateriver/django-make-app | 1c390b78284ee727c645e560c323b53f05629fd4 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
# ! python3
import shutil
from invoke import run, task
@task
def clean_build():
    """Remove build artifacts: egg-info, build/, dist/ and bytecode cache."""
    shutil.rmtree('x_django_app_maker.egg-info', ignore_errors=True)
    shutil.rmtree('build', ignore_errors=True)
    shutil.rmtree('dist', ignore_errors=True)
    shutil.rmtree('__pycache__', ignore_errors=True)
@task
def lint():
    """Run flake8 over the package and its tests."""
    run("flake8 x_django_app_maker tests")
@task
def test():
    """Run the pytest suite."""
    run("py.test --verbose --showlocals tests/")
@task
def test_all():
    """Run the whole test matrix via tox."""
    run("tox")
@task
def test_cov():
    """Run pytest with coverage measurement for the package."""
    run("py.test --verbose --showlocals --cov=x_django_app_maker tests/")
@task
def test_setuptools():
    """Run the tests through setuptools' test command."""
    run("python setup.py test")
@task
def test_nosetests():
    """Run the tests through nose, including doctests."""
    run("python setup.py nosetests -v --with-doctest")
@task
def coverage():
    """Collect coverage and emit terminal and HTML reports."""
    run("coverage run --source x_django_app_maker setup.py test")
    run("coverage report -m")
    run("coverage html")
@task
def install_requirements():
    """Install/upgrade the pinned requirements (wheel-based)."""
    run("pip install -r requirements.txt --upgrade --use-wheel")
@task
def test_install():
    """Smoke-test that the built wheel installs cleanly from dist/."""
    run("pip uninstall x_django_app_maker --yes", warn=True)
    run("pip install --use-wheel --no-index --find-links dist x_django_app_maker")
    run("pip uninstall x_django_app_maker --yes")
@task
def build():
    """Validate metadata, then build the package, sdist and wheel."""
    run("python setup.py check --verbose --strict --restructuredtext")
    run("python setup.py build")
    run("python setup.py sdist")
    run("python setup.py bdist_wheel")
@task
def publish():
    """Upload the sdist and wheel to the 'pypi' index."""
    run('python setup.py sdist upload -r pypi')
    run('python setup.py bdist_wheel upload -r pypi')
| 19.175 | 82 | 0.682529 |
5ecb897df22febdead29d25f7818602c4b97f971 | 2,893 | py | Python | tb_rest_client/models/models_ce/mqtt_device_transport_configuration.py | fargiolas/thingsboard-python-rest-client | 8c439e27218226b356e8203c2a7f1239278669c0 | [
"Apache-2.0"
] | null | null | null | tb_rest_client/models/models_ce/mqtt_device_transport_configuration.py | fargiolas/thingsboard-python-rest-client | 8c439e27218226b356e8203c2a7f1239278669c0 | [
"Apache-2.0"
] | null | null | null | tb_rest_client/models/models_ce/mqtt_device_transport_configuration.py | fargiolas/thingsboard-python-rest-client | 8c439e27218226b356e8203c2a7f1239278669c0 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
ThingsBoard REST API
ThingsBoard open-source IoT platform REST API documentation. # noqa: E501
OpenAPI spec version: 3.3.3-SNAPSHOT
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from .device_transport_configuration import DeviceTransportConfiguration
class MqttDeviceTransportConfiguration(DeviceTransportConfiguration):
    """Swagger-generated model for the MQTT device transport configuration.
    It declares no fields of its own; everything is inherited from
    DeviceTransportConfiguration.

    Do not edit the class manually.
    Attributes:
        swagger_types (dict): The key is attribute name
                            and the value is attribute type.
        attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Own swagger type map is empty; merge in the parent's declarations.
    swagger_types = {
    }
    if hasattr(DeviceTransportConfiguration, "swagger_types"):
        swagger_types.update(DeviceTransportConfiguration.swagger_types)
    # Own attribute map is empty; merge in the parent's declarations.
    attribute_map = {
    }
    if hasattr(DeviceTransportConfiguration, "attribute_map"):
        attribute_map.update(DeviceTransportConfiguration.attribute_map)
    def __init__(self, *args, **kwargs):  # noqa: E501
        """MqttDeviceTransportConfiguration - a model defined in Swagger"""  # noqa: E501
        self.discriminator = None
        DeviceTransportConfiguration.__init__(self, *args, **kwargs)
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively convert lists, nested models and dicts of models.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(MqttDeviceTransportConfiguration, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, MqttDeviceTransportConfiguration):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 31.791209 | 89 | 0.603871 |
a7a85b22cc203e3e34de723fb34a9d046bf538f6 | 1,698 | py | Python | mail_flask.py | aadottori/mail_flask | 28e2427a768d6c33d831beafdd71f5ab62043f29 | [
"MIT"
] | 1 | 2020-01-17T02:43:05.000Z | 2020-01-17T02:43:05.000Z | mail_flask.py | aadottori/mail_flask | 28e2427a768d6c33d831beafdd71f5ab62043f29 | [
"MIT"
] | null | null | null | mail_flask.py | aadottori/mail_flask | 28e2427a768d6c33d831beafdd71f5ab62043f29 | [
"MIT"
] | 1 | 2020-07-18T16:37:02.000Z | 2020-07-18T16:37:02.000Z | from flask import Flask
from flask_mail import Mail, Message
app = Flask(__name__)
# Mail configuration (Flask-Mail settings; Gmail SMTP by default)
app.config['DEBUG'] = True
app.config['TESTING'] = False
app.config['MAIL_SERVER'] = 'smtp.gmail.com'  # Here, we use Gmail, but you can use any other.
app.config['MAIL_PORT'] = 587
app.config['MAIL_USE_TLS'] = True
app.config['MAIL_USE_SSL'] = False
app.config['MAIL_USERNAME'] = 'email@adress.com'  # Your account's login.
app.config['MAIL_PASSWORD'] = 'password'  # Your account's password.
app.config['MAIL_DEFAULT_SENDER'] = ("Sender's name", 'sender@adress.com')
app.config['MAIL_MAX_EMAILS'] = 1  # The max amount of emails to send in a single connection.
# Bug fix: Flask-Mail reads MAIL_SUPPRESS_SEND (double "P"); the previous
# misspelling MAIL_SUPRESS_SEND was silently ignored by the extension.
app.config['MAIL_SUPPRESS_SEND'] = False  # If True, the email will not be actually sent. Used for testing.
app.config['MAIL_ASCII_ATTACHMENTS'] = True
mail = Mail(app)
def sendEmail(email):
    # Compose and send one message (with a PDF and a PNG attachment) to the
    # single recipient address given in *email*, using the module-level
    # `mail` object configured above.
    msg = Message("Title", recipients=[email]) # The list should contain the recipients' emails.
    msg.body = "This is the message's body."
    msg.html = "<p> This is for message's HTML, for <b>pretty</b> emails. </p>"
    '''This last part is meant for sending emails with attachments. I put two examples: pdf and png.
    This way, the archives should be put in the same folder as your app. If you wish, you could pass the archives
    paths as parameters, and put them at app.open_resource.'''
    with app.open_resource("archive.pdf") as pdf_attachment:
        msg.attach("archive.pdf", "application/pdf", pdf_attachment.read())
    with app.open_resource("image.png") as png_attachment:
        msg.attach("image.png", "image/png", png_attachment.read())
    mail.send(msg) # Finally, the email is sent.
10ab74ad6028df5b59e27ebde00224e8f2f37d03 | 31,510 | py | Python | src/opserver/uveserver.py | biswajit-mandal/contrail-controller | 80c4a7e8515f7296b18ba4c21a439bd3daefcc4a | [
"Apache-2.0"
] | null | null | null | src/opserver/uveserver.py | biswajit-mandal/contrail-controller | 80c4a7e8515f7296b18ba4c21a439bd3daefcc4a | [
"Apache-2.0"
] | null | null | null | src/opserver/uveserver.py | biswajit-mandal/contrail-controller | 80c4a7e8515f7296b18ba4c21a439bd3daefcc4a | [
"Apache-2.0"
] | 1 | 2021-03-09T10:44:33.000Z | 2021-03-09T10:44:33.000Z | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
#
# UVEServer
#
# Operational State Server for UVEs
#
import gevent
import json
import copy
import xmltodict
import redis
import datetime
import sys
from opserver_util import OpServerUtils
import re
from gevent.coros import BoundedSemaphore
from pysandesh.util import UTCTimestampUsec
from pysandesh.connection_info import ConnectionState
from sandesh.viz.constants import UVE_MAP
from pysandesh.gen_py.process_info.ttypes import ConnectionType,\
ConnectionStatus
import traceback
class UVEServer(object):
    def __init__(self, redis_uve_server, logger, redis_password=None, \
            uvedbcache=None, usecache=False):
        # redis_uve_server: (ip, port) tuple of the local redis instance.
        self._local_redis_uve = redis_uve_server
        # (ip, port) -> StrictRedis handle; None while the instance is down.
        self._redis_uve_map = {}
        self._logger = logger
        # Cached connection to the local redis, established lazily in run().
        self._redis = None
        # Optional UVE DB cache, consulted when usecache is True.
        self._uvedbcache = uvedbcache
        self._usecache = usecache
        self._redis_password = redis_password
        # Reverse of UVE_MAP: maps table names back to UVE type names.
        self._uve_reverse_map = {}
        for h,m in UVE_MAP.iteritems():
            self._uve_reverse_map[m] = h
    #end __init__
def redis_instances(self):
return set(self._redis_uve_map.keys())
    def update_redis_uve_list(self, redis_uve_list):
        """Reconcile the tracked redis instance map with a fresh list of
        (ip, port) endpoints, publishing connection-state transitions for
        removed (delete) and added (INIT) instances."""
        newlist = set(redis_uve_list)
        chg = False
        # if some redis instances are gone, remove them from our map
        # (.keys() yields a list copy on Python 2, so deletion while
        # iterating is safe here)
        for test_elem in self._redis_uve_map.keys():
            if test_elem not in newlist:
                chg = True
                r_ip = test_elem[0]
                r_port = test_elem[1]
                del self._redis_uve_map[test_elem]
                ConnectionState.delete(ConnectionType.REDIS_UVE,\
                    r_ip+":"+str(r_port))
        # new redis instances need to be inserted into the map
        for test_elem in newlist:
            if test_elem not in self._redis_uve_map:
                chg = True
                r_ip = test_elem[0]
                r_port = test_elem[1]
                self._redis_uve_map[test_elem] = None
                ConnectionState.update(ConnectionType.REDIS_UVE,\
                    r_ip+":"+str(r_port), ConnectionStatus.INIT)
        if chg:
            self._logger.error("updated redis_uve_list %s" % str(self._redis_uve_map))
        # Exercise redis connections to update health
        if len(newlist):
            self.get_uve("ObjectCollectorInfo:__NONE__", False, None)
    # end update_redis_uve_list
    def fill_redis_uve_info(self, redis_uve_info):
        """Populate *redis_uve_info* with the local redis endpoint and a
        status string determined by an actual PING to the server."""
        redis_uve_info.ip = self._local_redis_uve[0]
        redis_uve_info.port = self._local_redis_uve[1]
        redish = redis.StrictRedis(self._local_redis_uve[0],
                                   self._local_redis_uve[1],
                                   password=self._redis_password,
                                   db=1)
        try:
            redish.ping()
        except redis.exceptions.ConnectionError:
            redis_uve_info.status = 'DisConnected'
        else:
            redis_uve_info.status = 'Connected'
    #end fill_redis_uve_info
    @staticmethod
    def merge_previous(state, key, typ, attr, prevdict):
        """Merge an incoming 'previous' aggregate value *prevdict* into a
        deep copy of state[key][typ][attr]['previous'] and return the new
        state.  Counters are summed; 'append' lists are concatenated and
        then re-consolidated.  (Python 2 module: uses print statements.)"""
        print "%s New val is %s" % (attr, prevdict)
        nstate = copy.deepcopy(state)
        if UVEServer._is_agg_item(prevdict):
            # integer counter: add the new count onto the stored one
            count = int(state[key][typ][attr]['previous']['#text'])
            count += int(prevdict['#text'])
            nstate[key][typ][attr]['previous']['#text'] = str(count)
        if UVEServer._is_agg_list(prevdict):
            # append-type list: concatenate and fix up the @size annotation
            sname = ParallelAggregator.get_list_name(
                state[key][typ][attr]['previous'])
            count = len(prevdict['list'][sname]) + \
                len(state[key][typ][attr]['previous']['list'][sname])
            nstate[key][typ][attr]['previous']['list'][sname].extend(
                prevdict['list'][sname])
            nstate[key][typ][attr]['previous']['list']['@size'] = \
                str(count)
            # re-consolidate entries that share the same listkey
            tstate = {}
            tstate[typ] = {}
            tstate[typ][attr] = copy.deepcopy(
                nstate[key][typ][attr]['previous'])
            nstate[key][typ][attr]['previous'] =\
                ParallelAggregator.consolidate_list(tstate, typ, attr)
        print "%s Merged val is %s"\
            % (attr, nstate[key][typ][attr]['previous'])
        return nstate
    def run(self):
        """Greenlet body: watch the local redis "DELETED" list and purge
        the keys it names, maintaining the LOCAL connection state
        (INIT -> UP / DOWN) as the connection comes and goes."""
        ConnectionState.update(conn_type = ConnectionType.REDIS_UVE,
            name = 'LOCAL', status = ConnectionStatus.INIT)
        while True:
            if self._redis:
                redish = self._redis
            else:
                # no live handle yet; make a fresh client for the ping probe
                redish = redis.StrictRedis(self._local_redis_uve[0],
                                           self._local_redis_uve[1],
                                           password=self._redis_password,
                                           db=1)
            try:
                if not self._redis:
                    value = ""
                    redish.ping()
                else:
                    # blocking pop; returns once a deletion is queued
                    k, value = redish.brpop("DELETED")
                    self._logger.debug("%s del received for " % value)
                    # value is of the format:
                    # DEL:<key>:<src>:<node-type>:<module>:<instance-id>:<message-type>:<seqno>
                    redish.delete(value)
            except gevent.GreenletExit:
                self._logger.error('UVEServer Exiting on gevent-kill')
                break
            except:
                if self._redis:
                    #send redis connection down msg. Coule be bcos of authentication
                    ConnectionState.update(conn_type = ConnectionType.REDIS_UVE,
                        name = 'LOCAL', status = ConnectionStatus.DOWN)
                    self._redis = None
                # back off before retrying the connection
                gevent.sleep(5)
            else:
                self._logger.debug("Deleted %s" % value)
                if not self._redis:
                    # first successful round trip: cache handle, publish UP
                    self._redis = redish
                    ConnectionState.update(conn_type = ConnectionType.REDIS_UVE,
                        name = 'LOCAL', status = ConnectionStatus.UP)
@staticmethod
def _is_agg_item(attr):
if attr['@type'] in ['i8', 'i16', 'i32', 'i64', 'byte',
'u8', 'u16', 'u32', 'u64']:
if '@aggtype' in attr:
if attr['@aggtype'] == "counter":
return True
return False
@staticmethod
def _is_agg_list(attr):
if attr['@type'] in ['list']:
if '@aggtype' in attr:
if attr['@aggtype'] == "append":
return True
return False
    def get_part(self, part, r_inst):
        # Get UVE and Type contents of given partition on given
        # collector/redis instance.
        # Returns ("ip:port", {generator: {uve_key: {type: {}}}}).
        # NOTE(review): `uves` is assigned but never used; and if
        # _redis_inst_get raises before gen_uves is bound, the final
        # return would hit an unbound local -- verify intended behavior.
        uves = {}
        try:
            r_ip = r_inst[0]
            r_port = r_inst[1]
            redish = self._redis_inst_get(r_inst)
            gen_uves = {}
            for elems in redish.smembers("PART2KEY:" + str(part)):
                # member format: src:node-type:module:instance-id:type:key
                info = elems.split(":", 5)
                gen = info[0] + ":" + info[1] + ":" + info[2] + ":" + info[3]
                typ = info[4]
                key = info[5]
                if not gen in gen_uves:
                    gen_uves[gen] = {}
                if not key in gen_uves[gen]:
                    gen_uves[gen][key] = {}
                gen_uves[gen][key][typ] = {}
        except Exception as e:
            self._logger.error("get_part failed %s for : %s:%d tb %s" \
                % (str(e), r_ip, r_port, traceback.format_exc()))
            self._redis_inst_down(r_inst)
        else:
            self._redis_inst_up(r_inst, redish)
        return r_ip + ":" + str(r_port) , gen_uves
def _redis_inst_get(self, r_inst):
r_ip = r_inst[0]
r_port = r_inst[1]
if not self._redis_uve_map[r_inst]:
return redis.StrictRedis(
host=r_ip, port=r_port,
password=self._redis_password, db=1, socket_timeout=90)
else:
return self._redis_uve_map[r_inst]
def _redis_inst_up(self, r_inst, redish):
if not self._redis_uve_map[r_inst]:
r_ip = r_inst[0]
r_port = r_inst[1]
self._redis_uve_map[r_inst] = redish
ConnectionState.update(ConnectionType.REDIS_UVE,
r_ip + ":" + str(r_port), ConnectionStatus.UP)
def _redis_inst_down(self, r_inst):
if self._redis_uve_map[r_inst]:
r_ip = r_inst[0]
r_port = r_inst[1]
self._redis_uve_map[r_inst] = None
ConnectionState.update(ConnectionType.REDIS_UVE,
r_ip + ":" + str(r_port), ConnectionStatus.DOWN)
    def get_tables(self):
        """Return the union of UVE table names ("TABLE:*" keys, with the
        prefix stripped) across every known redis instance, updating the
        per-instance health as a side effect."""
        tables = set()
        for r_inst in self._redis_uve_map.keys():
            try:
                redish = self._redis_inst_get(r_inst)
                tbs = [elem.split(":",1)[1] for elem in redish.keys("TABLE:*")]
                tables.update(set(tbs))
            except Exception as e:
                self._logger.error("get_tables failed %s for : %s tb %s" \
                    % (str(e), str(r_inst), traceback.format_exc()))
                self._redis_inst_down(r_inst)
            else:
                self._redis_inst_up(r_inst, redish)
        return tables
    def get_uve(self, key, flat, filters=None, base_url=None):
        """Fetch and aggregate the state of the UVE named *key* across all
        known redis instances.

        filters may contain 'sfilt' (source), 'mfilt' (module), 'cfilt'
        (type -> attribute set) and 'ackfilt' (alarm ack 'true'/'false').
        Returns (failures, aggregated_dict) where failures is True if any
        redis instance could not be queried.
        """
        filters = filters or {}
        sfilter = filters.get('sfilt')
        mfilter = filters.get('mfilt')
        tfilter = filters.get('cfilt')
        ackfilter = filters.get('ackfilt')
        # Fast path: serve flat, unfiltered-by-origin queries from the cache.
        if flat and not sfilter and not mfilter and self._usecache:
            return self._uvedbcache.get_uve(key, filters)
        is_alarm = False
        if tfilter == "UVEAlarms":
            is_alarm = True
        state = {}
        state[key] = {}
        rsp = {}
        failures = False
        tab = key.split(":",1)[0]
        for r_inst in self._redis_uve_map.keys():
            try:
                redish = self._redis_inst_get(r_inst)
                qmap = {}
                # Collect the origin sets (source:module:type) for this key.
                ppe = redish.pipeline()
                ppe.smembers("ALARM_ORIGINS:" + key)
                if not is_alarm:
                    ppe.smembers("ORIGINS:" + key)
                pperes = ppe.execute()
                origins = set()
                for origset in pperes:
                    for smt in origset:
                        tt = smt.rsplit(":",1)[1]
                        sm = smt.rsplit(":",1)[0]
                        source = sm.split(":", 1)[0]
                        mdule = sm.split(":", 1)[1]
                        if tfilter is not None:
                            if tt not in tfilter:
                                continue
                        if sfilter is not None:
                            if sfilter != source:
                                continue
                        if mfilter is not None:
                            if mfilter != mdule:
                                continue
                        origins.add(smt)
                # Fetch the attribute hashes of every surviving origin.
                ppeval = redish.pipeline()
                for origs in origins:
                    ppeval.hgetall("VALUES:" + key + ":" + origs)
                odictlist = ppeval.execute()
                idx = 0
                for origs in origins:
                    odict = odictlist[idx]
                    idx = idx + 1
                    info = origs.rsplit(":", 1)
                    dsource = info[0]
                    typ = info[1]
                    afilter_list = set()
                    if tfilter is not None:
                        afilter_list = tfilter[typ]
                    for attr, value in odict.iteritems():
                        if len(afilter_list):
                            if attr not in afilter_list:
                                continue
                        # Values are stored as sandesh XML fragments.
                        if value[0] == '<':
                            snhdict = xmltodict.parse(value)
                            # TODO: This is a hack for separating external
                            # bgp routers from control-nodes
                            if snhdict[attr]['@type'] == 'map':
                                if typ == 'ContrailConfig' and \
                                    tab == 'ObjectBgpRouter' and \
                                    attr == 'elements':
                                    try:
                                        elem = OpServerUtils.uve_attr_flatten(\
                                            snhdict[attr])
                                        vendor = json.loads(\
                                            elem['bgp_router_parameters'])["vendor"]
                                        if vendor != "contrail":
                                            continue
                                    except:
                                        pass
                            elif snhdict[attr]['@type'] == 'list':
                                sname = ParallelAggregator.get_list_name(
                                    snhdict[attr])
                                if snhdict[attr]['list']['@size'] == '0':
                                    continue
                                elif snhdict[attr]['list']['@size'] == '1':
                                    # normalize a singleton into a real list
                                    if not isinstance(
                                        snhdict[attr]['list'][sname], list):
                                        snhdict[attr]['list'][sname] = [
                                            snhdict[attr]['list'][sname]]
                                if typ == 'UVEAlarms' and attr == 'alarms' and \
                                    ackfilter is not None:
                                    # keep only alarms whose ack flag matches
                                    alarms = []
                                    for alarm in snhdict[attr]['list'][sname]:
                                        ack_attr = alarm.get('ack')
                                        if ack_attr:
                                            ack = ack_attr['#text']
                                        else:
                                            ack = 'false'
                                        if ack == ackfilter:
                                            alarms.append(alarm)
                                    if not len(alarms):
                                        continue
                                    snhdict[attr]['list'][sname] = alarms
                                    snhdict[attr]['list']['@size'] = \
                                        str(len(alarms))
                        else:
                            continue
                        # print "Attr %s Value %s" % (attr, snhdict)
                        if typ not in state[key]:
                            state[key][typ] = {}
                        if attr not in state[key][typ]:
                            state[key][typ][attr] = {}
                        if dsource in state[key][typ][attr]:
                            print "Found Dup %s:%s:%s:%s:%s = %s" % \
                                (key, typ, attr, source, mdule, state[
                                    key][typ][attr][dsource])
                        state[key][typ][attr][dsource] = snhdict[attr]
                # Aggregate what this instance contributed so far.
                pa = ParallelAggregator(state, self._uve_reverse_map)
                rsp = pa.aggregate(key, flat, base_url)
            except Exception as e:
                self._logger.error("redis-uve failed %s for : %s tb %s" \
                    % (str(e), str(r_inst), traceback.format_exc()))
                self._redis_inst_down(r_inst)
                failures = True
            else:
                self._redis_inst_up(r_inst, redish)
        self._logger.debug("Computed %s as %s" % (key,rsp.keys()))
        return failures, rsp
    # end get_uve
def get_uve_regex(self, key):
regex = ''
if key[0] != '*':
regex += '^'
regex += key.replace('*', '.*?')
if key[-1] != '*':
regex += '$'
return re.compile(regex)
# end get_uve_regex
    def get_alarms(self, filters):
        """Return the alarm UVE list from the UVE DB cache for the tables
        in filters['tablefilt'], applying compiled 'kfilt' key patterns."""
        tables = filters.get('tablefilt')
        kfilter = filters.get('kfilt')
        patterns = None
        if kfilter is not None:
            patterns = set()
            for filt in kfilter:
                patterns.add(self.get_uve_regex(filt))
        rsp = self._uvedbcache.get_uve_list(tables, filters, patterns, False)
        return rsp
    # end get_alarms
def multi_uve_get(self, table, flat, filters=None, base_url=None):
sfilter = filters.get('sfilt')
mfilter = filters.get('mfilt')
kfilter = filters.get('kfilt')
patterns = None
if kfilter is not None:
patterns = set()
for filt in kfilter:
patterns.add(self.get_uve_regex(filt))
if not sfilter and not mfilter and self._usecache:
rsp = self._uvedbcache.get_uve_list([table], filters, patterns, True)
if table in rsp:
uve_list = rsp[table]
else:
uve_list = set()
else:
# get_uve_list cannot handle attribute names very efficiently,
# so we don't pass them here
uve_list = self.get_uve_list(table, filters, False)
for uve_name in uve_list:
_,uve_val = self.get_uve(
table + ':' + uve_name, flat, filters, base_url)
if uve_val == {}:
continue
else:
uve = {'name': uve_name, 'value': uve_val}
yield uve
# end multi_uve_get
    def get_uve_list(self, table, filters=None, parse_afilter=False):
        """Return the set of UVE key names in *table* that pass the key
        ('kfilt'), source ('sfilt'), module ('mfilt') and type ('cfilt')
        filters.  When parse_afilter is True, additionally require that at
        least one of the requested attributes exists in redis."""
        is_alarm = False
        filters = filters or {}
        tfilter = filters.get('cfilt')
        if tfilter == "UVEAlarms":
            is_alarm = True
        uve_list = set()
        kfilter = filters.get('kfilt')
        sfilter = filters.get('sfilt')
        mfilter = filters.get('mfilt')
        patterns = None
        if kfilter is not None:
            patterns = set()
            for filt in kfilter:
                patterns.add(self.get_uve_regex(filt))
        # Cache fast path: no source/module filtering required.
        if not sfilter and not mfilter and self._usecache:
            rsp = self._uvedbcache.get_uve_list([table], filters, patterns)
            if table in rsp:
                uve_list = rsp[table]
            return uve_list
        for r_inst in self._redis_uve_map.keys():
            try:
                redish = self._redis_inst_get(r_inst)
                # For UVE queries, we wanna read both UVE and Alarm table
                entries = redish.smembers('ALARM_TABLE:' + table)
                if not is_alarm:
                    entries = entries.union(redish.smembers('TABLE:' + table))
                for entry in entries:
                    # member format (after the table prefix):
                    # key:src:node-type:module:instance-id:type
                    info = (entry.split(':', 1)[1]).rsplit(':', 5)
                    uve_key = info[0]
                    if kfilter is not None:
                        kfilter_match = False
                        for pattern in patterns:
                            if pattern.match(uve_key):
                                kfilter_match = True
                                break
                        if not kfilter_match:
                            continue
                    src = info[1]
                    if sfilter is not None:
                        if sfilter != src:
                            continue
                    module = info[2]+':'+info[3]+':'+info[4]
                    if mfilter is not None:
                        if mfilter != module:
                            continue
                    typ = info[5]
                    if tfilter is not None:
                        if typ not in tfilter:
                            continue
                    if parse_afilter:
                        if tfilter is not None and len(tfilter[typ]):
                            valkey = "VALUES:" + table + ":" + uve_key + \
                                ":" + src + ":" + module + ":" + typ
                            # accept the key if any requested attr exists
                            for afilter in tfilter[typ]:
                                attrval = redish.hget(valkey, afilter)
                                if attrval is not None:
                                    break
                            if attrval is None:
                                continue
                    uve_list.add(uve_key)
            except Exception as e:
                self._logger.error("get_uve_list failed %s for : %s tb %s" \
                    % (str(e), str(r_inst), traceback.format_exc()))
                self._redis_inst_down(r_inst)
            else:
                self._redis_inst_up(r_inst, redish)
        return uve_list
    # end get_uve_list
# end UVEServer
class ParallelAggregator:
def __init__(self, state, rev_map = {}):
self._state = state
self._rev_map = rev_map
def _default_agg(self, oattr):
itemset = set()
result = []
for source in oattr.keys():
elem = oattr[source]
hdelem = json.dumps(elem)
if hdelem not in itemset:
itemset.add(hdelem)
result.append([elem, source])
else:
for items in result:
if elem in items:
items.append(source)
return result
def _is_sum(self, oattr):
akey = oattr.keys()[0]
if '@aggtype' not in oattr[akey]:
return False
if oattr[akey]['@aggtype'] in ["sum"]:
return True
if oattr[akey]['@type'] in ['i8', 'i16', 'i32', 'i64',
'byte', 'u8', 'u16', 'u32', 'u64']:
if oattr[akey]['@aggtype'] in ["counter"]:
return True
return False
def _is_list_union(self, oattr):
akey = oattr.keys()[0]
if not oattr[akey]['@type'] in ["list"]:
return False
if '@aggtype' not in oattr[akey]:
return False
if oattr[akey]['@aggtype'] in ["union"]:
return True
else:
return False
def _is_map_union(self, oattr):
akey = oattr.keys()[0]
if not oattr[akey]['@type'] in ["map"]:
return False
if '@aggtype' not in oattr[akey]:
return False
if oattr[akey]['@aggtype'] in ["union"]:
return True
else:
return False
def _is_append(self, oattr):
akey = oattr.keys()[0]
if not oattr[akey]['@type'] in ["list"]:
return False
if '@aggtype' not in oattr[akey]:
return False
if oattr[akey]['@aggtype'] in ["append"]:
return True
else:
return False
@staticmethod
def get_list_name(attr):
sname = ""
for sattr in attr['list'].keys():
if sattr[0] not in ['@']:
sname = sattr
return sname
@staticmethod
def _get_list_key(elem):
skey = ""
for sattr in elem.keys():
if '@aggtype' in elem[sattr]:
if elem[sattr]['@aggtype'] in ["listkey"]:
skey = sattr
return skey
def _sum_agg(self, oattr):
akey = oattr.keys()[0]
result = copy.deepcopy(oattr[akey])
count = 0
for source in oattr.keys():
count += int(oattr[source]['#text'])
result['#text'] = str(count)
return result
def _list_union_agg(self, oattr):
akey = oattr.keys()[0]
result = {}
for anno in oattr[akey].keys():
if anno[0] == "@":
result[anno] = oattr[akey][anno]
itemset = set()
sname = ParallelAggregator.get_list_name(oattr[akey])
result['list'] = {}
result['list'][sname] = []
result['list']['@type'] = oattr[akey]['list']['@type']
siz = 0
for source in oattr.keys():
if isinstance(oattr[source]['list'][sname], basestring):
oattr[source]['list'][sname] = [oattr[source]['list'][sname]]
for elem in oattr[source]['list'][sname]:
hdelem = json.dumps(elem)
if hdelem not in itemset:
itemset.add(hdelem)
result['list'][sname].append(elem)
siz += 1
result['list']['@size'] = str(siz)
return result
def _map_union_agg(self, oattr):
akey = oattr.keys()[0]
result = {}
for anno in oattr[akey].keys():
if anno[0] == "@":
result[anno] = oattr[akey][anno]
result['map'] = {}
result['map']['@key'] = 'string'
result['map']['@value'] = oattr[akey]['map']['@value']
result['map']['element'] = []
siz = 0
for source in oattr.keys():
for subidx in range(0,int(oattr[source]['map']['@size'])):
result['map']['element'].append(source + ":" + \
json.dumps(oattr[source]['map']['element'][subidx*2]))
result['map']['element'].append(\
oattr[source]['map']['element'][(subidx*2) + 1])
siz += 1
result['map']['@size'] = str(siz)
return result
def _append_agg(self, oattr):
akey = oattr.keys()[0]
result = copy.deepcopy(oattr[akey])
sname = ParallelAggregator.get_list_name(oattr[akey])
result['list'][sname] = []
siz = 0
for source in oattr.keys():
if not isinstance(oattr[source]['list'][sname], list):
oattr[source]['list'][sname] = [oattr[source]['list'][sname]]
for elem in oattr[source]['list'][sname]:
result['list'][sname].append(elem)
siz += 1
result['list']['@size'] = str(siz)
return result
@staticmethod
def _list_agg_attrs(item):
for ctrs in item.keys():
if '@aggtype'in item[ctrs]:
if item[ctrs]['@aggtype'] in ["listkey"]:
continue
if item[ctrs]['@type'] in ['i8', 'i16', 'i32', 'i64',
'byte', 'u8', 'u16', 'u32', 'u64']:
yield ctrs
    @staticmethod
    def consolidate_list(result, typ, objattr):
        """Merge entries of the appended list result[typ][objattr] that
        share the same listkey value, summing their integer counter
        fields.  Returns the (possibly unchanged) attribute value."""
        applist = ParallelAggregator.get_list_name(
            result[typ][objattr])
        appkey = ParallelAggregator._get_list_key(
            result[typ][objattr]['list'][applist][0])
        # There is no listkey ; no consolidation is possible
        if len(appkey) == 0:
            return result
        # If the list's underlying struct has a listkey present,
        # we need to further aggregate entries that have the
        # same listkey
        mod_result = copy.deepcopy(result[typ][objattr])
        mod_result['list'][applist] = []
        res_size = 0
        mod_result['list']['@size'] = int(res_size)
        # Add up stats
        # counters are converted '#text' str -> int for summing here, and
        # converted back to strings after the loop below
        for items in result[typ][objattr]['list'][applist]:
            matched = False
            for res_items in mod_result['list'][applist]:
                if items[appkey]['#text'] in [res_items[appkey]['#text']]:
                    for ctrs in ParallelAggregator._list_agg_attrs(items):
                        res_items[ctrs]['#text'] += int(items[ctrs]['#text'])
                    matched = True
            if not matched:
                newitem = copy.deepcopy(items)
                for ctrs in ParallelAggregator._list_agg_attrs(items):
                    newitem[ctrs]['#text'] = int(items[ctrs]['#text'])
                mod_result['list'][applist].append(newitem)
                res_size += 1
        # Convert results back into strings
        for res_items in mod_result['list'][applist]:
            for ctrs in ParallelAggregator._list_agg_attrs(res_items):
                res_items[ctrs]['#text'] = str(res_items[ctrs]['#text'])
        mod_result['list']['@size'] = str(res_size)
        return mod_result
    def aggregate(self, key, flat, base_url = None):
        '''
        This function does parallel aggregation of this UVE's state.
        It aggregates across all sources and return the global state of the UVE

        :param key: UVE key to aggregate state for.
        :param flat: when True, flatten each aggregated attribute via
            OpServerUtils.uve_attr_flatten before returning it.
        :param base_url: accepted for interface compatibility; unused here.
        :returns: dict of {type: {attribute: aggregated value}}.
        '''
        result = {}
        try:
            for typ in self._state[key].keys():
                result[typ] = {}
                for objattr in self._state[key][typ].keys():
                    # Dispatch on the attribute's declared aggregation kind:
                    # sum, list-union, map-union, append, or default.
                    if self._is_sum(self._state[key][typ][objattr]):
                        sum_res = self._sum_agg(self._state[key][typ][objattr])
                        if flat:
                            result[typ][objattr] = \
                                OpServerUtils.uve_attr_flatten(sum_res)
                        else:
                            result[typ][objattr] = sum_res
                    elif self._is_list_union(self._state[key][typ][objattr]):
                        unionl_res = self._list_union_agg(
                            self._state[key][typ][objattr])
                        if flat:
                            result[typ][objattr] = \
                                OpServerUtils.uve_attr_flatten(unionl_res)
                        else:
                            result[typ][objattr] = unionl_res
                    elif self._is_map_union(self._state[key][typ][objattr]):
                        unionm_res = self._map_union_agg(
                            self._state[key][typ][objattr])
                        if flat:
                            result[typ][objattr] = \
                                OpServerUtils.uve_attr_flatten(unionm_res)
                        else:
                            result[typ][objattr] = unionm_res
                    elif self._is_append(self._state[key][typ][objattr]):
                        # Append aggregation concatenates per-source lists,
                        # then consolidates entries sharing the same listkey.
                        result[typ][objattr] = self._append_agg(
                            self._state[key][typ][objattr])
                        append_res = ParallelAggregator.consolidate_list(
                            result, typ, objattr)
                        if flat:
                            result[typ][objattr] =\
                                OpServerUtils.uve_attr_flatten(append_res)
                        else:
                            result[typ][objattr] = append_res
                    else:
                        default_res = self._default_agg(
                            self._state[key][typ][objattr])
                        if flat:
                            # A single (value, source) pair flattens directly;
                            # multiple pairs are flattened element-wise.
                            if (len(default_res) == 1):
                                result[typ][objattr] =\
                                    OpServerUtils.uve_attr_flatten(
                                        default_res[0][0])
                            else:
                                nres = []
                                for idx in range(len(default_res)):
                                    nres.append(default_res[idx])
                                    # NOTE(review): this mutates the entries of
                                    # default_res in place (nres shares them).
                                    nres[idx][0] =\
                                        OpServerUtils.uve_attr_flatten(
                                            default_res[idx][0])
                                result[typ][objattr] = nres
                        else:
                            result[typ][objattr] = default_res
        except KeyError:
            # Unknown key, or state shape missing expected fields: return
            # whatever was aggregated so far (possibly empty).
            pass
        return result
if __name__ == '__main__':
uveserver = UVEServer(None, 0, None, None)
gevent.spawn(uveserver.run())
_, uve_state = json.loads(uveserver.get_uve("abc-corp:vn02", False))
print json.dumps(uve_state, indent=4, sort_keys=True)
| 39.142857 | 95 | 0.469153 |
40c68dfdc4119efe58075bfdf7dca2a2dae10bc1 | 11,093 | py | Python | nova/network/quantum/client.py | armaan/nova | 22859fccb95502efcb73ecf2bd827c45c0886bd3 | [
"Apache-2.0"
] | 1 | 2021-11-08T10:11:44.000Z | 2021-11-08T10:11:44.000Z | nova/network/quantum/client.py | armaan/nova | 22859fccb95502efcb73ecf2bd827c45c0886bd3 | [
"Apache-2.0"
] | null | null | null | nova/network/quantum/client.py | armaan/nova | 22859fccb95502efcb73ecf2bd827c45c0886bd3 | [
"Apache-2.0"
] | 1 | 2020-05-10T16:36:03.000Z | 2020-05-10T16:36:03.000Z | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Citrix Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Tyler Smith, Cisco Systems
import httplib
import json
import socket
import urllib
# FIXME(danwent): All content in this file should be removed once the
# packaging work for the quantum client libraries is complete.
# At that point, we will be able to just install the libraries as a
# dependency and import from quantum.client.* and quantum.common.*
# Until then, we have simplified versions of these classes in this file.
class JSONSerializer(object):
    """Minimal JSON-only (de)serializer.

    Stand-in until the real serializer can be imported from the quantum
    client library. The *content_type* argument is accepted for interface
    parity but ignored, since only JSON is supported.
    """

    def serialize(self, data, content_type):
        """Render *data* as a JSON string, falling back to to_primitive()
        (expected from the importing module) for non-JSON-native objects."""
        try:
            serialized = json.dumps(data)
        except TypeError:
            serialized = json.dumps(to_primitive(data))
        return serialized

    def deserialize(self, data, content_type):
        """Parse the JSON string *data* back into Python objects."""
        return json.loads(data)
# The full client lib will expose more
# granular exceptions, for now, just try to distinguish
# between the cases we care about.
class QuantumNotFoundException(Exception):
    """Indicates that Quantum Server returned 404.

    Raised by Client.do_request() when the server reports that the
    requested entity does not exist.
    """
    pass
class QuantumServerException(Exception):
    """Indicates any non-404 error from Quantum Server.

    Raised by Client.do_request() for HTTP statuses outside the 2xx
    success set (other than 404, which maps to QuantumNotFoundException).
    """
    pass
class QuantumIOException(Exception):
    """Indicates network IO trouble reaching Quantum Server.

    Raised by Client.do_request() when the underlying socket or IO layer
    fails before a response can be read.
    """
    pass
class api_call(object):
    """A Decorator to add support for format and tenant overriding.

    Implemented as a non-data descriptor: decorated methods accept
    optional 'format' and 'tenant' keyword arguments that override the
    client's attributes for the duration of a single call, after which
    the original values are restored.
    """

    def __init__(self, func):
        # The undecorated method; invoked through the descriptor protocol.
        self.func = func

    def __get__(self, instance, owner):
        def with_params(*args, **kwargs):
            """Temporarily set format and tenant for this request"""
            saved_format = instance.format
            saved_tenant = instance.tenant
            instance.format = kwargs.get('format', saved_format)
            instance.tenant = kwargs.get('tenant', saved_tenant)
            try:
                return self.func(instance, *args)
            finally:
                # Restore the client's original settings even on error.
                instance.format = saved_format
                instance.tenant = saved_tenant
        return with_params
class Client(object):
    """A base client class - derived from Glance.BaseClient

    Thin HTTP client for the Quantum v1.0 API. All request URLs are built
    from the path templates below, prefixed with the per-tenant action
    prefix. NOTE: this module uses Python 2 syntax (httplib, urllib.urlencode,
    ``except ..., e``) and is not Python 3 compatible as written.
    """
    action_prefix = '/v1.0/tenants/{tenant_id}'
    """Action query strings"""
    networks_path = "/networks"
    network_path = "/networks/%s"
    ports_path = "/networks/%s/ports"
    port_path = "/networks/%s/ports/%s"
    attachment_path = "/networks/%s/ports/%s/attachment"
    def __init__(self, host="127.0.0.1", port=9696, use_ssl=False, tenant=None,
                 format="xml", testing_stub=None, key_file=None,
                 cert_file=None, logger=None):
        """Creates a new client to some service.
        :param host: The host where service resides
        :param port: The port where service resides
        :param use_ssl: True to use SSL, False to use HTTP
        :param tenant: The tenant ID to make requests with
        :param format: The format to query the server with
        :param testing_stub: A class that stubs basic server methods for tests
        :param key_file: The SSL key file to use if use_ssl is true
        :param cert_file: The SSL cert file to use if use_ssl is true
        :param logger: optional logger; when set, requests/replies are logged
            at debug level
        """
        self.host = host
        self.port = port
        self.use_ssl = use_ssl
        self.tenant = tenant
        self.format = format
        self.connection = None
        self.testing_stub = testing_stub
        self.key_file = key_file
        self.cert_file = cert_file
        self.logger = logger
    def get_connection_type(self):
        """Returns the proper connection type"""
        # Testing stub takes precedence so tests never open real sockets.
        if self.testing_stub:
            return self.testing_stub
        elif self.use_ssl:
            return httplib.HTTPSConnection
        else:
            return httplib.HTTPConnection
    def do_request(self, method, action, body=None,
                   headers=None, params=None):
        """Connects to the server and issues a request.
        Returns the result data, or raises an appropriate exception if
        HTTP status code is not 2xx

        NOTE(review): a 2xx response with an empty body falls through and
        returns None implicitly; callers appear to rely on this.

        :param method: HTTP method ("GET", "POST", "PUT", etc...)
        :param body: string of data to send, or None (default)
        :param headers: mapping of key/value pairs to add as headers
        :param params: dictionary of key/value pairs to add to append
                             to action
        :raises QuantumNotFoundException: on HTTP 404
        :raises QuantumServerException: on any other non-2xx status
        :raises QuantumIOException: on socket/IO failure
        """
        # Ensure we have a tenant id
        if not self.tenant:
            raise Exception(_("Tenant ID not set"))
        # Add format and tenant_id
        action += ".%s" % self.format
        action = Client.action_prefix + action
        action = action.replace('{tenant_id}', self.tenant)
        if type(params) is dict:
            action += '?' + urllib.urlencode(params)
        try:
            connection_type = self.get_connection_type()
            headers = headers or {"Content-Type":
                                      "application/%s" % self.format}
            # Open connection and send request, handling SSL certs
            # (only cert entries that are actually set are forwarded).
            certs = {'key_file': self.key_file, 'cert_file': self.cert_file}
            certs = dict((x, certs[x]) for x in certs if certs[x] != None)
            if self.use_ssl and len(certs):
                c = connection_type(self.host, self.port, **certs)
            else:
                c = connection_type(self.host, self.port)
            if self.logger:
                self.logger.debug(
                    _("Quantum Client Request:\n%(method)s %(action)s\n" %
                      locals()))
                if body:
                    self.logger.debug(body)
            c.request(method, action, body, headers)
            res = c.getresponse()
            status_code = self.get_status_code(res)
            data = res.read()
            if self.logger:
                self.logger.debug("Quantum Client Reply (code = %s) :\n %s" \
                                  % (str(status_code), data))
            if status_code == httplib.NOT_FOUND:
                raise QuantumNotFoundException(
                    _("Quantum entity not found: %s" % data))
            if status_code in (httplib.OK,
                               httplib.CREATED,
                               httplib.ACCEPTED,
                               httplib.NO_CONTENT):
                if data is not None and len(data):
                    return self.deserialize(data, status_code)
            else:
                raise QuantumServerException(
                    _("Server %(status_code)s error: %(data)s"
                      % locals()))
        except (socket.error, IOError), e:
            raise QuantumIOException(_("Unable to connect to "
                                       "server. Got error: %s" % e))
    def get_status_code(self, response):
        """Returns the integer status code from the response, which
        can be either a Webob.Response (used in testing) or httplib.Response
        """
        if hasattr(response, 'status_int'):
            return response.status_int
        else:
            return response.status
    def serialize(self, data):
        """Serialize a dict request body to JSON; None passes through.

        NOTE(review): the error message below says "deserialize" although
        this is the serialize path - likely a copy/paste slip.
        """
        if not data:
            return None
        elif type(data) is dict:
            return JSONSerializer().serialize(data, self.content_type())
        else:
            raise Exception(_("unable to deserialize object of type = '%s'" %
                              type(data)))
    def deserialize(self, data, status_code):
        """Deserialize a response body; HTTP 202 bodies are returned raw."""
        if status_code == 202:
            return data
        return JSONSerializer().deserialize(data, self.content_type())
    def content_type(self, format=None):
        """Return the MIME type for *format* (defaults to self.format)."""
        if not format:
            format = self.format
        return "application/%s" % (format)
    @api_call
    def list_networks(self):
        """Fetches a list of all networks for a tenant"""
        return self.do_request("GET", self.networks_path)
    @api_call
    def show_network_details(self, network):
        """Fetches the details of a certain network"""
        return self.do_request("GET", self.network_path % (network))
    @api_call
    def create_network(self, body=None):
        """Creates a new network"""
        body = self.serialize(body)
        return self.do_request("POST", self.networks_path, body=body)
    @api_call
    def update_network(self, network, body=None):
        """Updates a network"""
        body = self.serialize(body)
        return self.do_request("PUT", self.network_path % (network), body=body)
    @api_call
    def delete_network(self, network):
        """Deletes the specified network"""
        return self.do_request("DELETE", self.network_path % (network))
    @api_call
    def list_ports(self, network):
        """Fetches a list of ports on a given network"""
        return self.do_request("GET", self.ports_path % (network))
    @api_call
    def show_port_details(self, network, port):
        """Fetches the details of a certain port"""
        return self.do_request("GET", self.port_path % (network, port))
    @api_call
    def create_port(self, network, body=None):
        """Creates a new port on a given network"""
        body = self.serialize(body)
        return self.do_request("POST", self.ports_path % (network), body=body)
    @api_call
    def delete_port(self, network, port):
        """Deletes the specified port from a network"""
        return self.do_request("DELETE", self.port_path % (network, port))
    @api_call
    def set_port_state(self, network, port, body=None):
        """Sets the state of the specified port"""
        body = self.serialize(body)
        return self.do_request("PUT",
                               self.port_path % (network, port), body=body)
    @api_call
    def show_port_attachment(self, network, port):
        """Fetches the attachment-id associated with the specified port"""
        return self.do_request("GET", self.attachment_path % (network, port))
    @api_call
    def attach_resource(self, network, port, body=None):
        """Sets the attachment-id of the specified port"""
        body = self.serialize(body)
        return self.do_request("PUT",
                               self.attachment_path % (network, port), body=body)
    @api_call
    def detach_resource(self, network, port):
        """Removes the attachment-id of the specified port"""
        return self.do_request("DELETE",
                               self.attachment_path % (network, port))
| 36.016234 | 79 | 0.605247 |
60aa400a10f91062a6733f9cc25c5e85b53d57d6 | 562 | py | Python | scripts/remove_pytorch_module.py | HuaizhengZhang/scene-recognition-pytorch1.x-tf2.x | 9fd7f664beb98802a9da67e633684d4cc8a67383 | [
"MIT"
] | 14 | 2020-05-09T09:21:23.000Z | 2021-06-27T07:06:21.000Z | scripts/remove_pytorch_module.py | HuaizhengZhang/scene-recognition-pytorch1.x-tf2.x | 9fd7f664beb98802a9da67e633684d4cc8a67383 | [
"MIT"
] | 1 | 2020-05-27T01:52:25.000Z | 2020-05-27T03:52:13.000Z | scripts/remove_pytorch_module.py | HuaizhengZhang/scene-recognition-pytorch1.x | 9fd7f664beb98802a9da67e633684d4cc8a67383 | [
"MIT"
] | 1 | 2021-08-20T12:51:40.000Z | 2021-08-20T12:51:40.000Z | import torch
import torch.nn as nn
import torchvision.models as models
arch = 'squeezenet1_0'
model_file = 'checkpoints/%s_best.pth.tar' % arch
save_pt_path = 'weights/%s_places365.pt' % arch
# https://discuss.pytorch.org/t/missing-keys-unexpected-keys-in-state-dict-when-loading-self-trained-model/22379/8
model = models.__dict__[arch](num_classes=365)
checkpoint = torch.load(model_file)
model = nn.DataParallel(model)
model.load_state_dict(checkpoint['state_dict'])
torch.save(model.module.state_dict(), save_pt_path)
print("Finish saving pytorch model")
| 31.222222 | 114 | 0.786477 |
b38e06e951d26fe715afe579b6b4e00fb0fe0ffe | 5,078 | py | Python | logo/tleePicEditor.py | GaTech-IEEE-Hardware/administrative | bb0684b33996834c2a8d0e47306b05a7cfd7fab3 | [
"MIT"
] | null | null | null | logo/tleePicEditor.py | GaTech-IEEE-Hardware/administrative | bb0684b33996834c2a8d0e47306b05a7cfd7fab3 | [
"MIT"
] | null | null | null | logo/tleePicEditor.py | GaTech-IEEE-Hardware/administrative | bb0684b33996834c2a8d0e47306b05a7cfd7fab3 | [
"MIT"
] | null | null | null | """
Background Remover
==================
Function to remove backgrounds which fit user input criteria from pictures
- Saves files as .png for transparency
- Can be modified to remove black or white backgrounds
- Can also be modified to remove picture and leave white background
- Created by tlee753
- Last Modified 6/29/16
- Version 1.4
"""
from PIL import Image
from numpy import array
# import required methods from libraries
def backRem():
    """Interactively remove a near-uniform background from an image.

    Prompts (Python 2 ``raw_input``) for a file name, a direction
    (remove pixels above/below the thresholds) and per-channel RGBA
    thresholds, makes matching pixels fully transparent, and saves the
    result as ``<name>_edited.png``.

    NOTE(review): if Image.open fails, the except branch only prints and
    execution continues, so ``array(image)`` below will raise NameError.
    Same pattern for invalid threshold input: maskBool/values may be the
    raw strings when the except branches fire.
    """
    inputFile = raw_input("Please type file name string without quotations. \n")
    # asks user to input file name
    try:
        image = Image.open(inputFile)
        image = image.convert('RGBA')
        print "Image successfully loaded.\n"
        # opens image and converts to Red, Green, Blue, Alpha interpretation
    except IOError as error:
        print ("Unable to load file. Please check to make sure file exists and is spelled correctly.\n")
        print ("=" * 50)
        print ("Error Arguments: " + str(error.args))
        print ("=" * 50)
        # errors if the input is not an image file
    data = array(image)
    # converts the image into four interconnected arrays of RGBA values ranging from 0 to 255
    [red, green, blue, alpha] = data.T
    # splits the four layers of the array into separate single layered arrays
    maskInput = raw_input("Would you like to remove RGB pixels with values above mask? Default is yes/above to remove white background.\n")
    # creates an option for removing pixels above or below the user specified values
    maskInput = maskInput.lower()
    maskInput = maskInput.replace(" ", "")
    # adjusts the user input for easier response comparison
    if (maskInput == "above" or maskInput == "yes" or maskInput == "" or maskInput == "y" or maskInput == "a" or maskInput == "up"):
        maskBool = True
        print "Removing pixels above user values.\n"
    elif (maskInput == "below" or maskInput == "no" or maskInput == "n" or maskInput == "b" or maskInput == "down"):
        maskBool = False
        print "Removing pixels below user values.\n"
    else:
        raise ValueError('\nUser input is not an available option.\n')
    # sets the mask boolean to above or below depending on the user input
    redValue = raw_input("Value of red for mask. Default: 240 \n")
    greenValue = raw_input("Value of green for mask. Default: 240 \n")
    blueValue = raw_input("Value of blue for mask. Default: 240 \n")
    alphaValue = raw_input("Value of alpha for mask. Always removes values above user input. Default: 255 \n")
    # asks the user for values to mask the background with
    try:
        redValue = int(redValue)
        greenValue = int(greenValue)
        blueValue = int(blueValue)
        alphaValue = int(alphaValue)
        # converts user input to type int
        if (redValue < 0 or redValue > 255 or greenValue < 0 or greenValue > 255 or blueValue < 0 or blueValue > 255 or alphaValue < 0 or alphaValue > 255):
            raise ValueError('\nUser input is not within specified integer value range between 0 and 255.\n')
        # check to see if values are within range 0 - 255
    except ValueError as error:
        print ("\nPlease input integer values between 0 and 255 for mask.\n")
        print ("=" * 50)
        print ("Error Arguments: " + str(error.args))
        print ("=" * 50)
        # errors if the values are not integers between 0 and 255
    except AttributeError as error:
        print ("\nPlease input integer values between \n")
        print ("=" * 50)
        print ("Error Arguments: " + str(error.args))
        print ("=" * 50)
    # Build a boolean mask selecting pixels on the chosen side of all
    # three RGB thresholds (alpha is always compared with >=).
    if maskBool:
        mask = (red >= redValue) & (green >= greenValue) & (blue >= blueValue) & (alpha >= alphaValue)
    else:
        mask = (red <= redValue) & (green <= greenValue) & (blue <= blueValue) & (alpha >= alphaValue)
    # creates a mask for pixels that fit user defined parameters
    data[mask.T] = (0, 0, 0, 0)
    # alters pixels included in the mask to be transparent
    image = Image.fromarray(data)
    # converts four interconnected arrays back into image interpretation
    try:
        outputFile = inputFile[0:len(inputFile)-4] + "_edited.png"
        image.save(outputFile)
        # creates a new file name based on the input and saves the edited image under that name
        print ("\nConversion complete.\n")
        # prints confirmation of successful conversion for user
    except Exception as error:
        print ("Conversion failed. Please check inputs and then contact administrator.")
        print ("=" * 50)
        print ("Error Arguments: " + str(error.args))
        print ("=" * 50)
        # error message if image conversion fails
backRem()
# function call
# shortcuts to remove white/black backgrounds -> overload function
# - just file name string will remove white background
# - file name followed by "white" or "black" string serve as shortcuts for background removal
# add better default support with mask values in particular
# alpha support (above and below) -> four cases
# exception if values aren't int
| 39.364341 | 156 | 0.662662 |
1fa1b263c34d27f285c1e6c5a227ad9662ef6198 | 1,394 | py | Python | operationPackage/cannyDialog.py | nicocccccchou/graduate-pyqt | 4624bdfbbaa33aa979c66e9bd9fa77904cde948b | [
"MIT"
] | null | null | null | operationPackage/cannyDialog.py | nicocccccchou/graduate-pyqt | 4624bdfbbaa33aa979c66e9bd9fa77904cde948b | [
"MIT"
] | null | null | null | operationPackage/cannyDialog.py | nicocccccchou/graduate-pyqt | 4624bdfbbaa33aa979c66e9bd9fa77904cde948b | [
"MIT"
] | null | null | null | from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
class CannyDialog(QDialog):
    """Modal dialog collecting the three Canny edge-detection parameters:
    low threshold, high threshold, and Sobel aperture size.

    The accepted values are read back via getData(). Label text is shown
    in Chinese (user-facing strings are left untranslated).
    """
    def __init__(self, parent=None):
        super(CannyDialog, self).__init__(parent)
        self.setWindowTitle('CannyDialog')
        # Add the input widgets to the layout.
        layout = QVBoxLayout(self)
        self.form = QFormLayout(self)
        # Low threshold (minVal) label + line edit.
        self.minThreshValLabel = QLabel(self)
        self.minThreshValLabel.setText("低阈值minVal")
        self.minThreshValLine = QLineEdit(self)
        # High threshold (maxVal) label + line edit.
        self.maxThreshValLabel = QLabel(self)
        self.maxThreshValLabel.setText("高阈值maxVal")
        self.maxThreshValLine = QLineEdit(self)
        # Sobel aperture size label + line edit.
        self.apertureSizeLabel = QLabel(self)
        self.apertureSizeLabel.setText("Sobel算子的孔径大小")
        self.apertureSizeLine = QLineEdit(self)
        self.form.addRow(self.minThreshValLabel, self.minThreshValLine)
        self.form.addRow(self.maxThreshValLabel, self.maxThreshValLine)
        self.form.addRow(self.apertureSizeLabel, self.apertureSizeLine)
        layout.addLayout(self.form)
        # Standard OK/Cancel button row wired to accept/reject.
        buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel, Qt.Horizontal, self)
        buttons.accepted.connect(self.accept)
        buttons.rejected.connect(self.reject)
        layout.addWidget(buttons)
    def getData(self):
        """Return (minVal, maxVal, apertureSize) as the raw text strings."""
        return self.minThreshValLine.text(),self.maxThreshValLine.text(),self.apertureSizeLine.text()
| 31.681818 | 102 | 0.702296 |
e624ff33794cb84b6c4ca809d58c41e6120bfa33 | 530 | py | Python | qiling/examples/hello_x8664_linux_customapi.py | mrTavas/owasp-fstm-auto | 6e9ff36e46d885701c7419db3eca15f12063a7f3 | [
"CC0-1.0"
] | 2 | 2021-05-05T12:03:01.000Z | 2021-06-04T14:27:15.000Z | qiling/examples/hello_x8664_linux_customapi.py | mrTavas/owasp-fstm-auto | 6e9ff36e46d885701c7419db3eca15f12063a7f3 | [
"CC0-1.0"
] | null | null | null | qiling/examples/hello_x8664_linux_customapi.py | mrTavas/owasp-fstm-auto | 6e9ff36e46d885701c7419db3eca15f12063a7f3 | [
"CC0-1.0"
] | 2 | 2021-05-05T12:03:09.000Z | 2021-06-04T14:27:21.000Z | #!/usr/bin/env python3
#
# Cross Platform and Multi Architecture Advanced Binary Emulation Framework
#
import sys
sys.path.append("..")
from qiling import Qiling
from qiling.os.const import STRING
from qiling.const import QL_VERBOSE
def my_puts(ql: Qiling):
    """Replacement hook for libc puts: resolve the emulated call's single
    string argument and echo the call to stdout."""
    resolved = ql.os.resolve_fcall_params({'s': STRING})
    text = resolved['s']
    print(f'puts("{text}")')
if __name__ == "__main__":
ql = Qiling(["rootfs/x8664_linux/bin/x8664_hello"], "rootfs/x8664_linux", verbose=QL_VERBOSE.DEBUG)
ql.set_api('puts', my_puts)
ql.run()
| 25.238095 | 103 | 0.713208 |
7db27d1e8ca734df2855456c23b4391fe6838a27 | 5,677 | py | Python | graphic_composition_001.py | ayk-caglayan/graphic_composition_tool | 55d773b8299e808ddc8a3a0edffb81d8aafc49ff | [
"MIT"
] | null | null | null | graphic_composition_001.py | ayk-caglayan/graphic_composition_tool | 55d773b8299e808ddc8a3a0edffb81d8aafc49ff | [
"MIT"
] | null | null | null | graphic_composition_001.py | ayk-caglayan/graphic_composition_tool | 55d773b8299e808ddc8a3a0edffb81d8aafc49ff | [
"MIT"
] | null | null | null | #%matplotlib
import matplotlib.pyplot as plt
from matplotlib.backend_bases import MouseButton, KeyEvent
import pyperclip, pickle, sys, time
#plt.ion() #matplotlib interaction on
global activated_plot_nr
activated_plot_nr=0 #this functions as a flag for activated plot to insert and export the data
fig, ax = plt.subplots()
#file import
if len(sys.argv)>1: #if a pickle file path given, loads objects from it
file_to_be_imported=sys.argv[1]
print(file_to_be_imported, type(file_to_be_imported))
#im_po= pickle.load(open(sys.argv[1], "rb"))
imported_stuff=pickle.load(open(sys.argv[1], "rb"))
print(imported_stuff)
nr_of_plots=imported_stuff['nr_of_plots']
x_start=imported_stuff['x_start']
x_end=imported_stuff['x_end']
y_start=imported_stuff['y_start']
y_end=imported_stuff['y_end']
x_range=imported_stuff['x_range']
y_range=imported_stuff['y_range']
plots=imported_stuff['plots']
else: #if not, user inputs through terminal prompt
nr_of_plots=input("Number of plots on a canvas (max. 10): ")
x_start=input("X axis starting value: ")
x_end=input("X axis ending value: ")
y_start=input("Y axis starting value: ")
y_end=input("Y axis ending value: ")
x_range=int(x_end)-int(x_start)
y_range=int(y_end)-int(y_start)
plots={} #dictionary of plots (btw. plots are dictionaries of data points)
for i in range(int(nr_of_plots)):
plots[i]={}
if x_range<20:
x_ticks=1
elif x_range>=20:
x_ticks=int(x_range/20)
if y_range<10:
y_ticks=1
elif y_range>=10:
y_ticks=int(y_range/10)
colors={0: 'blue', 1: 'green', 2:'red', 3: 'yellow', 4:'brown', 5: 'grey', 6: 'olive', 7: 'orange', 8: 'magenta', 9: 'darkgreen'}
color_map=""
for i in range(int(nr_of_plots)):
color_map=color_map+str(i)+". "+str(colors[i]+", ") #formats color map in use
fig.suptitle("LEFT click adds new point, RIGHT removes it, MIDDLE or ctrl+c copies the activated plot to clipboard \n "+"switch colors w/ number keys "+color_map)
fig.text(0,0.01, "CTRL+R writes the plot to a binary file - saved plot file can be opened on the start > python3 graphic_composition_001.py PLOT_21..plt")
def format_file_name():
    """Build a timestamped plot file name of the form PLOT_YYMMDD_HH-MM.plt.

    Uses time.strftime so every field is zero-padded. The previous
    hand-rolled version prefixed the month with a literal '0', producing
    '010'..'012' for October-December, and left day/hour/minute unpadded.
    """
    return time.strftime('PLOT_%y%m%d_%H-%M.plt')
def redraw():
    """Clear the axes and repaint every plot from the module-level state
    (plots, colors, axis limits, tick spacing), then refresh the canvas."""
    ax.clear()
    #ax.plot(plots[activated_plot_nr].keys(), plots[activated_plot_nr].values(), 'bo')
    for i in range(int(nr_of_plots)):
        ax.scatter(plots[i].keys(), plots[i].values(), marker='o', color=colors[i], label='plot ' + str(i))
    # Title advertises which plot number currently receives clicks.
    ax.set_title(colors[activated_plot_nr].capitalize()+"Plot w/ nr. "+str(activated_plot_nr)+" is active")
    plt.xlim([int(x_start), int(x_end)])
    plt.ylim([int(y_start), int(y_end)])
    plt.xticks(range(int(x_start), int(x_end),x_ticks))
    plt.yticks(range(int(y_start), int(y_end), y_ticks))
    plt.grid()
    fig.canvas.draw()
def plot_select(event):
    """Key-press handler: number keys 0..9 activate the matching plot.

    Non-numeric keys (e.g. 'ctrl+c', arrows) are ignored with a message.
    The original bare ``except:`` also swallowed any error raised by
    redraw(); the narrowed handler only catches the key-conversion failure.
    """
    global activated_plot_nr
    try:
        keyy = int(event.key)
    except (TypeError, ValueError):
        # event.key is None or not a plain digit - not a selection key.
        print("invalid input")
        return
    if keyy < 10 and keyy < int(nr_of_plots):
        activated_plot_nr = keyy
        print('plot number ', keyy, ' selected')
        redraw()
def copy_w_ctrl_c(event):
    """Key-press handler: ctrl+c copies the active plot's sorted x list and
    matching y list to the system clipboard (as two concatenated reprs)."""
    if event.key=='ctrl+c':
        sorted_active_plot_x=sorted(plots[activated_plot_nr].keys())
        sorted_active_plot_y=[plots[activated_plot_nr].get(x) for x in sorted_active_plot_x]
        print("The Data of the ", colors[activated_plot_nr].capitalize(), "Plot NO:", activated_plot_nr, "Pasted To Clipboard ", str(sorted_active_plot_x), str(sorted_active_plot_y))
        #pyperclip.copy(str(list(sorted_active_plot_x, sorted_active_plot_y)))
        pyperclip.copy(str(sorted_active_plot_x) + str(sorted_active_plot_y))
def on_click(event):
    """Mouse handler: LEFT adds a point at the clicked data coordinate,
    RIGHT removes the point at the clicked x (if any), MIDDLE copies the
    active plot's data to the clipboard.

    Fixes over the original: clicks outside the axes (where xdata/ydata
    are None) no longer raise TypeError, and RIGHT-clicking an x with no
    stored point no longer raises KeyError.
    """
    if event.button in (MouseButton.LEFT, MouseButton.RIGHT):
        x, y = event.xdata, event.ydata
        if x is None or y is None:
            # Click landed outside the axes; there is no data coordinate.
            return
        if event.button is MouseButton.LEFT:
            plots[activated_plot_nr][int(x)] = int(y)
        else:
            # Remove silently even when no point exists at this x position.
            plots[activated_plot_nr].pop(int(x), None)
        redraw()
    if event.button is MouseButton.MIDDLE:
        sorted_active_plot_x = sorted(plots[activated_plot_nr].keys())
        sorted_active_plot_y = [plots[activated_plot_nr].get(x) for x in sorted_active_plot_x]
        print("The Data of the ", colors[activated_plot_nr].capitalize(), "Plot NO:", activated_plot_nr, "Pasted To Clipboard ", str(sorted_active_plot_x), str(sorted_active_plot_y))
        pyperclip.copy(str(sorted_active_plot_x) + str(sorted_active_plot_y))
def save_plot(event):
    """Key-press handler: ctrl+r pickles the whole session (canvas config
    plus all plot data) to a timestamped .plt file in the working dir."""
    if event.key=='ctrl+r':
        print('saves the plot')
        fileName=format_file_name()
        dump_obj={'nr_of_plots': nr_of_plots, 'x_start': x_start, 'x_end': x_end, 'y_start': y_start, 'y_end': y_end, 'x_range': x_range, 'y_range': y_range, 'plots':plots}
        # NOTE(review): the file handle from open() is never closed here.
        pickle.dump(dump_obj, open(fileName, 'wb'))
# Paint the initial canvas, wire the event handlers, then hand control
# to the matplotlib event loop.
redraw()
plt.connect('key_press_event', plot_select)
plt.connect('button_press_event', on_click)
plt.connect('key_press_event', copy_w_ctrl_c)
plt.connect('key_press_event', save_plot)
plt.xlim([int(x_start), int(x_end)])
plt.ylim([int(y_start), int(y_end)])
plt.xticks(range(int(x_start), int(x_end),x_ticks))
plt.yticks(range(int(y_start), int(y_end), y_ticks))
plt.grid()
plt.show()
| 38.619048 | 182 | 0.683636 |
2cda9ae6b5b814a7f907b2a63311676f05af27cd | 261 | py | Python | day1/py_func/test_funcs.py | rhuey-eqix/pynet-ons | 7c7ce2d1aa263afa7701b2495b0e237110775a1d | [
"Apache-2.0"
] | 1 | 2021-01-11T23:17:26.000Z | 2021-01-11T23:17:26.000Z | day1/py_func/test_funcs.py | rhuey-eqix/pynet-ons | 7c7ce2d1aa263afa7701b2495b0e237110775a1d | [
"Apache-2.0"
] | null | null | null | day1/py_func/test_funcs.py | rhuey-eqix/pynet-ons | 7c7ce2d1aa263afa7701b2495b0e237110775a1d | [
"Apache-2.0"
] | 11 | 2021-12-07T16:04:22.000Z | 2022-02-09T12:20:05.000Z | def my_func(x, y, z=100):
print(f"x is {x}")
print(f"y is {y}")
print(f"z is {z}")
return x + y + z
if __name__ == "__main__":
my_list = [22, 7, 18]
my_dict = {"x": 7, "y": 0, "z": 22}
result = my_func(**my_dict)
print(result)
| 20.076923 | 39 | 0.498084 |
66c3b58c75ed9af51344e49a879c506bc8dd00c2 | 26,564 | py | Python | aws-frauddetector-detector/src/aws_frauddetector_detector/helpers/model_helpers.py | aws-cloudformation/aws-cloudformation-resource-providers-frauddetector | 22b753423496c79c4e327db6e3ff9aff662182ec | [
"Apache-2.0"
] | 4 | 2021-05-24T05:35:05.000Z | 2021-11-08T09:43:48.000Z | aws-frauddetector-detector/src/aws_frauddetector_detector/helpers/model_helpers.py | aws-cloudformation/aws-cloudformation-resource-providers-frauddetector | 22b753423496c79c4e327db6e3ff9aff662182ec | [
"Apache-2.0"
] | 3 | 2021-04-29T20:30:17.000Z | 2021-05-14T16:28:19.000Z | aws-frauddetector-detector/src/aws_frauddetector_detector/helpers/model_helpers.py | aws-cloudformation/aws-cloudformation-resource-providers-frauddetector | 22b753423496c79c4e327db6e3ff9aff662182ec | [
"Apache-2.0"
] | 3 | 2021-04-07T16:03:03.000Z | 2021-10-30T03:25:33.000Z | from typing import List, Optional, Set
from cloudformation_cli_python_lib import (
exceptions,
)
from .. import models
from . import api_helpers, validation_helpers, util
import logging
# Use this logger to forward log messages to CloudWatch Logs.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)
# Tags
def get_tags_from_tag_models(
    tag_models: Optional[List[models.Tag]],
) -> Optional[List[dict]]:
    """Translate CFN Tag models into the {'key': ..., 'value': ...} dicts
    the boto3 frauddetector client expects; None passes through unchanged."""
    if tag_models is None:
        return None
    boto3_tags = []
    for tag_model in tag_models:
        boto3_tags.append({"key": tag_model.Key, "value": tag_model.Value})
    return boto3_tags
def get_tag_models_from_tags(tags: Optional[List[dict]]) -> Optional[List[models.Tag]]:
    """Translate AFD tag dicts back into CFN Tag models, defaulting missing
    'key'/'value' entries to empty strings; None passes through unchanged."""
    if tags is None:
        return None
    tag_models = []
    for tag_dict in tags:
        tag_models.append(
            models.Tag(Key=tag_dict.get("key", ""), Value=tag_dict.get("value", ""))
        )
    return tag_models
def _get_tags_for_given_arn(frauddetector_client, arn):
    """Return the tags attached to the resource *arn*; an absent 'tags'
    key in the ListTagsForResource response is treated as no tags."""
    list_tags_response = api_helpers.call_list_tags_for_resource(frauddetector_client, arn)
    return list_tags_response.get("tags", [])
# Detectors
def put_detector_for_model(frauddetector_client, model: models.ResourceModel):
    """Create or update the detector described by *model* via PutDetector.

    Mutates model.EventType.Name in place when only the ARN was supplied.
    """
    if not model.EventType.Name:
        # Only the event type ARN was given; derive the name from it.
        model.EventType.Name = util.extract_name_from_arn(model.EventType.Arn)
    tags = get_tags_from_tag_models(model.Tags)
    api_helpers.call_put_detector(
        frauddetector_client=frauddetector_client,
        detector_id=model.DetectorId,
        detector_event_type_name=model.EventType.Name,
        detector_tags=tags,
        detector_description=model.Description,
    )
def get_model_for_detector(frauddetector_client, detector, model: models.ResourceModel):
    """Build a full ResourceModel from an AFD detector description.

    Gathers the detector's event type, the requested (or highest-numbered)
    detector version, associated external models and model versions, rule
    models, and tags.

    :param frauddetector_client: boto3 frauddetector client.
    :param detector: dict as returned by the GetDetectors API.
    :param model: the input ResourceModel (supplies DetectorVersionId,
        EventType and referenced-resource hints).
    :raises exceptions.NotFound: if an associated model referenced by the
        detector version cannot be resolved.
    """
    # build model from detector
    detector_id = detector.get("detectorId", "")
    detector_arn = detector.get("arn", "")
    referenced_resources = get_referenced_resources_for_detector(model)
    model_to_return = models.ResourceModel(
        DetectorId=detector_id,
        Arn=detector_arn,
        CreatedTime=detector.get("createdTime", ""),
        LastUpdatedTime=detector.get("lastUpdatedTime", ""),
        Description=detector.get("description", ""),
        EventType=None,
        DetectorVersionId=None,
        DetectorVersionStatus=None,
        RuleExecutionMode=None,
        Rules=[],
        Tags=None,
        AssociatedModels=None,
    )
    # get event type model
    event_type_model = get_event_type_and_return_event_type_model(frauddetector_client, model.EventType)
    model_to_return.EventType = event_type_model
    # get latest detector version info to attach to model
    if model.DetectorVersionId:
        desired_detector_version = api_helpers.call_get_detector_version(
            frauddetector_client, model.DetectorId, model.DetectorVersionId
        )
    else:
        # No explicit version requested: pick the highest version number.
        # NOTE(review): max() raises ValueError if the detector has no
        # versions at all - confirm that cannot occur for managed detectors.
        describe_detectors_response = api_helpers.call_describe_detector(frauddetector_client, detector_id)
        detector_version_summaries = describe_detectors_response.get("detectorVersionSummaries", [])
        max_version_id = max(
            {int(dv_summary.get("detectorVersionId", "-1")) for dv_summary in detector_version_summaries}
        )
        desired_detector_version = api_helpers.call_get_detector_version(
            frauddetector_client, model.DetectorId, str(max_version_id)
        )
    model_to_return.DetectorVersionId = desired_detector_version.get("detectorVersionId", "-1")
    model_to_return.DetectorVersionStatus = desired_detector_version.get("status", "")
    model_to_return.RuleExecutionMode = desired_detector_version.get("ruleExecutionMode", "")
    # Associated models come from two places: external (SageMaker) model
    # endpoints, and AFD model versions.
    associated_models: List[models.Model] = []
    model_endpoints: List[str] = desired_detector_version.get("externalModelEndpoints", [])
    for model_endpoint in model_endpoints:
        get_external_models_response = api_helpers.call_get_external_models(frauddetector_client, model_endpoint)
        external_models = get_external_models_response.get("externalModels", [])
        if not external_models:
            # we should never see this block get executed
            raise exceptions.NotFound("associatedModel", model_endpoint)
        associated_models.append(models.Model(Arn=external_models[0].get("arn", "not/found")))
    model_versions: List[dict] = desired_detector_version.get("modelVersions", [])
    for model_version in model_versions:
        required_attributes = {"modelId", "modelType", "modelVersionNumber"}
        if not required_attributes.issubset(model_version.keys()):
            # we should never see this block get executed
            LOG.error(f"get DV did not include enough information in model versions: {desired_detector_version}")
            raise exceptions.NotFound("associatedModel", model_version)
        model_version_arn = get_model_version_arn_from_model_version(frauddetector_client, model_version)
        associated_models.append(models.Model(Arn=model_version_arn))
    model_to_return.AssociatedModels = associated_models
    # get rule models to attach
    referenced_outcome_names = referenced_resources.get("rule_outcomes")
    for rule in desired_detector_version.get("rules", []):
        rule_detector_id = rule.get("detectorId", "")
        rule_id = rule.get("ruleId", "")
        rule_version = rule.get("ruleVersion", "-1")
        rule_to_append = get_rule_and_return_rule_model(
            frauddetector_client,
            rule_detector_id,
            rule_id,
            rule_version,
            referenced_outcome_names,
        )
        model_to_return.Rules.append(rule_to_append)
    # get tags
    detector_tags = _get_tags_for_given_arn(frauddetector_client, detector_arn)
    # TODO: reorder tags to the same order as the input model to work around contract test bug?
    model_to_return.Tags = get_tag_models_from_tags(detector_tags)
    return model_to_return
def get_model_version_arn_from_model_version(frauddetector_client, model_version: dict) -> str:
    """Resolve the ARN for a model version dict via a GetModelVersion call.

    :param model_version: dict with modelId, modelType, modelVersionNumber keys
    :raises exceptions.NotFound: if the response unexpectedly lacks an arn
    """
    response = api_helpers.call_get_model_version(
        frauddetector_client=frauddetector_client,
        model_id=model_version.get("modelId", "not_found"),
        model_type=model_version.get("modelType", "not_found"),
        model_version_number=model_version.get("modelVersionNumber", "not_found"),
    )
    if "arn" not in response:
        # Should be unreachable: a successful GetModelVersion response carries an arn.
        LOG.error(f"get MV did not include arn: {response}")
        raise exceptions.NotFound("associatedModel", model_version)
    return response["arn"]
# Rules
def create_rule_for_rule_model(
    frauddetector_client, rule_model: models.Rule, detector_model: models.ResourceModel
) -> dict:
    """Create a Fraud Detector rule from the given rule model and return the created rule dict.

    Outcome names come from each outcome model's Name when set, otherwise from its Arn.
    """
    outcome_names = [
        outcome.Name if outcome.Name else util.extract_name_from_arn(outcome.Arn)
        for outcome in rule_model.Outcomes
    ]
    create_rule_response = api_helpers.call_create_rule(
        frauddetector_client=frauddetector_client,
        rule_id=rule_model.RuleId,
        detector_id=detector_model.DetectorId,
        rule_expression=rule_model.Expression,
        rule_language=rule_model.Language,
        rule_outcomes=outcome_names,
        rule_description=rule_model.Description,
        rule_tags=get_tags_from_tag_models(rule_model.Tags),
    )
    return create_rule_response.get("rule")
def get_rule_and_return_rule_model(
    frauddetector_client,
    detector_id: str,
    rule_id: str,
    rule_version: str,
    referenced_outcomes: set,
) -> models.Rule:
    """Fetch a single rule version and convert it to a Rule model.

    Outcomes named in `referenced_outcomes` are attached as ARN-only references;
    all other outcomes are attached as fully populated inline models.

    :raises exceptions.NotFound: if the id/version does not resolve to exactly one rule
    """
    get_rules_response = api_helpers.call_get_rules(
        frauddetector_client=frauddetector_client,
        detector_id=detector_id,
        rule_id=rule_id,
        rule_version=rule_version,
    )
    # Default to [] so a response missing "ruleDetails" surfaces as NotFound
    # below instead of crashing on len(None).
    rule_details = get_rules_response.get("ruleDetails", [])
    if len(rule_details) != 1:
        raise exceptions.NotFound("ruleId:ruleVersion", f"{rule_id}:{rule_version}")
    rule_detail = rule_details[0]
    rule_arn = rule_detail.get("arn", "")
    # "outcomes" is a list of outcome names, so the default must be a list, not "".
    rule_outcome_names = rule_detail.get("outcomes", [])
    model_to_return = models.Rule(
        Arn=rule_arn,
        CreatedTime=rule_detail.get("createdTime", ""),
        Description=rule_detail.get("description", ""),
        DetectorId=rule_detail.get("detectorId", ""),
        Expression=rule_detail.get("expression", ""),
        Language=rule_detail.get("language", ""),
        LastUpdatedTime=rule_detail.get("lastUpdatedTime", ""),
        Outcomes=[],
        RuleId=rule_detail.get("ruleId", ""),
        RuleVersion=rule_detail.get("ruleVersion", ""),
        Tags=None,
    )
    # attach tag models
    rule_tags = _get_tags_for_given_arn(frauddetector_client, rule_arn)
    model_to_return.Tags = get_tag_models_from_tags(rule_tags)
    # attach outcome models
    model_to_return.Outcomes = get_outcomes_model_for_given_outcome_names(
        frauddetector_client=frauddetector_client,
        outcome_names=rule_outcome_names,
        reference_outcome_names=referenced_outcomes,
    )
    return model_to_return
# EventTypes
def put_event_type_for_event_type_model(frauddetector_client, event_type_model: models.EventType):
    """Create or update an event type from the given event type model.

    Dependency names are used directly when the dependency is defined inline,
    otherwise the name is extracted from the dependency's ARN.
    """

    def _dependency_name(dependency) -> str:
        # Lazily pick the branch we need. The previous [a, b][Inline] indexing
        # evaluated extract_name_from_arn(Arn) even for inline dependencies,
        # which could fail when an inline dependency carries no Arn.
        if dependency.Inline:
            return dependency.Name
        return util.extract_name_from_arn(dependency.Arn)

    entity_type_names = [_dependency_name(entity_type) for entity_type in event_type_model.EntityTypes]
    event_variable_names = [_dependency_name(event_variable) for event_variable in event_type_model.EventVariables]
    label_names = [_dependency_name(label) for label in event_type_model.Labels]
    event_type_tags = get_tags_from_tag_models(event_type_model.Tags)
    # call put event type
    api_helpers.call_put_event_type(
        frauddetector_client=frauddetector_client,
        event_type_name=event_type_model.Name,
        entity_type_names=entity_type_names,
        event_variable_names=event_variable_names,
        label_names=label_names,
        event_type_description=event_type_model.Description,
        event_type_tags=event_type_tags,
    )
def get_event_type_and_return_event_type_model(
    frauddetector_client, event_type_model: models.EventType
) -> models.EventType:
    """Fetch an event type by name and convert it to an EventType model.

    Returns a minimal reference model when the input model is not inline,
    otherwise a fully populated inline model.
    """
    event_type_name = event_type_model.Name
    try:
        get_event_types_response = api_helpers.call_get_event_types(frauddetector_client, event_type_name)
        matching_event_types = get_event_types_response.get("eventTypes", [])
        if len(matching_event_types) != 1:
            # A successful get that returns no event types indicates a severe inconsistency.
            LOG.error(f"get_event_types for {event_type_name} worked but did not return any event types!")
            raise exceptions.NotFound("event_type", event_type_name)
        event_type = matching_event_types[0]
        if event_type_model.Inline:
            LOG.debug(f"{event_type_name} is inline")
            referenced_resources = get_referenced_resources_for_event_type(event_type_model)
            return get_model_for_inline_event_type(frauddetector_client, event_type, referenced_resources)
        LOG.debug(f"{event_type_name} is not inline")
        return models.EventType(
            Name=event_type.get("name", ""),
            Arn=event_type.get("arn", ""),
            Tags=None,
            Description=None,
            EventVariables=None,
            Labels=None,
            EntityTypes=None,
            CreatedTime=None,
            LastUpdatedTime=None,
            Inline=False,
        )
    except RuntimeError as e:
        raise exceptions.InternalFailure(f"Error occurred while getting an event type: {e}")
def get_model_for_inline_event_type(frauddetector_client, event_type, referenced_resources: dict):
    """Build a fully populated (inline) EventType model from a get_event_types response item.

    :param referenced_resources: dict with event_variables/labels/entity_types name sets
        that should be attached as references rather than inline models
    """
    event_type_arn = event_type.get("arn", "")
    inline_model = models.EventType(
        Name=event_type.get("name", ""),
        Tags=[],
        Description=event_type.get("description", ""),
        EventVariables=[],
        Labels=[],
        EntityTypes=[],
        Arn=event_type_arn,
        CreatedTime=event_type.get("createdTime", ""),
        LastUpdatedTime=event_type.get("lastUpdatedTime", ""),
        Inline=True,
    )
    # attach Tags
    # TODO: reorder tags to the same order as the input model to work around contract test bug?
    event_type_tags = _get_tags_for_given_arn(frauddetector_client, event_type_arn)
    inline_model.Tags = get_tag_models_from_tags(event_type_tags)
    # attach EventVariables
    inline_model.EventVariables = _get_variables_and_return_event_variables_model(
        frauddetector_client,
        event_type.get("eventVariables", []),
        referenced_resources["event_variables"],
    )
    # attach Labels
    inline_model.Labels = _get_labels_and_return_labels_model(
        frauddetector_client,
        event_type.get("labels", []),
        referenced_resources["labels"],
    )
    # attach EntityTypes
    inline_model.EntityTypes = _get_entity_types_and_return_entity_types_model(
        frauddetector_client,
        event_type.get("entityTypes", []),
        referenced_resources["entity_types"],
    )
    # Strip empty tags/description so the returned model stays minimal.
    if not inline_model.Tags:
        del inline_model.Tags
    if inline_model.Description is None or inline_model.Description == "":
        del inline_model.Description
    return inline_model
# Outcomes
def get_outcomes_model_for_given_outcome_names(frauddetector_client, outcome_names, reference_outcome_names):
    """Convert outcome names into Outcome models.

    Names present in `reference_outcome_names` become ARN-only reference models;
    all others are returned fully populated (inline), including their tags.
    """
    outcome_models = []
    for outcome_name in outcome_names:
        matching_outcomes = api_helpers.call_get_outcomes(frauddetector_client, outcome_name).get("outcomes", [])
        if len(matching_outcomes) != 1:
            raise RuntimeError(
                f"Error! Expected an existing outcome, but outcome did not exist! outcome {outcome_name}"
            )
        outcome = matching_outcomes[0]
        outcome_arn = outcome.get("arn", "")
        LOG.debug(f"checking if outcome {outcome_name} is in {reference_outcome_names}")
        if outcome_name in reference_outcome_names:
            LOG.debug(f"outcome in reference set, {outcome_name} is not defined inline")
            outcome_model = models.Outcome(
                Name=outcome_name,
                Arn=outcome_arn,
                Tags=None,
                Description=None,
                CreatedTime=None,
                LastUpdatedTime=None,
                Inline=False,
            )
        else:
            LOG.debug(f"outcome not in reference set, {outcome_name} is inline")
            tag_models = get_tag_models_from_tags(_get_tags_for_given_arn(frauddetector_client, outcome_arn))
            outcome_model = models.Outcome(
                Name=outcome_name,
                Tags=tag_models,
                Description=outcome.get("description", ""),
                Arn=outcome_arn,
                CreatedTime=outcome.get("createdTime", ""),
                LastUpdatedTime=outcome.get("lastUpdatedTime", ""),
                Inline=True,
            )
        # Strip empty tags/description so reference models stay minimal.
        LOG.debug(f"removing empty descriptions/tags from outcome model: {outcome_model}")
        if not outcome_model.Tags:
            del outcome_model.Tags
        if outcome_model.Description is None or outcome_model.Description == "":
            del outcome_model.Description
        outcome_models.append(outcome_model)
    return outcome_models
# EventVariables
def _get_variables_and_return_event_variables_model(
    frauddetector_client, variable_names, reference_variable_names: set
):
    """Fetch each variable by name and convert the results to EventVariable models."""
    fetched_variables = []
    # One get_variables call per name keeps the output in input order
    # (transient contract test bug workaround).
    for name in variable_names:
        response = api_helpers.call_get_variables(frauddetector_client, name)
        fetched_variables.extend(response.get("variables", []))
    return _get_event_variables_model_for_given_variables(
        frauddetector_client, fetched_variables, reference_variable_names
    )
def _get_event_variables_model_for_given_variables(frauddetector_client, variables, reference_variable_names: set):
    """Convert get_variables response items to EventVariable models.

    Variables whose names appear in `reference_variable_names` are returned as
    ARN-only references; all others are returned fully populated (inline),
    including their tags.
    """
    variable_models = []
    for variable in variables:
        variable_arn = variable.get("arn", "")
        variable_name = util.extract_name_from_arn(variable_arn)
        LOG.debug(f"checking if {variable_name} is in {reference_variable_names}")
        if variable_name in reference_variable_names:
            LOG.debug(f"in reference set, {variable_name} is not inline")
            variable_model = models.EventVariable(
                Arn=variable_arn,
                Name=variable_name,
                Tags=None,
                Description=None,
                DataType=None,
                DataSource=None,
                DefaultValue=None,
                VariableType=None,
                CreatedTime=None,
                LastUpdatedTime=None,
                Inline=False,
            )
        else:
            LOG.debug(f"not in reference set, {variable_name} is inline")
            # Only inline variables carry tags, so only fetch them here. This matches
            # the label/entity-type helpers and avoids a wasted tag lookup for every
            # referenced variable (the old code fetched tags unconditionally).
            variable_tags = _get_tags_for_given_arn(frauddetector_client, variable_arn)
            tag_models = get_tag_models_from_tags(variable_tags)
            variable_model = models.EventVariable(
                Name=variable.get("name", ""),
                Tags=tag_models,
                Description=variable.get("description", ""),
                DataType=variable.get("dataType", ""),
                DataSource=variable.get("dataSource", ""),
                DefaultValue=variable.get("defaultValue", ""),
                VariableType=variable.get("variableType", ""),
                Arn=variable_arn,
                CreatedTime=variable.get("createdTime", ""),
                LastUpdatedTime=variable.get("lastUpdatedTime", ""),
                Inline=True,
            )
        # remove empty description/tags
        if not variable_model.Tags:
            del variable_model.Tags
        if variable_model.Description is None or variable_model.Description == "":
            del variable_model.Description
        variable_models.append(variable_model)
    return variable_models
# Labels
def _get_labels_and_return_labels_model(frauddetector_client, label_names, reference_label_names: set):
    """Convert label names into Label models.

    Names present in `reference_label_names` become ARN-only reference models;
    all others are returned fully populated (inline), including their tags.
    """
    label_models = []
    for label_name in label_names:
        matching_labels = api_helpers.call_get_labels(frauddetector_client, label_name).get("labels", [])
        if not matching_labels:
            raise RuntimeError(f"Error! Expected an existing label, but label did not exist! label name {label_name}")
        label = matching_labels[0]
        label_arn = label.get("arn", "")
        LOG.debug(f"checking if {label_name} is in {reference_label_names}")
        if label_name in reference_label_names:
            LOG.debug(f"in reference set, {label_name} is not inline")
            label_model = models.Label(
                Arn=label_arn,
                Name=label_name,
                Tags=None,
                Description=None,
                CreatedTime=None,
                LastUpdatedTime=None,
                Inline=False,
            )
        else:
            LOG.debug(f"not in reference set, {label_name} is inline")
            tag_models = get_tag_models_from_tags(_get_tags_for_given_arn(frauddetector_client, label_arn))
            label_model = models.Label(
                Name=label_name,
                Tags=tag_models,
                Description=label.get("description", ""),
                Arn=label_arn,
                CreatedTime=label.get("createdTime", ""),
                LastUpdatedTime=label.get("lastUpdatedTime", ""),
                Inline=True,
            )
        # Strip empty tags/description so reference models stay minimal.
        if not label_model.Tags:
            del label_model.Tags
        if label_model.Description is None or label_model.Description == "":
            del label_model.Description
        label_models.append(label_model)
    return label_models
# EntityTypes
def _get_entity_types_and_return_entity_types_model(
    frauddetector_client, entity_type_names: List[str], reference_entity_type_names: set
) -> List[models.EntityType]:
    """Convert entity type names into EntityType models.

    Names present in `reference_entity_type_names` become ARN-only reference
    models; all others are returned fully populated (inline), including tags.
    """
    entity_type_models = []
    for entity_type_name in entity_type_names:
        get_worked, get_response = validation_helpers.check_if_get_entity_types_succeeds(
            frauddetector_client, entity_type_name
        )
        if not get_worked:
            raise RuntimeError(
                f"Error! Expected an existing get entity type, "
                f"but entity type did not exist! entity type {entity_type_name}"
            )
        entity_type = get_response.get("entityTypes")[0]
        entity_type_arn = entity_type.get("arn", "")
        LOG.debug(f"checking if {entity_type_name} is in {reference_entity_type_names}")
        if entity_type_name in reference_entity_type_names:
            LOG.debug(f"in reference set, {entity_type_name} is not inline")
            entity_type_model = models.EntityType(
                Arn=entity_type_arn,
                Name=entity_type_name,
                Tags=None,
                Description=None,
                CreatedTime=None,
                LastUpdatedTime=None,
                Inline=False,
            )
        else:
            LOG.debug(f"not in reference set, {entity_type_name} is inline")
            tag_models = get_tag_models_from_tags(
                _get_tags_for_given_arn(frauddetector_client, entity_type_arn)
            )
            entity_type_model = models.EntityType(
                Name=entity_type_name,
                Tags=tag_models,
                Description=entity_type.get("description", ""),
                Arn=entity_type_arn,
                CreatedTime=entity_type.get("createdTime", ""),
                LastUpdatedTime=entity_type.get("lastUpdatedTime", ""),
                Inline=True,
            )
        # Strip empty tags/description so reference models stay minimal.
        if not entity_type_model.Tags:
            del entity_type_model.Tags
        if entity_type_model.Description is None or entity_type_model.Description == "":
            del entity_type_model.Description
        entity_type_models.append(entity_type_model)
    return entity_type_models
# External Models for Detector
def get_external_model_arns_from_model(model: models.ResourceModel) -> Set[str]:
    """Collect the set of external-model ARNs attached to the detector model."""
    associated = model.AssociatedModels or []
    return {attached.Arn for attached in associated if util.is_external_model_arn(attached.Arn)}
def get_external_model_endpoints_from_model(model: models.ResourceModel) -> List[str]:
    """List external-model endpoint names (extracted from ARNs) attached to the detector model."""
    associated = model.AssociatedModels or []
    return [util.extract_name_from_arn(attached.Arn) for attached in associated if util.is_external_model_arn(attached.Arn)]
# Model Versions for Detector
def get_model_versions_from_model(model: models.ResourceModel) -> List[dict]:
    """Extract {modelId, modelType, modelVersionNumber} dicts for attached model-version ARNs."""
    associated = model.AssociatedModels or []
    model_versions = []
    for attached in associated:
        if not util.is_model_version_arn(attached.Arn):
            continue
        model_id, model_type, model_version_number = util.get_model_version_details_from_arn(attached.Arn)
        model_versions.append(
            {"modelId": model_id, "modelType": model_type, "modelVersionNumber": model_version_number}
        )
    return model_versions
# Referenced/Inline Resources
def get_referenced_resources_for_event_type(event_type_model: models.EventType) -> dict:
    """Return the names of non-inline (referenced) dependencies of an event type model."""
    if not event_type_model:
        return {"event_variables": set(), "labels": set(), "entity_types": set()}
    LOG.debug(f"building referenced resources for event type model: {event_type_model.Name}")
    referenced_resources = {
        "event_variables": {ev.Name for ev in event_type_model.EventVariables if not ev.Inline},
        "labels": {label.Name for label in event_type_model.Labels if not label.Inline},
        "entity_types": {et.Name for et in event_type_model.EntityTypes if not et.Inline},
    }
    LOG.debug(f"returning referenced resources: {referenced_resources}")
    return referenced_resources
def get_referenced_resources_for_detector(detector_model: models.ResourceModel) -> dict:
    """Return names of resources the detector references rather than defines inline:
    the outcomes used by its rules, and its event type.
    """
    referenced_resources = {
        "rule_outcomes": set(),
        "event_type": set(),
    }
    if not detector_model:
        return referenced_resources
    LOG.debug(f"building referenced resources for detector model: {detector_model.DetectorId}")
    for rule_model in detector_model.Rules:
        for outcome_model in rule_model.Outcomes:
            if outcome_model.Inline:
                continue
            # Prefer the explicit Name; only parse the ARN when Name is unset.
            # The old two-element [a, b][cond] indexing evaluated both branches,
            # which could crash on a missing Arn even when Name was present.
            if outcome_model.Name is not None:
                outcome_name = outcome_model.Name
            else:
                outcome_name = util.extract_name_from_arn(outcome_model.Arn)
            referenced_resources["rule_outcomes"].add(outcome_name)
    if not detector_model.EventType.Inline:
        referenced_resources["event_type"].add(util.extract_name_from_arn(detector_model.EventType.Arn))
    LOG.debug(f"returning referenced resources: {referenced_resources}")
    return referenced_resources
def get_inline_resources_for_event_type(event_type_model: models.EventType) -> dict:
    """Return the names of dependencies the event type model defines inline."""
    if not event_type_model:
        return {"event_variables": set(), "labels": set(), "entity_types": set()}
    LOG.debug(f"building inline resources for event type model: {event_type_model.Name}")
    inline_resources = {
        "event_variables": {ev.Name for ev in event_type_model.EventVariables if ev.Inline},
        "labels": {label.Name for label in event_type_model.Labels if label.Inline},
        "entity_types": {et.Name for et in event_type_model.EntityTypes if et.Inline},
    }
    LOG.debug(f"returning inline resources: {inline_resources}")
    return inline_resources
| 41.057187 | 118 | 0.683481 |
8d22e1234b6f3b82fea53cacd953c5a4f8b1b84f | 934 | py | Python | setup.py | issackelly/biblion | 9d72ed037bd59597e1f02a75778a08f3de72da75 | [
"BSD-3-Clause"
] | null | null | null | setup.py | issackelly/biblion | 9d72ed037bd59597e1f02a75778a08f3de72da75 | [
"BSD-3-Clause"
] | null | null | null | setup.py | issackelly/biblion | 9d72ed037bd59597e1f02a75778a08f3de72da75 | [
"BSD-3-Clause"
] | null | null | null | from distutils.core import setup
# Packaging metadata for the biblion blog app.
# Runtime dependencies are not declared here — see requirements.txt.
setup(
    name = "biblion",
    version = "0.1.dev10",
    author = "Eldarion",
    author_email = "development@eldarion.com",
    description = "the eldarion.com blog app intended to be suitable for site-level company and project blogs",
    # Reuse the README verbatim as the long description shown on PyPI.
    long_description = open("README.rst").read(),
    license = "BSD",
    url = "http://github.com/eldarion/biblion",
    packages = [
        "biblion",
        "biblion.templatetags",
    ],
    # Ship the non-Python feed templates inside the package.
    package_data = {
        "biblion": [
            "templates/biblion/*.xml",
        ]
    },
    classifiers = [
        "Development Status :: 3 - Alpha",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Framework :: Django",
    ]
)
| 26.685714 | 111 | 0.591006 |
35711855b080416543af737283e184190a0a82fa | 27,677 | py | Python | pnc_cli/swagger_client/apis/productversions_api.py | vibe13/pnc-cli | 9020462cac5254bdd40cc7d8fa239433242cce45 | [
"Apache-2.0"
] | 2 | 2016-05-18T15:01:34.000Z | 2016-08-11T14:04:17.000Z | pnc_cli/swagger_client/apis/productversions_api.py | vibe13/pnc-cli | 9020462cac5254bdd40cc7d8fa239433242cce45 | [
"Apache-2.0"
] | 47 | 2016-06-23T19:58:40.000Z | 2020-03-10T17:58:11.000Z | pnc_cli/swagger_client/apis/productversions_api.py | vibe13/pnc-cli | 9020462cac5254bdd40cc7d8fa239433242cce45 | [
"Apache-2.0"
] | 21 | 2016-05-30T20:34:17.000Z | 2021-09-07T13:22:20.000Z | # coding: utf-8
"""
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ProductversionsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def create_new_product_version(self, **kwargs):
"""
Create a new ProductVersion for a Product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_new_product_version(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ProductVersionRest body:
:return: ProductVersionSingleton
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_new_product_version_with_http_info(**kwargs)
else:
(data) = self.create_new_product_version_with_http_info(**kwargs)
return data
    def create_new_product_version_with_http_info(self, **kwargs):
        """
        Create a new ProductVersion for a Product
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_new_product_version_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param ProductVersionRest body:
        :return: ProductVersionSingleton
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted kwargs: the documented API parameter plus the internal
        # transport options consumed by ApiClient.call_api.
        all_params = ['body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Flatten the recognized kwargs into `params`, rejecting unknown names.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_new_product_version" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The request body is only sent when the caller supplied one.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api('/product-versions', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ProductVersionSingleton',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def get_all(self, **kwargs):
"""
Gets all Product Versions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_all(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page_index: Page Index
:param int page_size: Pagination size
:param str sort: Sorting RSQL
:param str q: RSQL Query
:return: ProductVersionPage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_all_with_http_info(**kwargs)
else:
(data) = self.get_all_with_http_info(**kwargs)
return data
    def get_all_with_http_info(self, **kwargs):
        """
        Gets all Product Versions
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_all_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int page_index: Page Index
        :param int page_size: Pagination size
        :param str sort: Sorting RSQL
        :param str q: RSQL Query
        :return: ProductVersionPage
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted kwargs: the documented query parameters plus the internal
        # transport options consumed by ApiClient.call_api.
        all_params = ['page_index', 'page_size', 'sort', 'q']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Flatten the recognized kwargs into `params`, rejecting unknown names.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        # Only forward query parameters the caller actually supplied.
        query_params = []
        if 'page_index' in params:
            query_params.append(('pageIndex', params['page_index']))
        if 'page_size' in params:
            query_params.append(('pageSize', params['page_size']))
        if 'sort' in params:
            query_params.append(('sort', params['sort']))
        if 'q' in params:
            query_params.append(('q', params['q']))

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api('/product-versions', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ProductVersionPage',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def get_build_configuration_sets(self, id, **kwargs):
"""
Gets build configuration sets associated with a product version
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_build_configuration_sets(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product Version id (required)
:param int page_index: Page Index
:param int page_size: Pagination size
:param str sort: Sorting RSQL
:param str q: RSQL Query
:return: BuildConfigurationSetPage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_build_configuration_sets_with_http_info(id, **kwargs)
else:
(data) = self.get_build_configuration_sets_with_http_info(id, **kwargs)
return data
    def get_build_configuration_sets_with_http_info(self, id, **kwargs):
        """
        Gets build configuration sets associated with a product version
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_build_configuration_sets_with_http_info(id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int id: Product Version id (required)
        :param int page_index: Page Index
        :param int page_size: Pagination size
        :param str sort: Sorting RSQL
        :param str q: RSQL Query
        :return: BuildConfigurationSetPage
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted kwargs: the documented parameters plus the internal
        # transport options consumed by ApiClient.call_api.
        all_params = ['id', 'page_index', 'page_size', 'sort', 'q']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Flatten the recognized kwargs into `params`, rejecting unknown names.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_build_configuration_sets" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_build_configuration_sets`")

        collection_formats = {}

        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']

        # Only forward query parameters the caller actually supplied.
        query_params = []
        if 'page_index' in params:
            query_params.append(('pageIndex', params['page_index']))
        if 'page_size' in params:
            query_params.append(('pageSize', params['page_size']))
        if 'sort' in params:
            query_params.append(('sort', params['sort']))
        if 'q' in params:
            query_params.append(('q', params['q']))

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api('/product-versions/{id}/build-configuration-sets', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='BuildConfigurationSetPage',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def get_specific(self, id, **kwargs):
"""
Gets specific Product Version
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_specific(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product Version id (required)
:return: ProductVersionSingleton
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_specific_with_http_info(id, **kwargs)
else:
(data) = self.get_specific_with_http_info(id, **kwargs)
return data
    def get_specific_with_http_info(self, id, **kwargs):
        """
        Gets specific Product Version
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_specific_with_http_info(id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int id: Product Version id (required)
        :return: ProductVersionSingleton
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted kwargs: the documented parameters plus the internal
        # transport options consumed by ApiClient.call_api.
        all_params = ['id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Flatten the recognized kwargs into `params`, rejecting unknown names.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_specific" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_specific`")

        collection_formats = {}

        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api('/product-versions/{id}', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ProductVersionSingleton',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def update(self, id, **kwargs):
"""
Updates an existing Product Version
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product Version id (required)
:param ProductVersionRest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_with_http_info(id, **kwargs)
else:
(data) = self.update_with_http_info(id, **kwargs)
return data
def update_with_http_info(self, id, **kwargs):
"""
Updates an existing Product Version
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product Version id (required)
:param ProductVersionRest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/product-versions/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_build_configuration_sets(self, id, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_build_configuration_sets(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product Version id (required)
:param list[BuildConfigurationSetRest] body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_build_configuration_sets_with_http_info(id, **kwargs)
else:
(data) = self.update_build_configuration_sets_with_http_info(id, **kwargs)
return data
def update_build_configuration_sets_with_http_info(self, id, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_build_configuration_sets_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product Version id (required)
:param list[BuildConfigurationSetRest] body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_build_configuration_sets" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_build_configuration_sets`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/product-versions/{id}/build-configuration-sets', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 39.369844 | 114 | 0.557214 |
814507ee9751e728c073948f93edf7cc0c409b01 | 568 | py | Python | tests/root_chain/contracts/data_structures/test_priority_queue.py | josojo/plasma-mvp | 2c1d6d324ea164b2081c628170da4bae59c9018e | [
"Apache-2.0"
] | null | null | null | tests/root_chain/contracts/data_structures/test_priority_queue.py | josojo/plasma-mvp | 2c1d6d324ea164b2081c628170da4bae59c9018e | [
"Apache-2.0"
] | null | null | null | tests/root_chain/contracts/data_structures/test_priority_queue.py | josojo/plasma-mvp | 2c1d6d324ea164b2081c628170da4bae59c9018e | [
"Apache-2.0"
] | null | null | null | import pytest
@pytest.fixture
def priority_queue(get_contract):
return get_contract('DataStructures/PriorityQueue.sol')
def test_priority_queue(t, priority_queue):
priority_queue.insert(2)
priority_queue.insert(5)
priority_queue.insert(3)
assert priority_queue.getMin() == 2
priority_queue.insert(1)
assert priority_queue.getMin() == 1
assert priority_queue.delMin() == 1
assert priority_queue.delMin() == 2
assert priority_queue.getMin() == 3
assert priority_queue.delMin() == 3
assert priority_queue.delMin() == 5
| 27.047619 | 59 | 0.725352 |
2e2d6140d6dede0d15f76ba78ec33dfb374a97a5 | 410 | py | Python | funing.py | larryw3i/Funing | 8ef88af8766f0246614517fa00f3b322ba722d6b | [
"MIT"
] | 1 | 2021-08-22T05:56:09.000Z | 2021-08-22T05:56:09.000Z | funing.py | larryw3i/Funing | 8ef88af8766f0246614517fa00f3b322ba722d6b | [
"MIT"
] | null | null | null | funing.py | larryw3i/Funing | 8ef88af8766f0246614517fa00f3b322ba722d6b | [
"MIT"
] | null | null | null | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import getopt
import os
import sys
from funing import *
from funing.settings import *
if __name__ == '__main__':
sys_argv = sys.argv[1:]
_, args = getopt.getopt(sys_argv, '')
for a in args:
if a in test_args:
os.environ['FUNING_TEST'] = '1'
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
simple()
| 20.5 | 72 | 0.57561 |
89fca8f480bb4d0ebc54305a50cf5ffec00878be | 2,616 | py | Python | main/Main.py | cyzLoveDream/text_dnn_classifier | 9986dd8ac66b2ac4731e28209eba524be1283d47 | [
"Apache-2.0"
] | 17 | 2017-12-14T07:34:59.000Z | 2021-03-23T12:29:40.000Z | main/Main.py | cyzLoveDream/text_dnn_classifier | 9986dd8ac66b2ac4731e28209eba524be1283d47 | [
"Apache-2.0"
] | null | null | null | main/Main.py | cyzLoveDream/text_dnn_classifier | 9986dd8ac66b2ac4731e28209eba524be1283d47 | [
"Apache-2.0"
] | 2 | 2017-12-14T07:57:56.000Z | 2018-06-08T09:42:01.000Z | import time
from untils import tools
from untils import Word2Vec
import os
import warnings
import pandas as pd
warnings.filterwarnings("ignore")
def main():
train_path,test_path,user_dict,w2v_path,\
sub_path,train_number,test_number,max_len,\
batch_size,epoch,ckpt,model_file,embedding_size,eval_test = tools.tools().load_params()
start = time.time()
# first train word2vec model
print("begin train w2v model...")
w2v_time_start = time.time()
if not os.path.exists(w2v_path):
w2v_path = Word2Vec.Word2Vec().train_w2v_model(train_path, test_path,w2v_path,user_dict)
else:
print("the w2v model is trained...")
print("finish w2v model in time: ",time.time() - w2v_time_start)
# second generate word2index, index2word, embeddings_index
print("generate word2index, index2word, embeddings_index...")
w2i_time_start = time.time()
embedding_matrix,vocab_size,word2index,index2word = tools.tools().generate_word2index(w2v_path)
print("finish word2index, index2word, embeddings_index: ",time.time() - w2i_time_start)
print("begin load data...")
start_load = time.time()
x_train,x_test,y_train,y_test = tools.tools().load_train(train_path,
"../data_example/train.tfrecords",
word2index,
max_len= max_len,
user_dict= user_dict,
sample_num=train_number)
tests,id_list = tools.tools().load_test(test_path,
"../data_example/test.tfrecords",
word2index,
max_len=max_len,
user_dict=user_dict,
sample_num=test_number)
print("finish load data in time: ", time.time() - start_load)
tools.tools().train_model(vocab_size, x_train, y_train, x_test, y_test, embedding_matrix,
batch_size, epoch, ckpt, model_file,max_len,embedding_size)
pred = tools.tools().predict(tests,id_list,vocab_size,embedding_matrix,batch_size,
ckpt,max_len,embedding_size,sub_path)
test_pd = pd.read_csv(eval_test)
test_pd["txt_label"] = test_pd["txt_label"].apply(lambda x: 1 if x == "POSITIVE" else 0)
f1_score = tools.tools().eval_test(test_pd.txt_label.values, pred)
print("the final result f1_score is: ", f1_score)
print("finish the all precessing in time: ",time.time() - start)
if __name__ == '__main__':
main() | 46.714286 | 96 | 0.622324 |
8c59f9660220f2bb6a534c61d9f5a1bce7343205 | 3,529 | py | Python | bindings/python/ensmallen/datasets/string/vibrionavarrensis.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 5 | 2021-02-17T00:44:45.000Z | 2021-08-09T16:41:47.000Z | bindings/python/ensmallen/datasets/string/vibrionavarrensis.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 18 | 2021-01-07T16:47:39.000Z | 2021-08-12T21:51:32.000Z | bindings/python/ensmallen/datasets/string/vibrionavarrensis.py | AnacletoLAB/ensmallen | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 3 | 2021-01-14T02:20:59.000Z | 2021-08-04T19:09:52.000Z | """
This file offers the methods to automatically retrieve the graph Vibrio navarrensis.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def VibrioNavarrensis(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Vibrio navarrensis graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
Wether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
load_nodes: bool = True,
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
verbose: int = 2,
Wether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
cache_path: str = "graphs"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.0
- homology.v11.5
- physical.links.v11.0
- physical.links.v11.5
- links.v11.0
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
Instace of Vibrio navarrensis graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="VibrioNavarrensis",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| 32.675926 | 223 | 0.675829 |
2144bf3f7008d65c2786e65205cec249d3d8ce99 | 1,250 | py | Python | train_simple_conv_net.py | kiseonjeong/neural-network-for-python | 902307699d59a0a38d45519a0bacd014ba705727 | [
"MIT"
] | 1 | 2020-07-08T15:47:00.000Z | 2020-07-08T15:47:00.000Z | train_simple_conv_net.py | kiseonjeong/neural-network-for-python | 902307699d59a0a38d45519a0bacd014ba705727 | [
"MIT"
] | null | null | null | train_simple_conv_net.py | kiseonjeong/neural-network-for-python | 902307699d59a0a38d45519a0bacd014ba705727 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pylab as plt
from dataset.mnist import *
from network.optimizer import *
from network.simple_conv_net import SimpleConvNet
from network.trainer import *
# Load the MNIST dataset
(x_train, t_train), (x_test, t_test) = load_mnist(flatten=False)
# Do test for overfit
x_train = x_train[:5000]
t_train = t_train[:5000]
# Generate the dataset
dataset_train = Dataset(x_train, t_train)
dataset_test = Dataset(x_test, t_test)
# The hyperparameters
max_epochs = 20
network = SimpleConvNet(input_dim=(1, 28, 28), conv_param = {'filter_num': 30, 'filter_size': 5, 'pad': 0, 'stride': 1},
num_hidden_node=100, num_output_node=10, weight_init_std=0.01)
trainer = NetTrainer(dataset_train, dataset_test, network, max_epochs, mini_batch_size=100, optimizer='Adam', optimizer_param={'lr': 0.001}, eval_sample_num_per_epoch=1000)
# Do training on the network
trainer.train_network()
# Save the training results
network.save_params("simple_conv_net_params.pkl")
print("Save the training results")
# Show the results
plt.plot(trainer.train_acc_list, label='train')
plt.plot(trainer.test_acc_list, label='test')
plt.xlabel('epochs')
plt.ylabel('accuracy')
plt.ylim([0.0, 1.0])
plt.legend()
plt.show()
| 31.25 | 172 | 0.7544 |
85549cc8ba910d6ee92554dc173197768761eb31 | 530 | py | Python | monitoring/mock_ridsp/__init__.py | Orbitalize/InterUSS-Platform | a1d60ec928dc5c63f9dcddd195bfeda7c4c1c84b | [
"Apache-2.0"
] | 58 | 2019-10-03T19:15:47.000Z | 2022-03-09T16:50:47.000Z | monitoring/mock_ridsp/__init__.py | Orbitalize/InterUSS-Platform | a1d60ec928dc5c63f9dcddd195bfeda7c4c1c84b | [
"Apache-2.0"
] | 283 | 2019-09-30T18:35:02.000Z | 2022-03-29T13:36:53.000Z | monitoring/mock_ridsp/__init__.py | Orbitalize/InterUSS-Platform | a1d60ec928dc5c63f9dcddd195bfeda7c4c1c84b | [
"Apache-2.0"
] | 51 | 2019-10-08T18:47:36.000Z | 2022-03-23T08:44:06.000Z | import flask
from .config import Config
webapp = flask.Flask(__name__)
webapp.config.from_object(Config)
print(
'################################################################################\n' + \
'################################ Configuration ################################\n' + \
'\n'.join('## {}: {}'.format(key, webapp.config[key]) for key in webapp.config) + '\n' + \
'################################################################################', flush=True)
from monitoring.mock_ridsp import routes
| 35.333333 | 97 | 0.379245 |
44db78e98e2cef7efabc5042e3519eff80c8d626 | 504 | py | Python | env/lib/python3.8/site-packages/plotly/validators/densitymapbox/hoverlabel/font/_familysrc.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 76 | 2020-07-06T14:44:05.000Z | 2022-02-14T15:30:21.000Z | env/lib/python3.8/site-packages/plotly/validators/densitymapbox/hoverlabel/font/_familysrc.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 11 | 2020-08-09T02:30:14.000Z | 2022-03-12T00:50:14.000Z | env/lib/python3.8/site-packages/plotly/validators/densitymapbox/hoverlabel/font/_familysrc.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 11 | 2020-07-12T16:18:07.000Z | 2022-02-05T16:48:35.000Z | import _plotly_utils.basevalidators
class FamilysrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self,
plotly_name="familysrc",
parent_name="densitymapbox.hoverlabel.font",
**kwargs
):
super(FamilysrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
| 28 | 68 | 0.617063 |
d9b1e1db0daff6394f6c076784af99eba4eb0dee | 2,696 | py | Python | tests/operators/vector/test_squeeze_ad_001.py | laekov/akg | 5316b8cb2340bbf71bdc724dc9d81513a67b3104 | [
"Apache-2.0"
] | 1 | 2020-08-31T02:43:43.000Z | 2020-08-31T02:43:43.000Z | tests/operators/vector/test_squeeze_ad_001.py | laekov/akg | 5316b8cb2340bbf71bdc724dc9d81513a67b3104 | [
"Apache-2.0"
] | null | null | null | tests/operators/vector/test_squeeze_ad_001.py | laekov/akg | 5316b8cb2340bbf71bdc724dc9d81513a67b3104 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
################################################
Testcase_PrepareCondition:
Testcase_TestSteps:
Testcase_ExpectedResult:
"""
import os
from base import TestBase
import pytest
from test_run.squeeze_ad_run import squeeze_ad_run
class TestCase(TestBase):
def setup(self):
"""
testcase preparcondition
:return:
"""
case_name = "test_akg_squeeze_001"
case_path = os.getcwd()
# params init
self.params_init(case_name, case_path)
self.caseresult = True
self._log.info("============= {0} Setup case============".format(self.casename))
self.testarg = [
("test_squeeze_16_1__1", squeeze_ad_run, [(16, 1), 1, "int32"]),
("test_squeeze_8_16_1__2", squeeze_ad_run, [(8, 16, 1), 2, "int32"]),
("test_squeeze_8_1_16_1__none", squeeze_ad_run, [(8, 1, 16, 1), None, "int32"]),
("test_squeeze_1_1_8_16__0", squeeze_ad_run, [(1, 1, 8, 16), 0, "float16"]),
("test_squeeze_8_1_16_16__1", squeeze_ad_run, [(8, 1, 16, 16), 1, "float16"]),
("test_squeeze_1_3_1_4_1__0_2", squeeze_ad_run, [(1, 3, 1, 4, 1), (0, 2), "int32"]),
]
self.testarg_cloud = [
#("test_squeeze_1_1_8_16__0", squeeze_run, [(1,1,8,16), 0, "float32", "squeeze"], [(1,1),(1,1),(8,8),(16,16)]),
]
return
@pytest.mark.level0
@pytest.mark.env_onecard
@pytest.mark.platform_x86_ascend_training
def test_run(self):
"""
run case.#
:return:
"""
self.common_run(self.testarg)
@pytest.mark.aicmodel
@pytest.mark.env_onecard
@pytest.mark.platform_x86_ascend_training
def test_run_cloud(self):
"""
run case.#
:return:
"""
self.common_run(self.testarg_cloud)
def teardown(self):
"""
clean environment
:return:
"""
self._log.info("============= {0} Teardown============".format(self.casename))
return
| 30.988506 | 124 | 0.581231 |
978e9656fd6810dcd9abf0f1b6df216d6b1e8d0b | 3,440 | py | Python | code/translator/hot/tosca/tests/test_tosca_autoscaling.py | superfluidity/RDCL3D | 3c5717941bd4046aa1be178e9004db1dc1c469a0 | [
"Apache-2.0"
] | 8 | 2017-03-13T16:34:28.000Z | 2021-11-16T11:35:56.000Z | code/translator/hot/tosca/tests/test_tosca_autoscaling.py | superfluidity/RDCL3D | 3c5717941bd4046aa1be178e9004db1dc1c469a0 | [
"Apache-2.0"
] | null | null | null | code/translator/hot/tosca/tests/test_tosca_autoscaling.py | superfluidity/RDCL3D | 3c5717941bd4046aa1be178e9004db1dc1c469a0 | [
"Apache-2.0"
] | 3 | 2017-03-28T09:26:40.000Z | 2020-12-08T14:16:12.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from toscaparser.nodetemplate import NodeTemplate
from toscaparser.policy import Policy
from toscaparser.tests.base import TestCase
import toscaparser.utils.yamlparser
from translator.hot.tosca.tosca_compute import ToscaCompute
from translator.hot.tosca.tosca_policies_scaling import ToscaAutoscaling
class AutoscalingTest(TestCase):
def _tosca_scaling_test(self, tpl_snippet, expectedprops):
nodetemplates = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet)['node_templates'])
policies = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet)['policies'])
name = list(nodetemplates.keys())[0]
policy_name = list(policies[0].keys())[0]
for policy in policies:
tpl = policy[policy_name]
targets = tpl["targets"]
properties = tpl["properties"]
try:
nodetemplate = NodeTemplate(name, nodetemplates)
toscacompute = ToscaCompute(nodetemplate)
toscacompute.handle_properties()
policy = Policy(policy_name, tpl, targets,
properties, "node_templates")
toscascaling = ToscaAutoscaling(policy)
parameters = toscascaling.handle_properties([toscacompute])
self.assertEqual(parameters[0].properties, expectedprops)
except Exception:
raise
def test_compute_with_scaling(self):
tpl_snippet = '''
node_templates:
my_server_1:
type: tosca.nodes.Compute
capabilities:
host:
properties:
num_cpus: 2
disk_size: 10 GB
mem_size: 512 MB
os:
properties:
# host Operating System image properties
architecture: x86_64
type: Linux
distribution: RHEL
version: 6.5
policies:
- asg:
type: tosca.policies.Scaling
description: Simple node autoscaling
targets: [my_server_1]
triggers:
resize_compute:
description: trigger
condition:
constraint: utilization greater_than 50%
period: 60
evaluations: 1
method: average
properties:
min_instances: 2
max_instances: 10
default_instances: 3
increment: 1
'''
expectedprops = {'desired_capacity': 3,
'max_size': 10,
'min_size': 2,
'resource': {'type': 'asg_res.yaml'}}
self._tosca_scaling_test(
tpl_snippet,
expectedprops)
| 37.391304 | 78 | 0.57907 |
4cd053fc1d770fe259998bc493b15c95199a4c8a | 194 | py | Python | setup.py | EthanTGo/ethango-numbergame | f528343287b4de45873425f02d91b971dda62a6e | [
"MIT"
] | null | null | null | setup.py | EthanTGo/ethango-numbergame | f528343287b4de45873425f02d91b971dda62a6e | [
"MIT"
] | null | null | null | setup.py | EthanTGo/ethango-numbergame | f528343287b4de45873425f02d91b971dda62a6e | [
"MIT"
] | null | null | null | from setuptools import setup
setup(
name = "ethango_numbergame",
version = 0.1,
description = "Number guessing game",
packages = ["ethango_numbergame"],
zip_safe = False
)
| 17.636364 | 41 | 0.664948 |
2d25685d8f6d0bd7a9bcfd4524bad1a9925d6720 | 1,081 | py | Python | Python/find-palindrome-with-fixed-length.py | Priyansh2/LeetCode-Solutions | d613da1881ec2416ccbe15f20b8000e36ddf1291 | [
"MIT"
] | 4 | 2018-10-11T17:50:56.000Z | 2018-10-11T21:16:44.000Z | Python/find-palindrome-with-fixed-length.py | Priyansh2/LeetCode-Solutions | d613da1881ec2416ccbe15f20b8000e36ddf1291 | [
"MIT"
] | null | null | null | Python/find-palindrome-with-fixed-length.py | Priyansh2/LeetCode-Solutions | d613da1881ec2416ccbe15f20b8000e36ddf1291 | [
"MIT"
] | 4 | 2018-10-11T18:50:32.000Z | 2018-10-12T00:04:09.000Z | # Time: O(n * l)
# Space: O(1)
# math
class Solution(object):
def kthPalindrome(self, queries, intLength):
"""
:type queries: List[int]
:type intLength: int
:rtype: List[int]
"""
def reverse(x):
result = 0
while x:
result = result*10+x%10
x //= 10
return result
def f(l, x):
x = 10**((l-1)//2)+(x-1)
if x > 10**((l+1)//2)-1:
return -1
return x*10**(l//2)+reverse(x//10 if l%2 else x)
return [f(intLength, x) for x in queries]
# Time: O(n * l)
# Space: O(l)
# math
class Solution2(object):
def kthPalindrome(self, queries, intLength):
"""
:type queries: List[int]
:type intLength: int
:rtype: List[int]
"""
def f(l, x):
if 10**((l-1)//2)+(x-1) > 10**((l+1)//2)-1:
return -1
s = str(10**((l-1)//2)+(x-1))
return int(s+s[::-1][l%2:])
return [f(intLength, x) for x in queries]
| 24.022222 | 60 | 0.432932 |
8af1d47e834c5f128ae9407d68011e8195747e29 | 5,480 | py | Python | src/lib/mine/utility/filesystem.py | rdw20170120/workstation | ed19aa930a83885c2a8cb58eb0bb5afe58f95df3 | [
"MIT"
] | null | null | null | src/lib/mine/utility/filesystem.py | rdw20170120/workstation | ed19aa930a83885c2a8cb58eb0bb5afe58f95df3 | [
"MIT"
] | 2 | 2021-04-06T18:07:32.000Z | 2021-06-02T01:50:40.000Z | src/lib/mine/utility/filesystem.py | rdw20170120/workstation | ed19aa930a83885c2a8cb58eb0bb5afe58f95df3 | [
"MIT"
] | null | null | null | #!/usr/bin/env false
"""TODO: Write
TODO: REVIEW: this module against its siblings.
"""
# Internal packages (absolute references, distributed with Python)
from logging import getLogger
from os import link
from os import makedirs
from os import remove
from os.path import getsize
from os.path import isdir
from os.path import isfile
from pathlib import Path
from pathlib import PurePath
from shutil import copy2
from shutil import rmtree
# External packages (absolute references, NOT distributed with Python)
# Library modules (absolute references, NOT packaged, in project)
from utility import my_assert as is_
# Project modules (relative references, NOT packaged, in project)
log = getLogger(__name__)
def basename_has_suffix(path_, suffix):
assert is_.instance(suffix, str)
return str(path_).endswith(suffix)
def clone_file(target_file, source_file):
assert is_.absolute_file(source_file)
try:
actual = copy2(str(source_file), str(target_file))
assert is_.equal(actual, target_file)
except BaseException:
log.error("Failed to clone file %s to %s", source_file, target_file)
raise
def concatenate_text_file(target_file, source_file, encoding=None):
assert is_.absolute_file(source_file)
if encoding is None:
encoding = "utf_8"
with target_file.open(
encoding=encoding, mode="at", newline=None
) as writer:
with source_file.open(
encoding=encoding, mode="rt", newline=None
) as reader:
for line in reader:
writer.write(line)
def concatenate_text_files(target_file, source_files, encoding=None):
if encoding is None:
encoding = "utf_8"
assert is_.instance(source_files, list)
with target_file.open(
encoding=encoding, mode="wt", newline=None
) as writer:
for s in source_files:
assert is_.absolute_file(s)
with s.open(encoding=encoding, mode="rt", newline=None) as reader:
for line in reader:
writer.write(line)
def create_directory(path_):
try:
makedirs(path_)
except BaseException:
log.error("Failed to create directory %s", path_)
raise
def delete_directory(path_):
try:
rmtree(path_)
except BaseException:
log.error("Failed to delete directory %s", path_)
raise
def delete_directory_tree(path_, force=False):
try:
rmtree(path_, ignore_errors=force)
except BaseException:
log.error("Failed to delete directory tree %s", path_)
raise
def delete_file(file_):
try:
remove(file_)
except BaseException:
log.error("Failed to delete directory tree %s", path_)
raise
def directory_exists(path_):
result = isdir(path_)
return result
def file_exists(path_name):
result = isfile(path_name)
return result
def file_size(file_):
return getsize(file_)
def make_hard_link(new_path, existing_path):
if isinstance(new_path, str):
new_path = Path(new_path)
assert is_.instance(new_path, Path)
assert is_.absolute_path(new_path)
assert is_.existing_absolute_path(existing_path)
link(existing_path, new_path)
# TODO: Added in Python 3.8
# new_path.link_to(existing_path)
def maybe_create_directory(path_):
if not directory_exists(path_):
create_directory(path_)
def maybe_delete_directory(path_):
if directory_exists(path_):
delete_directory(path_)
def maybe_delete_file(path_):
if file_exists(path_):
delete_file(path_)
def read_binary_from_file(file_):
with open(file_, mode="rb") as reader:
result = reader.read()
assert is_.instance(result, bytes)
return result
def read_text_from_file(file_, encoding=None):
if encoding is None:
encoding = "utf_8"
with open(file_, encoding=encoding, mode="rt", newline=None) as reader:
result = reader.read()
assert is_.instance(result, str)
return result
def recreate_directory(path_):
maybe_delete_directory(path_)
create_directory(path_)
def split_basename(path_):
parent, basename = split_path(path_)
name = PurePath(basename)
suffixes = ""
while True:
suffix, name = name.suffix, name.stem
if not suffix:
break
suffixes = suffix + suffixes
name = PurePath(name)
return str(name), suffixes
def split_path(path_):
if not isinstance(path_, Path):
path_ = PurePath(path_)
return path_.parent, path_.name
def touch(file_):
try:
if not isinstance(file_, Path):
file_ = Path(file_)
if not file_.exists():
write_text_into_file(file_, "")
file_.touch(exist_ok=True)
except BaseException:
log.error("Failed to touch file %s", file_)
raise
def write_binary_into_file(file_, binary_content):
assert is_.instance(binary_content, bytes)
with open(file_, mode="wb") as writer:
count = writer.write(binary_content)
assert is_.equal(len(binary_content), count)
def write_text_into_file(file_, text_content, encoding=None):
if encoding is None:
encoding = "utf_8"
assert is_.instance(text_content, str)
with open(file_, encoding=encoding, mode="wt", newline=None) as writer:
count = writer.write(text_content)
assert is_.equal(len(text_content), count)
"""DisabledContent
"""
| 25.37037 | 78 | 0.67719 |
074c55862deafdb5f8060adffb2d4fdde8f10c3a | 1,298 | py | Python | Pytorch/utils.py | JasperMorrison/PytorchToCaffe | 04f0066cdc8a9da92591d7361907c3ea53271707 | [
"MIT"
] | 794 | 2018-09-06T07:38:41.000Z | 2022-03-16T06:30:37.000Z | Pytorch/utils.py | JasperMorrison/PytorchToCaffe | 04f0066cdc8a9da92591d7361907c3ea53271707 | [
"MIT"
] | 111 | 2018-12-04T11:18:27.000Z | 2022-03-31T07:39:11.000Z | Pytorch/utils.py | JasperMorrison/PytorchToCaffe | 04f0066cdc8a9da92591d7361907c3ea53271707 | [
"MIT"
] | 221 | 2018-11-22T08:11:03.000Z | 2022-03-16T07:11:43.000Z | import torch
import numpy as np
class Resize_preprocess(object):
    """Callable transform that rescales a PIL image to (size_w, size_h)."""

    def __init__(self, size_w, size_h):
        # Target size stored in the (width, height) order PIL expects.
        self.size = (size_w, size_h)

    def __call__(self, img):
        # Delegate to the image's own resize; returns a new image object.
        return img.resize(self.size)
class AverageMeter(object):
    """Tracks the most recent value plus a running sum, count and average."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero out all statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running average."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count
def calculate_mean_std(loader):
    """Compute per-channel mean and std over all images yielded by *loader*.

    Each batch is ``(datas, labels)`` where every image exposes ``.numpy()``
    and is shaped ``(3, H, W)`` with values preprocessed into [0, 1]
    (e.g. by ``torchvision.transforms.ToTensor``).

    Returns:
        (mean, std): two numpy arrays of shape (3,) — the mean of the
        per-image channel means, and the corresponding std.
    """
    # First pass: accumulate the per-image channel means.
    # BUGFIX: the accumulator used to start at np.ones(3), which biased
    # the mean upward by 1/cnt per channel.
    channel_sum = np.zeros(3)
    cnt = 0
    for datas, _ in loader:
        cnt += len(datas)
        for data in datas:
            data = data.numpy()
            channel_sum += data.sum(1).sum(1) / np.prod(data.shape[1:])
    mean = channel_sum / cnt

    # Second pass: accumulate squared deviations from the mean.
    # BUGFIX: the original kept incrementing `cnt` in this pass too, so the
    # variance was divided by twice the number of images; it also started
    # the error accumulator at np.ones(3).
    error = np.zeros(3)
    _mean = mean.reshape([3, 1, 1])
    for datas, _ in loader:
        for data in datas:
            data = data.numpy()
            error += ((data - _mean) ** 2).sum(1).sum(1) / np.prod(data.shape[1:])
    std = np.sqrt(error / cnt)
    return mean, std
958ec7606473ad338af996909d0e207a3a32195b | 1,444 | py | Python | test/bugfixes_test.py | sbharadwajj/backpack | 5d484195735e29b1c08848906618f8ba2f339bec | [
"MIT"
] | null | null | null | test/bugfixes_test.py | sbharadwajj/backpack | 5d484195735e29b1c08848906618f8ba2f339bec | [
"MIT"
] | null | null | null | test/bugfixes_test.py | sbharadwajj/backpack | 5d484195735e29b1c08848906618f8ba2f339bec | [
"MIT"
] | null | null | null | import itertools
import pytest
import torch
import backpack
def parameters_issue_30():
    """Build pytest parametrization data for the issue-30 convolution configs.

    Returns a dict suitable for **-expansion into pytest.mark.parametrize:
    one config dict per combination of the hyper-parameters below, plus
    matching str() ids.
    """
    grid = {
        "N": [4],
        "C_in": [4],
        "C_out": [6],
        "H": [6],
        "W": [6],
        "K": [3],
        "S": [1, 3],
        "pad": [0, 2],
        "dil": [1, 2],
    }
    names = list(grid.keys())
    configs = []
    for combo in itertools.product(*grid.values()):
        configs.append(dict(zip(names, combo)))
    return {"argvalues": configs, "ids": [str(c) for c in configs]}
@pytest.mark.parametrize("params", **parameters_issue_30())
def test_convolutions_stride_issue_30(params):
    """
    https://github.com/f-dangel/backpack/issues/30

    The gradient for the convolution is wrong when `stride` is not a multiple
    of `D + 2*padding - dilation*(kernel-1) - 1`.
    """
    torch.manual_seed(0)
    conv = torch.nn.Conv2d(
        in_channels=params["C_in"],
        out_channels=params["C_out"],
        kernel_size=params["K"],
        stride=params["S"],
        padding=params["pad"],
        dilation=params["dil"],
    )
    backpack.extend(conv)
    batch = torch.randn(size=(params["N"], params["C_in"], params["W"], params["H"]))
    with backpack.backpack(backpack.extensions.BatchGrad()):
        torch.sum(conv(batch)).backward()
    # The summed per-sample gradients must reproduce the aggregated gradient.
    for param in conv.parameters():
        assert torch.allclose(param.grad, param.grad_batch.sum(0))
| 24.066667 | 81 | 0.567867 |
582872021cd7bd0770197e014695841fc83577e2 | 10,819 | py | Python | tests/manage/z_cluster/nodes/test_disk_failures.py | anubhav-here/ocs-ci | cf922411c721922f58d852167ff9ab8a8a6e15a6 | [
"MIT"
] | null | null | null | tests/manage/z_cluster/nodes/test_disk_failures.py | anubhav-here/ocs-ci | cf922411c721922f58d852167ff9ab8a8a6e15a6 | [
"MIT"
] | null | null | null | tests/manage/z_cluster/nodes/test_disk_failures.py | anubhav-here/ocs-ci | cf922411c721922f58d852167ff9ab8a8a6e15a6 | [
"MIT"
] | null | null | null | import logging
import pytest
import random
from ocs_ci.ocs import node, constants
from ocs_ci.framework.testlib import (
tier4, tier4b, ignore_leftovers, ManageTest, aws_platform_required,
vsphere_platform_required, bugzilla
)
from tests.sanity_helpers import Sanity
from tests.helpers import wait_for_ct_pod_recovery
from ocs_ci.ocs.resources.pvc import get_deviceset_pvs, get_deviceset_pvcs
from ocs_ci.ocs.resources.pod import (
get_osd_deployments, get_osd_pods, get_pod_node, get_operator_pods, get_osd_prepare_pods
)
from ocs_ci.ocs.resources.ocs import get_job_obj
logger = logging.getLogger(__name__)
@tier4
@tier4b
@ignore_leftovers
class TestDiskFailures(ManageTest):
    """
    Test class for detach and attach worker volume
    """
    @pytest.fixture(autouse=True)
    def teardown(self, request, nodes):
        """
        Restart nodes that are in status NotReady, for situations in
        which the test failed before restarting the node after detach volume,
        which leaves nodes in NotReady
        """
        def finalizer():
            # Collect any nodes still NotReady and bounce them so the
            # cluster is healthy for the next test
            not_ready_nodes = [
                n for n in node.get_node_objs() if n
                .ocp.get_resource_status(n.name) == constants.NODE_NOT_READY
            ]
            logger.warning(
                f"Nodes in NotReady status found: {[n.name for n in not_ready_nodes]}"
            )
            if not_ready_nodes:
                nodes.restart_nodes(not_ready_nodes)
                node.wait_for_nodes_status()
        request.addfinalizer(finalizer)

    @pytest.fixture(autouse=True)
    def init_sanity(self):
        """
        Initialize Sanity instance
        """
        self.sanity_helpers = Sanity()

    @aws_platform_required
    @pytest.mark.polarion_id("OCS-1085")
    def test_detach_attach_worker_volume(self, nodes, pvc_factory, pod_factory):
        """
        Detach and attach worker volume

        - Detach the data volume from one of the worker nodes
        - Validate cluster functionality, without checking cluster and Ceph
          health (as one node volume is detached, the cluster will be unhealthy)
          by creating resources and running IO
        - Wait for the volumes to be re-attached back to the worker node
        - Restart the node so the volume will get re-mounted
        """
        # Get a data volume
        data_volume = nodes.get_data_volumes()[0]
        # Get the worker node according to the volume attachment
        worker = nodes.get_node_by_attached_volume(data_volume)

        # Detach volume (logging is done inside the function)
        nodes.detach_volume(data_volume, worker)

        # Validate cluster is still functional
        # In case the selected node that its volume disk was detached was the one
        # running the ceph tools pod, we'll need to wait for a new ct pod to start.
        # For that, a function that connects to the ct pod is being used to check if
        # it's alive
        assert wait_for_ct_pod_recovery(), "Ceph tools pod failed to come up on another node"

        self.sanity_helpers.create_resources(pvc_factory, pod_factory)

        # Wait for worker volume to be re-attached automatically to the node
        assert nodes.wait_for_volume_attach(data_volume), (
            "Volume failed to be re-attached to a worker node"
        )

        # Restart the instance so the volume will get re-mounted
        nodes.restart_nodes([worker])

        # Cluster health check
        # W/A: For the investigation of BZ 1825675, timeout is increased to see if cluster
        # becomes healthy eventually
        # TODO: Remove 'tries=100'
        self.sanity_helpers.health_check(tries=100)

    @aws_platform_required
    @pytest.mark.polarion_id("OCS-1086")
    def test_detach_attach_2_data_volumes(self, nodes, pvc_factory, pod_factory):
        """
        Detach and attach disk from 2 worker nodes

        - Detach the data 2 of the data volumes from their worker nodes
        - Wait for the volumes to be re-attached back to the worker nodes
        - Restart the nodes so the volume will get re-mounted in each node
        - Check cluster health and functionality to make sure detach,
          attach and restart did not affect the cluster
        """
        # Get 2 data volumes
        data_volumes = nodes.get_data_volumes()[:2]
        workers_and_volumes = [
            {'worker': nodes.get_node_by_attached_volume(vol), 'volume': vol}
            for vol in data_volumes
        ]
        for worker_and_volume in workers_and_volumes:
            # Detach the volume (logging is done inside the function)
            # BUGFIX: the node argument used to be a nested
            # nodes.detach_volume(worker) call, which both invoked
            # detach_volume with wrong arguments and passed its return
            # value instead of the worker node.
            nodes.detach_volume(
                worker_and_volume['volume'], worker_and_volume['worker']
            )
        for worker_and_volume in workers_and_volumes:
            # Wait for worker volume to be re-attached automatically to the node
            assert nodes.wait_for_volume_attach(worker_and_volume['volume']), (
                f"Volume {worker_and_volume['volume']} "
                f"failed to be re-attached to a worker node"
            )

        # Restart the instances so the volume will get re-mounted
        nodes.restart_nodes(
            [worker_and_volume['worker'] for worker_and_volume in workers_and_volumes]
        )

        # Validate cluster is still functional
        self.sanity_helpers.health_check()
        self.sanity_helpers.create_resources(pvc_factory, pod_factory)

    @bugzilla('1830702')
    @vsphere_platform_required
    @pytest.mark.polarion_id("OCS-2172")
    def test_recovery_from_volume_deletion(self, nodes, pvc_factory, pod_factory):
        """
        Test cluster recovery from disk deletion from the platform side.
        Based on documented procedure detailed in
        https://bugzilla.redhat.com/show_bug.cgi?id=1787236#c16

        """
        logger.info("Picking a PV which will be deleted from the platform side")
        osd_pvs = get_deviceset_pvs()
        osd_pv = random.choice(osd_pvs)
        osd_pv_name = osd_pv.name
        # get the claim name
        logger.info(f"Getting the claim name for OSD PV {osd_pv_name}")
        claim_name = osd_pv.get().get('spec').get('claimRef').get('name')
        # Get the backing volume name
        logger.info(f"Getting the backing volume name for PV {osd_pv_name}")
        backing_volume = nodes.get_data_volumes(pvs=[osd_pv])[0]
        # Get the corresponding PVC
        logger.info(f"Getting the corresponding PVC of PV {osd_pv_name}")
        osd_pvcs = get_deviceset_pvcs()
        osd_pvcs_count = len(osd_pvcs)
        osd_pvc = [ds for ds in osd_pvcs if ds.get().get('metadata').get('name') == claim_name][0]
        # Get the corresponding OSD pod
        logger.info(f"Getting the corresponding OSD pod of PVC {osd_pvc.name}")
        osd_pods = get_osd_pods()
        osd_pods_count = len(osd_pods)
        osd_pod = [
            osd_pod for osd_pod in osd_pods if osd_pod.get()
            .get('metadata').get('labels').get(constants.CEPH_ROOK_IO_PVC_LABEL) == claim_name
        ][0]
        # Get the node that has the OSD pod running on
        logger.info(f"Getting the node that has the OSD pod {osd_pod.name} running on")
        osd_node = get_pod_node(osd_pod)
        volume_size = osd_pvc.size
        osd_prepare_pods = get_osd_prepare_pods()
        osd_prepare_pod = [
            pod for pod in osd_prepare_pods if pod.get().get('metadata')
            .get('labels').get(constants.CEPH_ROOK_IO_PVC_LABEL) == claim_name
        ][0]
        osd_prepare_job_name = osd_prepare_pod.get().get('metadata').get('labels').get('job-name')
        osd_prepare_job = get_job_obj(osd_prepare_job_name)
        # Get the corresponding OSD deployment
        logger.info(f"Getting the corresponding OSD deployment for OSD PVC {claim_name}")
        osd_deployment = [
            osd_pod for osd_pod in get_osd_deployments() if osd_pod.get()
            .get('metadata').get('labels').get(constants.CEPH_ROOK_IO_PVC_LABEL) == claim_name
        ][0]

        # Delete the volume from the platform side
        logger.info(f"Deleting volume {backing_volume} from the platform side")
        nodes.detach_volume(backing_volume, osd_node)

        # Delete the OSD deployment
        osd_deployment_name = osd_deployment.name
        logger.info(f"Deleting OSD deployment {osd_deployment_name}")
        osd_deployment.delete()
        osd_deployment.ocp.wait_for_delete(resource_name=osd_deployment_name, timeout=120)

        # Delete the OSD prepare job
        osd_prepare_job.delete()
        osd_prepare_job.ocp.wait_for_delete(resource_name=osd_prepare_job_name, timeout=120)

        # Delete the OSD PVC
        osd_pvc_name = osd_pvc.name
        logger.info(f"Deleting OSD PVC {osd_pvc_name}")
        osd_pvc.delete()
        osd_pvc.ocp.wait_for_delete(resource_name=osd_pvc_name)

        # Recreate a volume from the platform side
        logger.info("Creating a replacing volume from the platform side")
        nodes.create_and_attach_volume(osd_node, volume_size)

        # Delete the rook ceph operator pod to trigger reconciliation
        rook_operator_pod = get_operator_pods()[0]
        logger.info(f"deleting Rook Ceph operator pod {rook_operator_pod.name}")
        rook_operator_pod.delete()

        timeout = 600
        # Wait for OSD PVC to get created and reach Bound state
        logger.info("Waiting for a new OSD PVC to get created and reach Bound state")
        assert osd_pvc.ocp.wait_for_resource(
            timeout=timeout, condition=constants.STATUS_BOUND, selector=constants.OSD_PVC_GENERIC_LABEL,
            resource_count=osd_pvcs_count
        ), (
            f"Cluster recovery failed after {timeout} seconds. "
            f"Expected to have {osd_pvcs_count} OSD PVCs in status Bound. Current OSD PVCs status: "
            f"{[pvc.ocp.get_resource(pvc.get().get('metadata').get('name'), 'STATUS') for pvc in get_deviceset_pvcs()]}"
        )
        # Wait for OSD pod to get created and reach Running state
        logger.info("Waiting for a new OSD pod to get created and reach Running state")
        assert osd_pod.ocp.wait_for_resource(
            timeout=timeout, condition=constants.STATUS_RUNNING, selector=constants.OSD_APP_LABEL,
            resource_count=osd_pods_count
        ), (
            f"Cluster recovery failed after {timeout} seconds. "
            f"Expected to have {osd_pods_count} OSD pods in status Running. Current OSD pods status: "
            f"{[osd_pod.ocp.get_resource(pod.get().get('metadata').get('name'), 'STATUS') for pod in get_osd_pods()]}"
        )

        # Validate cluster is still functional
        self.sanity_helpers.health_check(tries=80)
        self.sanity_helpers.create_resources(pvc_factory, pod_factory)
| 42.427451 | 120 | 0.669933 |
a24d6b1c501f8ce3270657abedb92168865d7e8d | 9,350 | py | Python | Lib/site-packages/mysql/connector/fabric/caching.py | jmsnur/mytaxi-test | eb7f70d0ac1c4df32aaebaab118a25c83683ce13 | [
"bzip2-1.0.6"
] | 110 | 2020-04-01T02:27:38.000Z | 2022-03-01T16:17:48.000Z | Lib/site-packages/mysql/connector/fabric/caching.py | jmsnur/mytaxi-test | eb7f70d0ac1c4df32aaebaab118a25c83683ce13 | [
"bzip2-1.0.6"
] | 40 | 2020-03-31T23:30:47.000Z | 2022-03-12T00:20:57.000Z | Lib/site-packages/mysql/connector/fabric/caching.py | jmsnur/mytaxi-test | eb7f70d0ac1c4df32aaebaab118a25c83683ce13 | [
"bzip2-1.0.6"
] | 19 | 2020-04-12T19:17:22.000Z | 2022-03-07T17:03:25.000Z | # MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2013, 2015, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Implementing caching mechanisms for MySQL Fabric"""
import bisect
from datetime import datetime, timedelta
from hashlib import sha1
import logging
import threading
from . import FabricShard
_LOGGER = logging.getLogger('myconnpy-fabric')
_CACHE_TTL = 1 * 60 # 1 minute
def insort_right_rev(alist, new_element, low=0, high=None):
    """Insert *new_element* into the reverse-sorted list *alist* in place.

    Mirror image of bisect.insort_right: the list is assumed to be sorted
    in descending order, and equal elements are inserted to the right of
    existing ones.
    """
    if low < 0:
        raise ValueError('low must be non-negative')
    high = len(alist) if high is None else high
    while low < high:
        mid = (low + high) // 2
        if alist[mid] >= new_element:
            low = mid + 1
        else:
            high = mid
    alist.insert(low, new_element)
class CacheEntry(object):
    """Base class for MySQL Fabric cache entries."""

    def __init__(self, version=None, fabric_uuid=None, ttl=_CACHE_TTL):
        self.version = version
        self.fabric_uuid = fabric_uuid
        self.last_updated = datetime.utcnow()
        self._ttl = ttl

    @classmethod
    def hash_index(cls, part1, part2=None):
        """Create hash for indexing; subclasses must override this."""
        raise NotImplementedError

    @property
    def invalid(self):
        """True once the entry has outlived its TTL.

        An explicitly invalidated entry (last_updated is None) reports
        False here; only a timestamped entry can expire.
        """
        if self.last_updated is None:
            return False
        expiry = self.last_updated + timedelta(seconds=self._ttl)
        return datetime.utcnow() > expiry

    def reset_ttl(self):
        """Restart the TTL window from the current time."""
        self.last_updated = datetime.utcnow()

    def invalidate(self):
        """Mark the entry as explicitly invalidated."""
        self.last_updated = None
class CacheShardTable(CacheEntry):
    """Cache entry for a Fabric sharded table.

    Wraps a FabricShard and keeps its partition keys both ascending
    (self.keys) and descending (self.keys_reversed) for range lookups.
    Unknown attributes are delegated to the wrapped shard.
    """
    def __init__(self, shard, version=None, fabric_uuid=None):
        if not isinstance(shard, FabricShard):
            raise ValueError("shard argument must be a FabricShard instance")
        super(CacheShardTable, self).__init__(version=version,
                                              fabric_uuid=fabric_uuid)
        self.partitioning = {}
        self._shard = shard
        self.keys = []
        self.keys_reversed = []
        if shard.key and shard.group:
            self.add_partition(shard.key, shard.group)

    def __getattr__(self, attr):
        # Delegate unknown attributes (database, table, column, shard_type,
        # ...) to the wrapped FabricShard.
        return getattr(self._shard, attr)

    def add_partition(self, key, group):
        """Add sharding information for a group.

        The raw key string is converted according to the shard type before
        being recorded; a ValueError is raised for unparsable datetime keys
        or unsupported shard types.
        """
        if self.shard_type == 'RANGE':
            key = int(key)
        elif self.shard_type == 'RANGE_DATETIME':
            try:
                if ':' in key:
                    key = datetime.strptime(key, "%Y-%m-%d %H:%M:%S")
                else:
                    key = datetime.strptime(key, "%Y-%m-%d").date()
            # BUGFIX: was a bare `except:`, which converted *any* error
            # (even KeyboardInterrupt/SystemExit) into a ValueError.
            # strptime only raises ValueError/TypeError for bad input.
            except (ValueError, TypeError):
                raise ValueError(
                    "RANGE_DATETIME key could not be parsed, was: {0}".format(
                        key
                    ))
        elif self.shard_type == 'RANGE_STRING':
            pass
        elif self.shard_type == "HASH":
            pass
        else:
            raise ValueError("Unsupported sharding type {0}".format(
                self.shard_type
            ))
        self.partitioning[key] = {
            'group': group,
        }
        self.reset_ttl()
        # Maintain both sort orders incrementally
        bisect.insort_right(self.keys, key)
        insort_right_rev(self.keys_reversed, key)

    @classmethod
    def hash_index(cls, part1, part2=None):
        """Create hash for indexing: SHA-1 over database + table names."""
        return sha1(part1.encode('utf-8') + part2.encode('utf-8')).hexdigest()

    def __repr__(self):
        return "{class_}({database}.{table}.{column})".format(
            class_=self.__class__,
            database=self.database,
            table=self.table,
            column=self.column
        )
class CacheGroup(CacheEntry):
    """Cache entry describing a Fabric group and its servers."""

    def __init__(self, group_name, servers):
        # Groups carry no version/UUID information of their own.
        super(CacheGroup, self).__init__(version=None, fabric_uuid=None)
        self.group_name = group_name
        self.servers = servers

    @classmethod
    def hash_index(cls, part1, part2=None):
        """Index hash: SHA-1 of the group name (part2 is unused)."""
        return sha1(part1.encode('utf-8')).hexdigest()

    def __repr__(self):
        return "{class_}({group})".format(class_=self.__class__,
                                          group=self.group_name)
class FabricCache(object):
    """Singleton class for caching Fabric data.

    Only one instance of this class can exists globally.  Shard and group
    entries are kept in separate dicts, each guarded by its own lock for
    mutation.
    """

    def __init__(self, ttl=_CACHE_TTL):
        self._ttl = ttl
        self._sharding = {}
        self._groups = {}
        self.__sharding_lock = threading.Lock()
        self.__groups_lock = threading.Lock()

    def remove_group(self, entry_hash):
        """Remove cache entry for group; a missing entry is ignored."""
        with self.__groups_lock:
            if self._groups.pop(entry_hash, None) is not None:
                _LOGGER.debug("Group removed from cache")

    def remove_shardtable(self, entry_hash):
        """Remove cache entry for shard; a missing entry is ignored."""
        with self.__sharding_lock:
            self._sharding.pop(entry_hash, None)

    def sharding_cache_table(self, shard, version=None, fabric_uuid=None):
        """Cache information about a shard, merging into an existing entry."""
        entry_hash = CacheShardTable.hash_index(shard.database, shard.table)

        with self.__sharding_lock:
            entry = self._sharding.get(entry_hash)
            if entry is not None:
                entry.add_partition(shard.key, shard.group)
            else:
                # New cache entry
                self._sharding[entry_hash] = CacheShardTable(
                    shard, version=version, fabric_uuid=fabric_uuid)

    def cache_group(self, group_name, servers):
        """Cache information about a group, refreshing an existing entry."""
        entry_hash = CacheGroup.hash_index(group_name)

        with self.__groups_lock:
            entry = self._groups.get(entry_hash)
            if entry is not None:
                entry.servers = servers
                entry.reset_ttl()
                _LOGGER.debug("Recaching group {0} with {1}".format(
                    group_name, servers))
            else:
                # New cache entry
                self._groups[entry_hash] = CacheGroup(group_name, servers)
                _LOGGER.debug("Caching group {0} with {1}".format(
                    group_name, servers))

    def sharding_search(self, database, table):
        """Return the cached shard entry for database.table, or None.

        Expired entries are evicted and reported as a miss.
        """
        entry_hash = CacheShardTable.hash_index(database, table)

        entry = self._sharding.get(entry_hash)
        if entry is None:
            # Nothing in cache
            return None
        if entry.invalid:
            _LOGGER.debug("{0} invalidated".format(entry))
            self.remove_shardtable(entry_hash)
            return None
        return entry

    def group_search(self, group_name):
        """Return the cached group entry for *group_name*, or None.

        Expired entries are evicted and reported as a miss.
        """
        entry_hash = CacheGroup.hash_index(group_name)

        entry = self._groups.get(entry_hash)
        if entry is None:
            # Nothing in cache
            return None
        if entry.invalid:
            _LOGGER.debug("{0} invalidated".format(entry))
            self.remove_group(entry_hash)
            return None
        return entry

    def __repr__(self):
        return "{class_}(groups={nrgroups},shards={nrshards})".format(
            class_=self.__class__,
            nrgroups=len(self._groups),
            nrshards=len(self._sharding)
        )
| 33.156028 | 78 | 0.598075 |
3085c6509a32bf174b9be0c2be73b315860296ac | 2,357 | py | Python | ooobuild/lo/chart/x_complex_description_access.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/chart/x_complex_description_access.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/chart/x_complex_description_access.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Interface Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.chart
import typing
from abc import abstractmethod
from .x_chart_data_array import XChartDataArray as XChartDataArray_df4c0cdd
class XComplexDescriptionAccess(XChartDataArray_df4c0cdd):
    """
    Offers access to complex (multi-level) column and row descriptions.

    Can be obtained from interface XChartDocument via method getData().

    **since**

        OOo 3.3

    See Also:
        `API XComplexDescriptionAccess <https://api.libreoffice.org/docs/idl/ref/interfacecom_1_1sun_1_1star_1_1chart_1_1XComplexDescriptionAccess.html>`_
    """
    # UNO binding metadata generated by Cheetah — do not edit by hand.
    __ooo_ns__: str = 'com.sun.star.chart'
    __ooo_full_ns__: str = 'com.sun.star.chart.XComplexDescriptionAccess'
    __ooo_type_name__: str = 'interface'
    __pyunointerface__: str = 'com.sun.star.chart.XComplexDescriptionAccess'

    @abstractmethod
    def getComplexColumnDescriptions(self) -> 'typing.Tuple[typing.Tuple[str, ...], ...]':
        """
        retrieves the description texts for all columns.

        Returns nested tuples of description strings, one inner tuple per
        column.
        """
    @abstractmethod
    def getComplexRowDescriptions(self) -> 'typing.Tuple[typing.Tuple[str, ...], ...]':
        """
        retrieves the description texts for all rows.

        Returns nested tuples of description strings, one inner tuple per
        row.
        """
    @abstractmethod
    def setComplexColumnDescriptions(self, rColumnDescriptions: 'typing.Tuple[typing.Tuple[str, ...], ...]') -> None:
        """
        sets the description texts for all columns.

        Args:
            rColumnDescriptions: nested tuples of description strings, one
                inner tuple per column.
        """
    @abstractmethod
    def setComplexRowDescriptions(self, rRowDescriptions: 'typing.Tuple[typing.Tuple[str, ...], ...]') -> None:
        """
        sets the description texts for all rows.

        Args:
            rRowDescriptions: nested tuples of description strings, one
                inner tuple per row.
        """

__all__ = ['XComplexDescriptionAccess']
| 35.712121 | 154 | 0.703012 |
614cd99aacfc1fa1e332b985f56a69332f97cfa4 | 60,544 | py | Python | webresume/frontend/views.py | cmput401-fall2018/web-app-ci-cd-with-travis-ci-ybekele | 276c278612e16b05ea607cfd236d273b77341fe3 | [
"MIT"
] | null | null | null | webresume/frontend/views.py | cmput401-fall2018/web-app-ci-cd-with-travis-ci-ybekele | 276c278612e16b05ea607cfd236d273b77341fe3 | [
"MIT"
] | 6 | 2018-12-07T20:46:29.000Z | 2021-06-10T20:54:51.000Z | webresume/frontend/views.py | cmput401-fall2018/web-app-ci-cd-with-travis-ci-ybekele | 276c278612e16b05ea607cfd236d273b77341fe3 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.http import HttpResponse
import base64
def index(request):
    """Render the frontend entry point template (index.html)."""
    return render(request, 'index.html', {})
# Example for returning a file from an endpoint
def text_file(request):
    """Example endpoint that returns raw plain-text content.

    Responds with the literal string ``Hello World`` and an explicit
    ``text/plain`` content type.
    """
    body = "Hello World"
    return HttpResponse(body, content_type="text/plain")
# TODO: FIX THIS — the decoded image below is never returned; presumably it
# should be served from a view as an HTTP response (e.g. content_type="image/jpeg").
image = base64.b64decode('/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAICAgICAQICAgICAgIDAwYEAwMDAwcFBQQGCAcICAgHCAgJCg0LCQkMCggICw8LDA0ODg4OCQsQEQ8OEQ0ODg7/2wBDAQICAgMDAwYEBAYOCQgJDg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg7/wgARCAHGAkYDASIAAhEBAxEB/8QAHQABAAICAwEBAAAAAAAAAAAAAAYHAwUBBAgCCf/EABsBAQEBAQEBAQEAAAAAAAAAAAABAgMEBQYH/9oADAMBAAIQAxAAAAH38D4UJEpr1RzTUC4e71I8gbT0fP8AVbDmAAAAAAAAAAAAAAAAAAAAAAAAB8H2wEzsHB2GAZ2AZ2AZ3X4Oy+S/QPxI7W2ujr65nn7sw+J7/wA54n7BpTp+U/WeUxOlPo/R9Ibry1Dbj2w8zwOX2pz5Wl9lyYfN+ZfUev8AOcCl9tvN0Hj1rs/JsM3n3Q45mwAAAAAAAAAAAAAHHOKTrPPXz0x6VwfH1jWV8LeXnS2/o/KmKuar+Z9T00paF6enVD9wuxEJbHDhXYE1+P8Aqej1fu97rksU28/HfNe2LQn8r+9+teHNNv2/rrGvvQXyQTRWznsrrYzX4IXrpsSMQW8+/LWHjb9GEtOau9259CaAAAAAAAAAAAAAA4xZkebXpDjfPB95uvnbU7JvFTTWScezwxfbbN4foU983G1KO2NwMtFIcfGpmGddgTX4paX29VX0nnT0RlkHyv1sn8z+j9T8L8x7U3ux7n6DjCcEv2msV99z1FcZbD4srbNYhY5IuWdAoAAAAAAAAAAAAAAAAAD5+h1nZM9Z2R1nZHWdkdZ2R1uO0AaAAx8x/f8Am3kHpwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB89bs1fwx579ReHLw/MX1OP12gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPjxt7Hr/wTx/tMXqz8/wCef9rWbP8AW+jkaoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGq2fnD0Zhr9n5w22pdOem+0XKpa6K4Uvp49Ac0lsktrV1VHD0XrIJoFu7nzxZVT3nz9OEsjnTUzNX6hshZ2OspLRS+nVBddb87PnjsbzeOSitDmenldwO6v/isokl98wKDRe7HkugAAAAAAAAAAAAAAAAPJlizWa4niOzLag/SdzqWV0sqS9PVDb+p515tQtTdmzNFzUVgu6X7kRqu+urm67z167hUbusK7t7rznVRX9U3Ltp5HLJhqQ/zvaUlSsonakqWtenYWpsp/wBHayPXNTS674FjVbbG3opvNaS+34DLaG+x5QGwAAAAAAAAAAAAAAAPnmgL7y++aIkGpaypdrlY3Nd2JpxzB+1Eu4gO6JGq2pa9WK5xRZvEU06buUQyZ28KS1R6CRugsvUSOc1IVTy0lnMK6RYPMdkNchQAAAAAAAAAAAAAAAAAAAPJdj7C2cPFUznWm3nY8zvQy1h6j8/egbnzj2N4iM8bncLCc860tum11h82QP6nNb5nz6g8p+rDzBJ9309W2POHpqmudpjr2zk6Y1GacdHF2nn66NTbrvSFX2hp9CaAAAAAAAAAAAAAAAAAAAA+eahjmZ6AVLp9S8kEj5bXPHmiX0yqHdFi8UZvbLVeYbKltZx52PRFOWfBjezKHTHUCaAAAA+XNXZlpOOdUD5VhZ0n040ku8V3XGs+iuar6MXDz80+tw8+ZbF1LUUHmxb3VTBbPSKsLPt+VU2rl9nGr881Ra6fRwvLjBHYR2Q2chQAAKMh+9xZdPdaGRdOdiVJZtDzXrjxn6GoouaATDFldWsp3Y1FvRlB37CnbjTWLy96lomSBSPdSzUgMJm2kk0/Ytrr7uy1PbyJCcP39RofrsyFYNxYfQyy0Db8b6yc7PH1cWv9fYWyzZDb/n
P0brPmOGb30jjVIaTPJuvPawv05575dK7v6kbf786o7smnnHrDa+l8c3yuWxartSb8ibvr7qK02M0m1U/IpH9VCdLu/s33T61xc75tseLSnpiEWzAvQhIROgAAHDiC5TwacI5I05C/PMEnWX0NAAAAAAAAAAAAAAAHHIAAAAAAAAAAAAAAq7WbPVs3HAp6zfLc01LUy9CHWCZetXPdLYuOEzi5+hOgAAAAAAAAAAAAAAAAAAAAAAAAAAAAADjp6mSRkct7zP14+tfJFfMXlMUklfPxr7dmAAAA1e0AAAADq4zvAAOr2D6AdXsH0AAAAAAAAAAAAAAADyjLJBAcpLuag9I750PtYzJD0pzxzN+c59WfpTKra3tKh4sWV1zDd59SwTuU2k8sOldKtkXj56ukhmKJ4pZzaXn30FVM7uA9rFyzXoVNvMi7+w0tks0laaubtKxtZQ2sWBdMAkstM2bCutLsbziWhsrezoT1pdjL9x5yi6N35ymmpJd7DvpfRQmgAAAAAAAAAAAIFDrt5k6VQTGb+Lv5/mlnvdwpe4s40dTXrwR2LWUypKW2FxpR0vsIlF2PLuJaPuvKshWrshLXNj8rKpkM0TVJT+XEivVmhK0wWiXQVNfGGPEnrv6ke8xfVT9nVG3X2FRTVWBxFG23txAvifkhSal+haAAAAAAAAAAABxqNvSvm5eZvWPj63/y+PWo/Y+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADH4h9v/AB4r+ffbtD1B8Xl0t38/f6TYboAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABgGdgM52AZ2AZ2AZ2AZ2AZ2AZ2AZ2AZ3TymdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGd8mvoEEzSvp6xofiX44g+872eyNb/tZprsia1GikeS4iOSRcWd7OZ1yGgMGfBnZ+ePiM+bpKfit9tprIr5Wxzt7i33kD0FecD+q+xef6HrmR09MvV8/52fnaf8Al9NrwXdfnpu+3pX4uuXj6/StYyLwt7PB7mlv5ze3Zxn+qrGM+32+iopDa55+Sa7Lwlufl9/eff8AJGq68vUe98AWR4/T7s70OmP0/n/Q7QAAAAAAACGyLU5fX4I3l7uf1eXD3ejsePaRDwfTA0mDBl9Xh6Lva3txZMvzZL3HPzvqgYM+DOzE6n9BfHh9NIXL2+knhp3ZV6e/F+xaxJ5qP6UfyePlfVV3h5J9+/PMrmHQt9iePfcdJ+L3UhfWgunyfTj9NzmCe/59e+mvL/rTXm1mmsPR+70divbMnk8f5HaT1Np/kejrc+gtT7vN5gnu9+fD9H1pZMGnX0Pk8jcAAAAAAAAMAzsBnOwDOwDOwDPxhGdgGdgGdgGdgDPgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGd8mvoHHMK2NzJECkpt1d4ksjmP7Wa7fNdyKyRK/7pM0f+VkKuN2zLEMmcvIaAAAAAAAAAAAAAAAAAAAAAAAAAAA0v3tzMX2m0Gt18hHR7zldNk2pIz292rRt4iObPYDS7oUFAAAAAAAAAAAAAAAAAAAAAAAAAAAcRbUXnYLFlnQAADhBudYnHMHm8cibAA4Rmut8rsRmTY2DQABHvlmRo7ITkNAAAAAAAAAAAAAAAAAAAaPRThcaHf8cyg0ABFOlN1xW9kclCaAA6FVXI1jod5znQKABqW2M6nbDQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//xAA3EAABBAIBAgIHBwMFAQEAAAAFAgMEBgABBxITFRYIEBEUFyBQMDI3OEBBYCEzNBgxNTZwI0j/2gAIAQEAAQUC/hdh5GrlcLwuXK5PJ+dhmT+VwI2d8aqtkLl+qSyKVJWj6lvetJ6t51bzq3nVvOredW
86t51bzq3nVvOrede9a/b1W5SlcqBoLjdlZYekLI8fGzkSaOeikcqylL4ydlxWZbs+DHIfK7LiMy3Z8GOQ37NajSos2N6ve4nif0FbzLbyXmVSMX9z7Rv/AB/UUj+9c7BJ9dAW57kefJZBPnDwa2MB/MNtHsRLFVPwtvs5Y28UdDc2wGbcbjS5BKxkuTylzIIPS7Mbn7rhIzq+36cobd6MhubYK5MspqRHPGInETJY8H5GdN3FXGK522OXIt+Iaf8A1+/b0abJ2SzvMka1Z2uvcdz+36p8khq1vFOyVtc6SN4/BWDS3x9plSCgexFZGo9zded84a6w5TZYdjf+P6nHXGfSWuLrS+WRpaHGzZ/UaVGKjz1y5CfT5sqn4WkgPiNuk1rq5Ee4+fcFQ65uLeJdTl+Oya13zkQF7ryCTA+I22VWtq5EA1/wQaii9FPmAEyL3Kdh7qDNX6rJAqE6C9+vVr2ocjEq5ZuwTsdnbTtLDn9vJiZa4GxZza4DE1CDgzxmqy4jU0MOq0qOUgVHUJMemusuyaih8wLgIF13G/8AH9Rt/cXm6SOkSCoAlHiJnyWpsCucek9WC3SWZd/qn4WuOIab956Vbls6XuYw2yqTHQtMhhchEqM68yQZeFNEGXZLLyXmvpDv9r5Pb9gj+x6rE3p7l90pvUpBR7vhCgqO7WLLMfDHAzI4ZVPwtm9fdcQ31qhvukHnXn3YDLvthMqyMwtEGMxIS1GivbQM0vwz6R/vrtJztIztIztIztIztIztIztIztIztIztIztI+S3cTTC1u+C1pz4L2n2x+K7rG35A5G03G4jszpWLGahjFoQ43qK11fwL9/4Q8Qjszvb/AAd1xDUeaZNOygRBROt/wa1vqbrJBp2JJ4/n+2T/AAa5E0vWUX2Hpok03DkNOoej/wAE/wB8TVwaV2Gr2Xd4TWRC4kWKxCg/wT3yJ4nkaXFmx8kSI8SG2429H+fUuIolkaXFlp+xjS4s2P6m3G3mcffYjRW3EPR/pKvzN7+7x481G4tRbisyOSeauXEsktqs1mLapabHha3eF3bzu9ELxbXK1aydmXHscC1vuW2OtDXpEhrCs6SDHYq6lHupOeDG2AeTpzd0nPQ6yf1ZK9NmRx4rVsOPDQZmKersqSzCHJuc5yDQ50Yfw95sOrGGzvi/CtVMOweKVWw74WeLRjXBQq0km6V5mFeRXbYeaGPWka1QHLgRgINHogQdu3T4L2t62n6EWFQDPpD74+p/sGod36MQUXZJNSqQtoam1wRZIo8TtlVn49rW/SVN61vnq4/iNJInjPIL0N6DzseCyj3M1MKon1OvfhVSda1xUPQ6v0cBD8T4ecZdO+P7CV2Ep2oFkm1jjH8M76h5fFEB+Hvj+rl/A+AnB9jk1YBr2+jFGMrA+j9KG2J2pDvyq156L8OAhl0Bw2WFWHVLHthn/R1l6udUEWF6SQvRavnX6+Na0xX/AKFsFE3ecDgYYQBuiDELFioIcSZBwDo+JTh0cvmwcXd4lBIsu2kgUQoZJVMeSOs0gNHKNg4rV1RX4bV4h1qDCACxzIoA2NYq1Eioo661x7Ceh8czIkeeMYoglvQMFDADHEIcZTQw6HoFXFwKexRBTehtaHDKvBqA6GBYoglGMVIfHovkAR7vICDJNXZoorSYlZGRaUmhjOkmEGmAyKMN7+taSn6F+/6HYUPud9WVMl/6h9/d4+lyplCkXarxSNskur4qYKRhVDF2kAZl4VsIYJgs2LNMFLMDCvDSw4vAk3OtRHJF3ho5f7sD4lyLdXYmxZoYaiE7UADzYR8SSKY2WEjgdCsaTFfKkI4sBUbpCXT5BODEAJJwV16YRES5JMyLDRRVgDm0lLQBDShxSAVgfSi0efK9IbYS3+wRKfhejSFeORKXSR5YaLtwYpPl+PQJFtwwNPDuSK+XDTb5Q2m5TqWkDfSIpkVhd0V+Zv8A/StRiRl8ljW0RfSG49ablBgEKMP5/wAq3+Fxv+
FUv/i6TBSS4JblqLcX7lvt8SWeO3EtEJtsj6QMsaIavVPnHNQAME43yh9K2Cl75h3/ALV+rrhcdxBF1EQAg2SME2IA+UfdA2M2QwkAKt3IcBLOXRdeOCrEEr0uLYa+ElCTBgEZVyB4JL+LgAJKF2NgJKa5ZGxp+itTiSGOaMAAJIxVTCmwEdSetqphDdfQPqD8Pljyg78YzwOUUshavz92oeAKv29NdPhDIYWZaLfW5VLHPGA4MeDgfVjNgmMWSWatwKLZbC4M47ac5BeiCfMndq52WadwUduZxyP578QsFgjgYfvPIO2BR90/WDh28ABsHzr4tku/Q2LREkpmC7UddAVoR477n9oZPODbV8kY85J5P9RBySyCrBhZ2lNXGWvli2HJAGsSDs6PyhlcPTC4QGavJ8BB86+LFTth1ya/I5CixB9kQU43EFb2arw7zh4rkI3Lk8pesUblzuQfkX1dob4n4T9gfrpR20yLNYh0S7uw3+DYZq2JFCphWZH4/wD8r9qgQOQ8hlrM+Unf/b0lcaixWH+S/wDp3qn1BkgX1/RPJIuC7Bt4iKNgG4sSk0L4fjthJheSY9GmfVYiOKdVuKV4tp018hxxdycsdUpNAitCDrpMhZrAAjVEPbUrJ3ewhIlTk3EONkcr2WEJjQpUqvwbEe2+b5NaqKg9pG10Mrnwm4J+JlTixo0L9q5KRX1PjHo3BRZ1B3kuyvTI3O3jdwyh7WqlUcnYYtBGk7DJKlpM+N6QxK4WJqfCBeXuHagUskfj0URPSyOLDJNc+e6Jp/JU0yBL30IYDwOTIoVk1zSHjar3NJ1urotdPkR2uV6jvwizVnXic4YWkBvRs+H47YOWXkmPRqrdaHwmflJwS4XkIifKmAtiByYfA0a3bZGij3isyvEZdeIjLD4oTrRGXXlIuHW9aAk6VP8AN5D3ariCUaVyFDlTap8l9iSpdduMSVJK2oMs7SvM5/wSZXZg30eyceQ5w3CjSU8I0iO/F4xt4aQaq0ywWAkBKhSw+UZkFrhBOwJO+R77ElTK7cI0xNlMIm+e7O8fPYegkYN8VLN2SyuamiOcpRA0NsFTFPouuXMQTeusoezIrPH4wk0RIw5a+fP2p0OXGrNRMSgFLi2v3omqHL/1DWQExYKuGkmJnF9YPSAtIg2j34tjizIvmtlolZ+QktlKndRxg6VsYKJLZ5cciS9+kEOVOq9tFMnV843oeVbLjRmhVAGVyaR9H/zOf8DmV2YN9HsYlSK5/EUn4a736oBD3/5DR+GD39eI+cvF2vP/AL1mgbSeRBU+2njVZLlXLKCscrYKH54MAn7XOi8cyGL1AFKjxbpVvrxCteIF2qh2pPqon/JCfzBh4LxKggbcDZ4/P+Kn+MFwuKkgq+3CZqP8CYiRYq0Q4qCMeJFi46IFSJ2JChkTfr7z7MaPGLCpzuSSwqE8y+zIjuTIjMz1qOBUSvb/AEjS4s2N9g9LixnfsXHEMx2X2JMX5HHENMetxxDTH0CzPQfjCRrleOALQVJhuOhdMBwRpaKik2qy/jP6rZIkkbimn1lI2uByINqjT4wvh5VtO+FqtEZXGcch36jHt8VXGTlwJwY5k9CDCVW4hBUlWlIsJKJBMk7K7Gsoax7JGsl2p9VhCWSOWbTcJ85I+xQydRiXciSAsXR8oPiWSIX45VZ2BvEsm5z48AhYB42pyrqTGiBkzxGvcoPSk0vZ9Q6lOXAnEisPNSYfKD0pNL2fUOpTlwJxIsuyNxbCbtbIW2ljfhp565PauAWzKI2D9S6Ugkr3YKQCggjyp5PhrxiBuqW4tAtDVl/pzJ6rHtInmfW9bSLsLZiyVwwoDwLKG2J2ojvyqxHmmuGhLYd30cZLdyqoexyZBKzla+efr4pnUatXX/tZUVHJcjCjVhh3zOPt6TCNTR0nKY4w5xgC2lyVQUp1xHxp/Tjmu738NSn5XbRrXwqIb6aBeXGUcV1n8POTdb+Gl79iIlidjt0CkIdRxZ
ybrfw0vfsREsTsdugvw5H+nWe1q0N1+Xqy8n138anfzK/qTNdFnWtUYc472mvdVUYYiSigxoEs3V4pyf5MxCehqdAhkxnkKAmOOGQRIqHVhcOnMUQSjBddHi6szQhLSIdcGxaQmhC+2RCjCoRFFGbWlOkoJg4hUgXrA8wQE1geJIYRqQ4gZDgh4SA7RRW5MYKPhVoQKjha4DCRQASFWIEEHIqg+RQp8BoiBsXuADjU3Eq7VQDRVwqmYERDYJivQ0VVFCE+1CEobMCIhsExXoaKqihCfbIhsSRAWvwANdAV2BXBkMHFg2fYOKq7/q5ZPccx+uUhK244gTDk/R5slEIXKc3tNTn++1T+DXKYy2MntNon0V96OT/gv7WKa4Wso+S2KJRZRaA9AmNzxX8F6de05x06WubTWm4v/h3RrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWKTrq6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NfI4/K8UjLlOyde9J0iW+uQ29N8FYRJW2hUvYSI4pyJ6lOL0ZdlyG5zj0pTrzktG0K6mvlV9/wBXt9SlaQ0/aBsaO1yDX3n49igyZk4lDHIXyrXm3W+VK84oYUiFxvtyYaiwkxJ7MzcqS1EgeahicTdBTkiCbiT5jryWkz7mCHSYthGzI7D7chhx1tptZBpGNuIdaOnYFdr4nkEGXkEb0BFnwloEWCROOwoEzzuC958yQfZHfbkw/wBFttvaNQoepHSnW9NNpxMCCjGosWOpTDC46EIba9T0aNIzTLScchxHt7hw1a+ZX38Vv2afItsqJ2KPCEjUTt0y8uKk3SNHa1XqlP2Qg3GShurBYreqSfS1OptNW45WHn3ES7LMa8YqEtmSHPoS7TGVqJHFCxraKMpb0IyveoRWDGdyvvPjS1Xe74IqXbW7ILMIbDkF5zBrSuNKuLWQg3WLEMzOJIvZJckS39XGCHU6NFKkDSwDfVU/0U7Xbxja9w9ve1DsrTrqlr3Bj9bBH5JDLzzkFS9okr6yrSnpkHTrkXNIdiufIr7+TJbjTsiMrREDHmonPf4FrebRfQjrrKK+yhvZ5CJkbStQmtdlyOASwmOevMIROlm5M2yViz7FPEpPvnEk0eyxJiLjqcprqH4ljnpjGSbiJKSEuDFn8eykS6bYorTUnTwuSErkaI9G5cQtfGYw9MHNEHpnf4bWr4icjK6uTGzM3o70/s1J1L/H/wCicVORIZ0qKw+1Kcd2y9FlaiSERI6H1zfkn6lL2h51qBL09t1TM1sa2065BSiY898ivv8AySWlPD18b2qScGceEoc0fXJbMwtBURBweOSDap3HM11Q2E7Fj2bjAuUt0LiewxpmuILF3/B29UcnSCT8FNHN+5AwWxUO8VA1YCUSk3NgVP4puM+fR6y9VKSaBLLP7Bmdg6+Akh3SEJueLm8SF/G18RWRW6JQ5VXMW/j8kctrPGFnjsP8a3OUmuB/AaV9GV9/6p0Iztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztozto/hq170alqcU5rftGxtuNiIE7TkyK+/wCXYTutV5LiXY63ZLUKDtPs0hWwyi7aZrMl596dpHaXuX7q8qR4oyvfiv1hcTTkl2P3mvDU45H07AfjofSgcwh9lrTEPFQ2lJYj6j54cno3E174iHpuQ/D78p2Al7a2UuO6ia96/wDSp5
JuHqCWRtjW9KT85UkuMvTs+dGGyZTk/wCcpIcjDPF5nuouQ7IGfPIJR40vZKPooySjvkPpEyCzMajiUoGj47sUd85aO7JGteJO6hCFtFfnfZRIi+DSvf2GUR4vzuw4z8j3ON76iHGblf8AnH//xAA6EQABAwEFAwkGBQUBAAAAAAABAAIRAwQSITFBEFHwBSAyQEJQYYHBEyIwcaHRBhQzkbEHFVJg4fH/2gAIAQMBAT8BVOm57g1uatPJFts7b9WmQOuSpUqVKnbZH+xqNrOyBX4q/qSatnNFlKZ/dcnWs2ineIhRgUDMlZq8E83TC/79NhIAnjKeol+5TgpVHlRj3vaQRdKOizV4Kdok0i0K18l2t1o92mSrLyeLK27twWCdBWiKu4R1CVczUYJwkEKlyfTpvvAn91KGWGwRG0EgyFyL+JDZadRlTG8FXq3p13bLquoMCuhAdVgKFChQFA5j6L2gFwie5HuutJVnu1KBqmtkYDdf/O5AQJBZeB4lVuTnU6bIcChlj3H2o2zijgpROeyZU7Ab2SlNxKYZEqclOCwiVPUz05Ueqb4qMVUF69xuR+yIxPjCaIgbiqTYuhaNHGSnJUvdTQBdG77JpxaSmYAJrYjzTd/GaY2GgLHNAQOpdqNm5Ao7DgJWqBWaOCOAQQQQPVj+oeNFHqm5FYyn5p3a41RGDuNU/GfL+E7Pz9F2ncblUz8yieiiIm7plx5/REaDwTsUM+rFSpR2TkidmYTfhDFXhdvJxhBXkRjCDggVrs1jaDzT0sV/j8vVHFvmnH3fNZghSndIca7D0Sh0/L7pmbVTHQ43Kj0FphmsMY3Jwzjd6p/RPyTtUc3JqI9+7p9+Cuy6eMlq35puSONQ8aKMG+aGaqZu43KpjKcOnxuVXVD9QDxnjzVPSeMVHR8/55p7kdsfm75+qd0vMrdCZlj3Fn3Hm4oY3U/tRu9dhPvoAYDx9EMS3xTHYMnjBA6lYyjmfJDNDF3mmmYPimuloK1j5/RAm6HeBR7Kd0gY0K3J3SBjQozh5px3J/a8P+fGKq2d9NrSdVGajYMFChBRsA2aygNjcFGKlQpWWKgfHqvutJVkfZfYlznkVNBp857ka9zCY/iVaLDTFJgZU+iH+kEGVCDUBsIKuocxrC7JOY5oxVns7qjZTGS+6vyDbsyqjIcQFUswbRD1ZKDKoIOatHJopU7xVjsjq7iAqtF1N5BVGxS2XKvQNN0FVrG9tMPATaTSU6xwE4QfhNMVSiypJj14/ZUw68MNpI9tqnhwqE5hXThnzLNaXUHXmq0V3VnXnKzWoU2QUHw+8v75avy/6WKe6TITqzy2CcFyfaGUjiuUeUKdRl1pwXJFrbQqEuKtto9rVJ0VC1gNAJVWpefMp1qJp3CVSfddKNuZHinOlxPd8oFXggVexRcFKvIHrsKFCAUBQoChR10vAMc0uEppnmOqAFA8wuU9VLQSgIHMLcU1oHMcwHmwO+v/xAA4EQABAwICBQoFBAIDAAAAAAABAAIRAyESMQRBUcHwBRATIEBQYXGBoSIyQrHRMGKR4QYUM2Di/9oACAECAQE/AU4wLpmk0nmGm/cg0Y1aTgM8k3kd2iOxuzWjcoNfVFM5lfUAi24ChYd/smCRKaJ9vdap42JrCYHGcdgKwDWVChaRyW+m1hBmRKGtYSsJlYefkvRK9WmX02zhIlf5CxtOn0mQ8VyVU0Wtyw46PUxBrb7AScpQznmvK4/lAEZK49vZC3HqsV5429gIQftUphgyq2nVKjMJA/hQpPNnz6HypX0VhbSMSuWqukcoaMaL3TK5J5KdolR7sIAdqCCxLEsWaLzKJ7LKlSpKlSepS0qlULmsMkZ9yV6nR0y5cjPwVnEn5u5NPoVKzMLVR5HrCqXApsxfuNximHI5rz5hdQoujZEQoUp3w5rbKdICeMJhRnOpQsJmEOxkHooHjvU3PonXhE2TDGDw/tM/K1DwlP8AiBO0b1Wu5x4zX1OKjNVviT
74jt/Kdk4DWn/EXKocWLxhOuDxqTjLyQhGSNz2J1mYkc4W3mF1qTRiMcxUICU28rLnjszp6GB471rPon5hGMKp5JurwG5MMFnluTdXkfumbt6+hqbl6BYZxDaEIJE68+PT3TCcz4+6bY34upsOzBRzC5hFQhda4UQ5HrbOcHWiYuo+LChcxxtUotX0ytZCNkMgebx5pRHVHy2QgYvPch8x8t4TbOPlvWTmmEBdM+U+e4IWK+tvmET8Hr+FVmHbLfdVCYeRxmqsY1rvkhm3Ft3JhuJ27lS+cE7d6ZkOOLJmTU9SQzFrt7IgB7Y8d/8ASd8jvJPuVbo2zlf7lfU70TslS+VnG1UrAJh+Ty3FUosPDcjPRk64jj0hVs3Rn/X5U3dxq6ov1Bt7gZzDUP27gtXoPsjrTomB3AO5QcLURGLzVPNs7T9uYf8AHKk3PgPunCGu8E9t3AcXRExG1WhN/O5HKUXQ30VS2IbAnRjc3Yv/AD7qAZHiPdCIPgmZR+5bUzKP3K0/wg3UUwTh8R+f1hZaNp9KvUe1mbc1OSmyFkbqbIlG4UnmJlDYtUI5yhx6c2K0KFNlCNxCk/r6VV6Ok5y5Iq9HWuZnuTTNGdXbGKFT5BioXSm5X/6RNlKJCJ5gQp6tbSG0xJKp1mv+Uyi5ya4wZX+0ZhGpDQSqWkOdVjUq9SoxwOpN0wuqADJV6opiVTrYjCFMCJ1qq0sKo6aDULHHJPqVI+FP0uoAcLpVFxcwH9J4mmFip8Rx/KqFuEwefCeii10yCwbUSPDgdTT9ApaXT6N60HQmaLS6NiNG+aDbFf65xSgxpAxLo6c5LSaT3OtkqGi1A+SFpVJzm2VGjhjaqoa9rIN1pUEnCZTNFDamMDNFtk7QJNrBNbhbHd8IhYU6xWHWsJ5sN0R22VKlFSpUqe3BpInqhtkR1G0yQiI6mEqOygmESSeoDZOcTn1GvI6s99f/xABcEAABAwIDAwcIBAcLCAcJAAABAgMEABEFEiETMUEGFCIyUWGREBUjQnGBkqEzUrHBFiAwUHLR8CQ0QGBidHWCsrPhQ1Nwc5OiwtIHNmOlw9PxJjVUg4WVo6Ti/9oACAEBAAY/Av4lmBKMuXLT9I3FbCtn7bkCmorEHGw44dMzTdv7dfvef8Kf10WJOHY6FcFJZbsr2dOv3hj/APsWv/MpuOtGKwQs22shlOQfCon5UFJIKSLgj853NfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VaoV+JykKiSfOb4//ACKrDgVqbeWc5SnelFr6+0cKysNOPK7G0k01ZTMAtm6dvx8KdirSgvIWpN2lZkKtvt5OTq1G6lYYwSf/AJaaZjvSo7T730Ta3AFOewcaaivTYjMp36JlbwC1+wcfxmY70qO0+99E2twBTnsHGmoj02IzKd+iZW6AtfsHGrnQVtochiUzf6RlwKT4jy8y5zHEwpzbDaDPl7bb7fmJttx1tC19RKlWKvZS2kutqdT1kBWqfJ/WH2/lUfo+XGWSjaJOMPko+tZxRt8qkq5SxZEzE3GULaypCkt5+koWvvNwb04zyawmHHCEqOZ/UgJtfQWHEVtcaL6EKJsWXMoPb1TUTDcGZbalNLuvZA3Wd+nfvtSX4duaSkbRIG5Cty0+P21ya/oqP/dJrktNbYVKdaU7kZTvWrogDxrGMTxYrXypbd2bzbotsE7hkH3/ALGbMhY2zJRHl5TDi4dtGG03sAt5QBurXdfuIrzRhuKowuMrDUSCTFS7k3br7+ysUjI5Qs4V5v8ARMtrgbUzVp6xJA6Fz2dtcjDhchmArFEr24UyFpuLAnXXTUjWsawDF5zeJmKhDjcgMBs6gaWGnGuSs1thUpxou5GU71q6IArGMTxcuL5Utu7N5DotsE7ugOzv/Y4jKfxpDcCHNcb2AhtkvAeqTwA079TWDycOcix5b+KlkhEVtCFDXQgCw9o1pzC8WxFrGIq8PVKSUx
ktFGXNoLezjepHKpGLRG2nFdCGIafQpz5bhR3nuN9D21ExN+7pRyb27lvWsCo1BnO43Gk7aTlfwhMEp2KCd4ctqff+YDa2bhXN5paw9WH9ZcfrXO6xv3UlUNQxNeIdBJk9fNcbzekF0BLlukBwNf1h9vlWAt5Locs0gK8KYjKjvnMOkQgnhw7axKbCd2EppAKF5QbdIDcdKmSnuWZxyNEiqeejjCNgQBxzfdUFrEMDkYXHng8wfU+lzaaZrKA6pIrkcqTMzCauXzv0aRnyXy7hpbuqNKXgcpnAZMjYsYgX0kkk2F294F+NQG3IGR56euK+kPX2GVYRm3a6qT409KDGwaElxto577RKVZc27S5B08iP0fLKWyQHPwhcAv3vEViymXEuJD+W43aCxFCQ0woJfsl9JuUtaGyge821ppUZb0Tmuo2PVdWb3SeBAFt9Yfifo2cYTICTHB0WO0GlwkPJeDMh1WibZSog5fdauTX9FR/7pNYJivOtj5vUo7LZX2mbvvpUblDBm8wfCcktvY5hIT36i3+A7Km4Y3yiktYS68Xm43NQcir8VXuR3aV56VMDivN6YpaDNt1ule/du+dT5mDY/JwVM/8AfbaGAvMe1JvdJrk9ME98+a830w2i3rgb1X36VimO86z88aSjYbO2TKAN99d3ZWCYrzrY+b1KOy2V9pm776VG5RQZvMHwnJKb2OYSU9+ot/gOysRj8751zqUt/NssuXNbTeawzCfOl+aT+dbXm3X/AJNs2ntprHlvFxDUFUdUQNX2l83G/fUzAoOMY2pXOMsTA3oOV1K84vmUL349HTXhUPE5L6VBGFCE7F2eitNelf7qixkcp8R8yx3c7cNCAhXblLgNyO635gIvl031GVEdXiz864cQ4m2Yjvp5uW4rCHIIu2htObKTxvfupCVKKyBqo8a94+3yKTDW22/wUvcKzmVDU5wcPWHsOXSr4itiQ6n6NaRqPlUvDNvzbbpttMmbLqDuuKfgyAVMvNFtdtDYi1QXcQxx/FI8AHmLBYS3s9Mt1EdYgVyeCp+2GGKkXGwtttrf+Vpb31GirxyU/gMaRtmMP2Kbgg3F3N5F+FY1LTN2Rmt+iTsf3uvoEr363LaT7qh4ehWdLDQRmtbMeJ9+/wAiP0fLjEkaFrHHVg+x4mpD7dnHVOFS0qPSB++lQ8cwl59l1GQulokAfyrd9jpUbC8BwKbIW2iypYYIClHeqxG/vqNiOJOIh7JwLS0F5nD3m26sTkRl7RpchR999a5Nf0VH/uk1dXbYWG+htWXmAfWVYj5E1JzXQGCM5PeLi1NrkLREKtyXlgH7aSFvsoKuqCsa0plD7S3U9ZAULitm3IYcctfKlYJpcxIcDad6SNaZaAcSp1kOpuOB++itIIGYp17jb80+8fb+VR+j5eUDZJSk4s/f/aq7a9D6RhIsjajpW93GrOLO/eFaCkuuuuyiBqhKlC/2Vi6MTVGbQtKubqa0Wgm4y249u+n2IbbWI7FYU9MCiFtJV1UlO7XuvXJr+io/90mk5L5tmoIt26fO16cDTC2kFjKFKQUp14m/ZbjrU19pTgWhxC2L9RZCR49lXDMmMlbPqxbuKOt0EkWA9vjTBLa2lDD0t5lJ3GoTThxLas70lpIQnSx6VtQe41gnoVoKFHadG2Xoq3++ojGxcS242lbhI6pQNx/3fA010FNOJhNZFKT1VjN+xq7jamll1ZKFDUXUfzTauPxGvW+I163xGvW+I163xGvW+I163xGvW+I163xGvW+I163xGuJ9/wCJKxXBp0NrnSy481KzJyqO8ggGv3/gH+3d/wDLq/P8A/2zv/l16HE+Tyf66z/4dZUY9gjaf5C1D/wqUrFMcgJYe/fK2XFuLWOzVIqPEYFmWGkto9gFhRStIWnsNXO0X2Z3FKHzP+gxLCic3HTd/Ehbi1JQlIuSo6ChOUjZNuE7K1vl7KYkuZNvudCTuP8AEcRkKbSqU4GQpSt1+yuaPOPyA0
LpVnJCL9gqXBu2bp2m/pdnv/iOWmXVSWmU2twCuNqdXiinWWcnQy8T30y0uO4wdvcyEDpgXpDrSwttQulQ4/xFtRUIWv8ArFW+2pIwyG/zFxfoC0sBAFuOulNh+Eja5RmKFqGvjSI0dGzZR1U/xF5lzqPzzLm2G0Ge3bbfbybaHJjymr2zsuBY+XkXIlPtRmE9Zx1YSke80h1taHGli6VJNwR+QVDEmOZaU5lMBwZwO22/yOGJKjyUoVkWWnArKocDbj+S20OTHlNXtnZcCxf3eVLjS0OtncpJuD5FvyHW2GU6rccVlSn2mkOtLQ42sXSpJuFDt/NTf9E/eaNPSH1hpluS6palbgBSpmFck587CvVkKkJbUsdqUEXNT/NAW448iyWl2SoLSQcpvuOlYJh3NXsQxN1pLDEZoi61JSL68BUXDMdwN/BHpekVfOEvIWey43HyNYKnDJE512PtGtivpKVc9G3uve9Kw/GsAnYdPUjNFZadD/ONbAAjjUXCsawJ7BnZQPNVmQl1Kz2abq8z4VhcjGcUS3tHG0OBtDY71HdTWC4zgr+CznklTF3w6hy3eKxlxxaW204UkqUToOprUow8PUMHaOVE5xy22V/JRbd33rlBiMTCGIAhPvZ2miBtlITfMbJGppOIYZyUmTYyUXfXzkICTxCbi67dwrz0hZZiBJU7tN7dt96ViTXJfEXMBBvzvapz5frBveR76XPEbmqQ+psJ2ma9ra7h20/NlL2cdlBWs15xhcjpj+GlOdDq5iELUntyWJprEYoWlCyQUL3pI3inpUlYbYaQVrWeAFec08mMQ8w7+ebZOfL9bZ77e+pE+UvZxmpDi1GvOMbkdMdwzJnS6qYhKyntyWJqXiWFRHJDT7SkPBTgQpgWOZXfbsG+9NS8Vg8ww6JGTsnw+Fl8fojdw39tecWeR0xeGZNoHVTUBZR27OxNYjiUTMGXWOqvek5rEGoLsLkxOnYZGjIQ5KD4STlABKUb1Cvwh2yjAyX3dO+7Lb619KOJK5GyxhmXPtTNRnCO3Ja9N8ovTKhuJGzQE9MqvbL7b0zJxvkxLwzDHFBPORKQ6UX3ZkjdTLriXZTz6skaOyMy3Vd1Mq5QcnJODwXFBIlCSl5KSfrW6tAg3H5jRCxJjnEbzZmy5ynW57DX/uj/APad/wCasYSxe4eVu+qFJv8AKsOeg8tUtxFR07NCcJaVkFt2/hurFsmORsadflZ31MoSnZr4ggKNjWDxH8UfwnGs6jhzzaDv0uOzs4ioHnmRBxvDJEkMB1Ccjyb93/r7fJEJ4YSbeKq5JXH+Re/sqrkN/PVf+HWIYJg8+Pg0aClJekFgOuLKhwB0tXJtiVjMrGJmVSnC7YBFwqwCRurGYEaYYl8NClf9pa1knuvahEWyiJPgHm8mOBbKRxt3/rrlx/OZf93WD6W9D95rlGlgHMJazp9UFBPyvWHPhTYhCEm54ABOtPlGiOfOZflU7Ew2Hlso6KTuJJAHu1rzni/KxUKIpjbLYhR0oyJtf6TfQ/nK/urFQzckJSVW+qFC9RX8zYg8ySSb6BOWpmJtth1TchWzQd1yQBfu1pzEsZ5WrixTHLq2IUdKMote2ffWKAf5p+oGJNtpecbioDaVbrk2F6kYhjXK1xmPzcuLjw2EtgaXtn3mpv6K/wC8rCnW1IEZMJNzwFk61ieJxmQ4HcTWISV9XWwv7N9TpuN8rXQ0mMpSo8WOlpJNurm3kHdurDWMckLiRHFkIeQkkoXtFW3A0ucnF4WP4VHHSRJRlcy9x4/F7q5FT48w4OiXGJYfcZDuyWobrHS+oFSGcY5csDDlD0xdwtpCd/bm01qEwHxKS2whIeG5yw3+/wDMY5QbSRzwR9hkzDJbwvf3+RWGx1PPx1LUo7chR138N1OiHiOPYXGcN1xYkzK0fdakQsPYDDCTftKj2nvpMee2s5FZmnEKyrbPaDTEyTNxjGH2DdjzhK2gaPaBbyIx8uP88TH2ARcZMt
+y17++sOxlxyQJUNKktJSRkOYWN9KwqdIckJdw93aMhtQAJ036d1ec0SsTwuflyrfgSNkpY79KgTmTNTNjO7Qvl7Mt4n65I191qkY6HJHPHmAypNxksLd176dtO48w7JZlOt5Hm0EbN3vItv3caxXDWnZamJ63FvFShmTnFjbSo2HR1OrZYRlQXD0vfWJJgR5GKIGd8sPLF3L7x1furn6sZmw4ahtHcE84+jzcU7O1zrTW3aLCn3lPJQRuSd3yFPw5TYejupyuIPGkNPTcamwUG6IT8y7Cf6op6JCXIUwt4u2dIOUngLDdS23EhbahZSSND3UUJkYuMOK85w7nZ5sf6v8AjT2B2elQHVKK0vqBOvsAptt+ZjWIQmzduFJllTCezoi1SsHbMh+C+VZ0PKGgVoQLAaVMwxcjEcQgSEhOwlPZktgX6lgMtNtyJmM4jDb+jiS5ZUyj+qLVJ5PIfm8xeUSSVJzpuQdNPuoR25uNMQrAOxWpeVp63FSbbz3V5mciI83ZMoaTpl7Ld/fSG5czGcVjNj0UeZLzNN+waUeT6w9Mw7XR8i+pvvAHGm2pGI49OgoN0w5E27I7NAK5jOjJXHHUA6JbPAjspnnuIY5izDRuiPNmZ2h7rVYaD8685OFYaZOa+1MVOa/be353RC51I5n5sz7DaHJe51tuv5C9MkvynucrGd5wrVbsuaMR7F2A8DY5UKUB7wLViMzC5akq2QcakR3eAUCbEd1YbKxmamP+5m9o48q6lKyi/eTRYw7EWn3/APNlKkKPsCgL+RAxSe3FUrqosVKPfYa0tzDJrUpKetbRSfaDrSW8SxFqO6f8mAVq8Eg1zjDZbUtq9iU8PaOFPIk4ohtbTxZWnZLJCxv4fPdSW14qtGCNRihSdiq22vxFrn7K2XnXEPOHM78wznYZL9e1rZvfUrnOJtNKjPbJ1JQq+bsAtr7qU9hkxuUhJsq1wU+0HWhGxHEWmHz/AJMJKyPblBt76XDgTW5T6Wg6QgG2U8b7vJyhmtY1iEsMyFB0ysyxHcOgQno9W/urm0qcuVjKczj4Uk6Jzaa2t2aCpU6U7sGWk6ryk2O4bu+n3MaxNxye0VOyCWVnKnNYbha2o0FHFH3wiAGwva5SdDuOledkyEnD9jttrY9S171ycleeMRipkPXhoYzoTK3aLGXd7bb6S9icxqIhXVzalXsA1NLOFzm5WTrJsUqHuOtBjEcSaYeP+TCStXvCQbVznDpbUpntSd3cez81ttYbiXmqT5svt+bh3S50saP/ALcf9ztfrrF3mFFLweWnMN+qkpP21FhReQbD8NbAzKOKM2fuOsRbjWIxsQgnDo5lFyGxt0u5En1bjsrB8SwpuPKkYe6V81fPRcvb56Vhf4U8m5mDYo27liSio7PN+kLXHiPIvlLhMCPjLbkcNLYUvK43b6t/27qfLmDTMC5SvM+lbfuNqka6cDu32FY9jMhAcxF3EVoLihdSEi1k92/7KQzBSGWZ2HlyS2jQZgT0re77a5avraQt3zipu6h6uZWlNf0T95r/AOj/APFXLKYtpCpLc7I2sjqglW6sdbYQG23MPS4pCR63R1rEsXfQlzEJU1zbOKHSA+r7K5RR4iEts80SvIkaJvkJ8nLv+fvfYah/6xz+2ak/6s/ZT0BdrP7ZGvfXJrk84Tt3sQ5pJTxCGjc/LLTvJdKjz/zt5tSOOUqzX9ltK/6P4rWjbMzIgdwyCsacmJDpgRm0xUr1yXAJI+fjULGFShExbZqQ20l1KOddxFrq93dUzEonJZOMyJclRdmnEG21H+TYi4AqViT2AjAsPkxrPtJltuJLgOiuj+rt/NaeUG0j8zELYZMxz38LW9/kk4FixYfS+tefYqNrK7yBrXm7DcTwSXARow7NbWHW09mmht30W5uJSsVluLzuvOqNgexI4JqBPw6UmFi8FZUw4tN0m+9J7qg/hHLwdECK8HgzAQv0qh2lW7yKx/k9LhsS3Ww3KYmJOydtuO
mt6Rj/ACglwnZjLRbjsQkENIvvN1a9tTZ3JiZhyWJi9o/DnJVswv6ySnWpOOYzNRPxh9Gz9EmzbKPqprlDIkLYWidNL7IbUSQNd+m/Wo3KDBH8ND6Y2wcam5strnXo15/K4/NOYbDJmOfNe/Za3vrlFLkLYU3PlbVoNkkga79O+p2PKXHMR6GGUoCjnv0e61tO2sUk8iMUwp7D3pJ5zGnNOAMO8cul65Rc4l+cH0xk84fCbDOqxtbgBu93k5Qc6cjqTPmLdb2ZJsk9txvpWHS38MfwpGYsKazbW5VfW+lt9KSdxFqcw6S/hr+EJKlMqbzB65I330tvqXjSnmFYcSt2OyCcyXFgZja1u3jX4QbVnzd9JsMxzbXLlva1u/fXJ2ZHWwluBK2rwcJuRpu07qRj+ATI8TE9ns3m5CSWn099tRTeOco5UJ+WwjJFjw0nZNX3nXUmpbvJibhvMZKy4uHiCVZW1H6pTT+JY5ixlyHE5URY5UmO0O4Hee8/nx+dEmYxg775u/5vlbMOHv0pUeA0pOdWZ1xasy3D2k/ndjA8EhNzsWca2qy8vK2yjtV2+yufY1h2DTcNSfTKw1awtsX32Vvo43hojvlQbU1tQSkpUR2Hvpt5P4GhK0hQvzinvP3mTJYbHmG0v33zVjaZTcdHM5qmG9kk6gdtzv8AJiKsOb5MNsRpSmf3Sl4KNvYTTHO/wS5tnG22W3z5b62vxpkqZdmTZC8kWK31nVfqrnIw3k4lO/mheXtvZm6t6mOYa01Exlg7NyNLvlac/lW1tTMqY1yVcbcfDKQyHibkHtI00pnzn+C/Mb+l5tttp7r6eSHGyusRvSc7beiLD4I6mX2/tamJSW3mkuoCgh1OVQ9opMqOwiTKdeSyyhZsnMb7/CnPPvmsP5uhzHPlt35uP5XAsMbjoc84PZVOKPUAt89fxcRwHYJQ1EjBwu5rlZOXwGvlmOw0trlIZUppKxcFQGl6iYk6G0PLuHUtjQKBtXmgx4wwcyFRkSLHOXUpBI3236bqRIhtsvTXX0ssIdHRJPvFYPgRRFLEqIXHl5TmzAK3a7tPJi8mSiOhcWY4y3skkAhIFr676TiMNHJNthSikB4PhWnsJpnzn+C/Mb+l5tttp7r6Unk/greDaxNvnmpc7/qn7qXIVG5Ky0oFy0wXgtXsvpT+ORWg262ysqaXrlWkbj3VHxKKjkkhh6+VLofCtCRwPdSPO/4Ncxsc/M9ttO62bTyYzgi0RxFiMoW2oJOclQG/W3Hs/E5QYS6hgR4OTZKSDmOYcdfxVZLZ7dG+6ked+Y8+uc/M82z7rZtfyLOP8nprMXE0tbJ1t4dB1P7fsKWjlXyUQ/hytHnopzoy96dfmRRew4NCCrZFgNpypCcwtYcKjJRyL2iA0nKrzu0L6eynlYrg3mdaSA2nnaXs/wAO6uVX9LL8mOowvk953aOJOFbnPkM5T2WNMtTOSXMYyldN/wA6NuZPcBrWEIeHo2sPUtkHt6Wv7dnkedYjMMuvG7q0IAKz2ntrDv6Tb+xXll4g5iEtM5bjaor460UJ9VPt1oXufbWF4gtnNLVNbjlec/RnMbVyVwzCQvD2lYpZBQ4olBVoSCb1iMnAWlR5cpSGs6nCux16WvHfW05xO8/ZM3nHnKs+07d9rX9/fU9+Z0pbaksuq+sQ4jX7KONOPzXMcahpfTLL6rosL5QNwFHHsUfmTMZXCMhEkvkbKybgJA0t7qwqVJcU6+W7KWreqxIv8qabgOFmZMkpjIcG9GbjW2wiTKjcoEdJE9yQq61fyu49wrk7yVelmJzhgu4i5FVbPYapSew2NJx/k4X4TsZxPOGtspSH0k21BPfXItMeQ7E5ztPStmy0pITex7bXrBsYwMyIrxnIZfG3UoPA3vmue6uTW2j5+fulMrpq6YTltx091YWziWNqwnAI6MnMG82aRbQC4OYgacKwWXyOTicW8xLb7npNg4Cer0jqe6oPJjnL8bDRF5zLDSspe1
sE37KgzOTTohQ81p8R19ZQ6ntF79KsYimHePFaQ+wnaq6K+gb79dTxrFUcum55jFY82m69glHdl1vu+dS/N2O+eMLU5eO3fMY38m9/Zpp5OWeHO9FvD3lSmkn6ihcfYKgYxYnEWpgxNauJKlfqI8K5IQ2ulGQ2cQX7LdD5j51gLsCD5ylJhKyR9sG83X9Y7q/6j/8AfLX6q5RKcb2ThxF4qRe+U5U6Uy1h3JjzpG2q7P8AnFtq+u6xF6S1iPJjzXGIN3/OLbtj2WFNu4bhvnWT5stsOcBrS51uaawhfJ1nCJ8oWYekzwpvxAsT76xGCp0PvmM648sbiopO7urD2YPJXzjFSFZJHnNtvP0j6pFxSm8T5OeaWMlw9z9D1zppYe/yY+xIeeRh6WGlSGm1lO26CbJNuHGsFYwxx9GD4mpTTkRbhUlC9LKF/aPnWLN8qsSkNYbEd2MSCkOZF2uCs5BUDD+TWIvy8FmhSXYqwuzC7aFOccf24VyqZmOv8xQWy7HbcKA8culyNbDWnsDgOPDCpEHbpjqcKg2q/C/v8alr5WY4vEVK/e+HthwJjjs6B3+21TMOwkYixgbsPaIjS777jpJvwrlFyacNm2Hucxb/AObV+rSuVHKdeokrUxFJ/wA0kffp4U3IhnJKW+pppf1SVnXwvW05xP8AP2TN5y5yrPtO3fa1/f31PfmdKW2pLLqvrEOJ1qDjFnnsWdjAvSHHSc+YAnTd7PxnuUeEwlYrFlNBE2KhVnBa1lJ7f/WpGGYXyWxxqRIbLSncQYDLbYIsTcnWm8FitPz5LWzFmWyoqOe5IA4b6YaVyV5ZZkNhJthnd+lTjPmbH8NyozZ50TZJPcNd9coESuTvKaTzjEFuNrjQCpJF+02rm3mPlDh3QzbWbD2aPZe++sZZl8nOU8gvz1uoVGgFSbe8ikI/BflijMq2ZWG2A9vSrDsbwXJ54gK6LazYPIO9P7dtZDyO5R8/3ZNh6G/+s7O+1YljGNKSMTxBYKmEKullI3J9v6qgNw4siWtOIIUUstlZAsrXT8XDG4kaRKWnE21qS02VWHS104VyVVGjSJCWsUQt0ttlWROmptuFSYLKgiTotkn6wrmP4K4z58ybPPs/3Nm+ttN1uP31KwlDbkzEFZVuIYQVkqLiSQLdn3VJioZeXJOGZA0lBKycm63bTcVTDyZXmoo2JQc+bJut21hkeUw9GfSF5m3UFKh01cDQRCKROjPJfj5jopSeFKwyFycxqBjDydmuQ4jIwz2qDn2VyfxrD9rjM7DmdlKQtz0khJGpBPHfTWCw8DxXCorjiVTJOINbLKkG/RHGuRqo0WQ7GjKcDjiEEpbFha54VhiIkaRKWnE21qS02VWHS104VyZxePh8zEGYUhRfRFRncsctrD3GsE5UHA5+JYfzPKqIGbvx1G5vk7dawqXH5PYnHw2JMSvI636dw9uzF7AW+dQuVOGQ14iEMbCXGb65R2p7T3VhqIsHGsAwmM7tZTkm7C3uxFgdRU7EVYVicyHOjNtodis5wg9EdLsGlYjHxfCJ3KPBX1ZoiokVLpbH1FJ+81i2NIwpzAcNkNhDEJYyqPaop9Xdu7/I0cNjSHI+Kx0xZjjbZIbs4k5ldmnb30/hmUJYXHLNuwWtU6bi0WRGeZYbhxw62U3QneRfhurAZiIshcRuGsOPhslCTZehPk5QtyYsmOtzEXlNpcbKSoECxF94pvDpnJjlY48lxSiWcOJTqe8imI34N8q421Xl2r+H5W095N9BTczm0jmfmzJt9mdne50vuvT8FyyXesw6f8mvgaxXDsVgT28TjRnGgpxlX7o6JsU/WPs++oOGyuS/K1x9kKzKaw7o9YniaZi/g9yoh7Q/TSYGRtPtN9PJjmKsYNPn4atltLmyb1V0U6o4KI7Kw7FJOGTMJwnDbqZRLRkddcP8ngN3hWKymcKmYvgmIu7b9xpzOsr49Hj/AOlJ2eDO4VgqU+lXiDZS+4eASm+nvrlZJd
jSGo72y2TqmyEuWHA8ajzObSOZjCygv7M7PNmOl916x0zOTuK4oqXKLrE2ExtbpPqnsrztieFPw2JUIpRl6aWhwClDQK0+dRMYwSNIkynI7kOQllsqOVQNjp79fZUfDUC6mo2U24q4/Ok4S607CxAOKcaQ+goIUFki9+3765j+CuM+fMmzz7L9zZvrbTdbj99SsJQ25MxBWVbiGEFZKi4kkC3Z91QELSULTHQFJIsRoP4pOcng3J56hjaleUbO2nfe+vZ5ZP7ixCFsXi3+6mcm0t6ye1Pf+JA523Jc52/sW9kkGx77kfn9zzV+DPMdNnzvbbTdrfLpvvTe2/BDY5htMm3zW427/IvlBtfSKhc32WTvvmv8qxyJHxViBGiTloEpUVLiwL6IA0HDUnWsXwDGnGpMyDlUmQ2jLtEntHhXKifir5fbgTHUtgJCbITuGgpOMs4vCw3ap2kaBzRKwpPDMs6i9Rp8nC3Y+Nvvc2biPIKAp3t11y8aXiZxuDOcbTtHMP5kEotvISsa1gWIpUY6ESUSspTm6twUf4/n9yX5/wCU0LPb0MSdkbTYW0Fqbd/Cfle5kUDlXiN0q7j0d3l5Xf0uuuVH80a/sprl5Cji77mIvZB2kWNqiCZOYiSYbAaejuqs6FJFtE7zu4Vh+N4lhyS0zO2yorKVAqjbrnW/hbSuf2irTluGUzXdr7Mme96hjDoUnDoRTmbjv9dNzfXU/b/EN0xo0eMXV53S22E51dptvNOy0Ro6JTgs48GwFrA3AnjTnNY0eNtF53Nk2E51dptvNc6fwzD3pP8AnXI6Svxt5OcpwnDESL32oioCr+238QC7IdaYaT1luKypFbOHieHy3N+VmQlZ+R8mzm4nh8Rz6r0hKD8zQeYdbfaV1VtquDTMZ2VHakPfRNLcAUv2Dj+JzdWL4Yl+9tmZSM3hfybaHJjymb2zsuBYv7vyLLcmVHYW8rKylxwJLh7B2n8kt11aG2kJupajYJHbSH47zUhhQuhbasyVewj8VTjq0NNpF1KWbAfiKcdWhptIupSzYD8wwG+Ut/MIi3jBd9kXb6lX7dlLVyd8zx8Rb6UaVBKU5Fd5RUNrO2cblFEbaNnQLI1UNP2vQRJgxsTmKF35EpsOKWrielurDcVwrNHwmZI2E6Hf0eu5SRw4+Fci/wBJz7vLhHJOM+7GalguzXGz0i2L9H5Gua+ZYBbtbMW7ufHv+dToT0tEnC9peAColxpP1TUmfLXkjsyXFLPhXnFnkdMXhmTaB1U1AWUduzsTTnKWIyt9lLebYrVkN72I40zimyy54of2eb+Te16RymmR1xWjf0CF51E5ikAbqamYxyXl4fha1Ac5EpLhRfcVIG6mpLu1kLeUERmGRmW8o7gKadx7k1LwnD3FZedJkJeCL/WAGlBSSFJIuCK5PMyMLYxBcmZs2XHLXjnTpC4OvhXmbCcJexrE0t7V1AeDSG096jxqThU7Dn8JxZhOdcdxYWCntChv8kjDMBwaRjkmNpJUHktNtnszHjUxD0d3DJ8M/uuM+fo++/Ed9PSMD5MTMVw5s25yZCWs9vqpIuqpGKw0rOwSrasOdFaFJHVNc/wvkrLmIQDzn91BKUW4JJF1n2Ckv8n+T83FcqbyMzoZDR+rc9Y+zurEMWEMqSw04JEN7tSm5Qe4+yoOOsYU21HXlCYbbmVLYJtoQn7qGKK5Lz/Mdx+61PpC8p9bZ77UjGXVqciOJSWcg6TubqgUZ2KclZsKOpPoF85Csx4Bel0e8VBn7PZc4YS7kzXy3F7XplhETPEU8kuSNqBkI3Jy7zf7qViuPwVYUUnKI6X0vKX2WI4mkT8T5KT4OEq3yOcJWpAPFTe8e+m5DC0usuJCkKTuIplhETPEU8kuSNqBkI3Jy7zf7qViuPwVYUUnKI6X0vKX2WI4mkT8T5KT4OEKOsjnCVqQDxU3vHvrAYyWEvwsUvspaXdAbaaW43HGsJwtyPtTMV03NrbYgmwNra8awb
Dm43On8QeyW2mXZpG9W7WsSwSHgcmfOjqTsg28AFi1yVEiyANO3fUnCMQw17B8VZRnLC3A4FJ7Qob/AOFTOS2K4ZHKUtB1hT5Cg/pwBHD7jUvF8NXIwSZFbLrbjT5y3HDXw0rk7jzzZckxHUPyBbrJ3ZvsNeeUulyDstpmQMxt+uuT+EYNIROckzA4cm9tIuDmG8b769lci/0nPu8vJ3HZHQguNmM44dyD0rX+L5Grg6Vi0KKwVRYRCedhd0uK7BUvEm2g863LUEJVuuVAC/dUjEMa5WuMx+blxceHHS2Bpe2feamfor/vKjvrWkNJwkEqv/2dQ2ccfXFgrWobVCSShW1VlOgNLnMYxCx7CmEZi3KRlXl7jx+L3VyGxBiWcIRKaJaeW0HQytQHA6dgqS1i3LllOHqTZ7a4W0hNr9t9Kw+OiSJiG46EpfG5yw63vrkZ/Sg/4akO4Hjz2EcpWWBzhGyJQtGlr30PDt9lR+TvKJMGW4+yXGJcbS9r9Ye7s8mPsO6TUYo5t0nrcNfka5V4bh8I+eG8NUZEpDKekMuiSrffurB9gU5RHCTb6w3/ADr/AKRZDBvDUtQQRuKglea1YZpvC7/GqlfztyuXg/7eT/YrC/a3/aNYwmwtzJVh7q/6On3v3i3JZ299w0Fr/OsW2xTlU2Ai/FVxasE/mLX9gV7JTd65MPv/ALybxRsyL7gO/wCdYsuQU7Dmi9/G40rBg71tjfXsJJHytWnCU3euTD7/AO8m8UbMi+4Dv+dYuuQU7Hmi9/G40rBJ1lc7w5SZTfblz/8AKb1yvxpjVMaM0iErsyWdVXngdKPAw1ttH+tcF1feK5ZHj6H7Kjf0R/xH+FN8+ZVtm/on2lZXEew02cQxLH8YaQbpYnTc7fgAK2OzRscuXJbS3ZanHMOnY3giXDdbWHzC2gn2a0iZgmLYlh2Ii+eQsh7a335gd9QZT07E4cmIDsnYjoQrXvtX/Wvln/8Ac/8A+aCbk2G876chzmESYy+shVc3RjHKRuD/APCJn+it2WtupMLD46I0dPqp+3vNP4FZ+XAdUVLD6rnXvAFNtyJmM4lDb+jiS5ZUyj2JFqewdvbSYLhVmQ+QdFbxoBpRZMzGX4O9EJ2XdhHeE2+2hyfKXJeH2UCHz0jdWbhakMv4jj8uAk9GC9N9B7LWrzdNioXFFsiU6bO27L2U0JmIY7ikZo3RFmTM7Q91qCUgAAaAVhcmQ5ISuA/tmtmoWJ036btKZmLcmwZ7QsiVDd2bluy9OzUuzp+ILTlVLmvbRy3ZfyKxBuRiWEz1Czj8CRslOe2nGIaHCXVZnnXVZnHT2k0+qJNxnCmHzd6NClZGl+0WpeEw2ebQ1IUkhG/Uam541HwyMp5xhq+UukZtSTw9tcwhuPuM7Qru8QVXPsFYtAZdmKaxBS1PFahmTmFjl0qPydW9NEJq2VaVJ2mhvvtb5VJw55TiWH2i2oo61jUSBIw53FsISUR3szllNo+voN/stS0QccncoJridnh0Nybtg0o6aIHV07awyI59IzFQ2r2hIFO4fNCywvihVik8CKdwiW7NxaI4ekZzudfsv3U0iRNxudCaN24UiXmYT7rUlCEhKUiwAGgp3D5oWWF8UKsUngRTuES3ZuLRHD0jOdzr9l+6mUSJuNzoTRu3CkS8zCfdanYK0AR3Gi0Up0ski1LwyLtnY61FSy8QVKv7BTsWAX1occ2ilPEFXZbQDSsTxZpyQqROy7UKUMoy9mlIx8uP88TH2ARcZMvhe/v/AIYljKjKOtdX7W/h5QtIUk7wRpW2iYXh8V767UdKVfIfmh+U5myNouco1pqWvE0PvPKN0jen2+2mkqcW66wci1K49n8R48JbqxtnRtUI16HaaWiApBjW0v63bTyHkusxnRYKNsil/wAR3HYTJQgDKntV3mnlTUsvv5MuR1QBR400lxtC4odzlpYsDru7bUzLaCkocF7KFiP4jXtUjEW8SbaYfXmWlTd1J9nbTb
XWCEgAn/Qf63xGvX+M16/xmvX+M16/xmvX+M16/wAZr1/jNev8Zr1/jNev8Zr1/jNev8Zr1/jNev8AGa9f4zXr/Ga9f4zXr/Ga9f4zSNVan6xr1/jNev8AGa9f4zXr/Ga9f4zXr/Ga9f4zXr/Ga9f4zXr/ABmvX+M16/xmvX+M16/xmvX+M1vX8Zr1/jNev8Zr1/jNev8AGa9f4zXr/Ga9f4zXr/Ga9f4zXr/Ga9f4zXr/ABmvX+M16/xmt6/jNev8Zrev4zXr/Ga9f4zXr/Ga9f4zXr/Ga9f4zXr/ABmvX+M16/xmvX+M16/xmvX+M16/xmvX+M16/wAZr1/jP4iwhcnLzhKE/R7LcDY+t20Stc7Z7VYv6LZ2BI/Sqeedvulk9BKko16KVcE1lDnQdXmZNvVF7/YPipySpc7NzbOCsM5L24W18acC3sRZOllObH5ZQfnURYkuuOu2KvowvduTcW8eFHOVlaVFJzgBXvtp4eVpq/QLKjb2FP66WlIU9+6ciWwBr6LNbxqOlt2UvOhSlhhLYIsQLdPs176FnZOVDWZWTZFSTr1weH6PfSVDUEfjN/pfcfxVLUbJAuTSnXtuhpO9ZAt9tJbQZeZXVugD76QyEyG1LNklaRb7aSqW5s8wUU6E3yi5+VKTzXF1expGv+/QAiYwL7iWkf8APXOoS1qavY5kFJB37j5ElxLyrqKRlA4GxO+lhoLGVKVdK3GnJL6srSBcmlF/bxkBJOZxPy0O+mW2Gpz+f1koFk+25rYtB1K9/SAq6r0GpMiy7266NP8AeoOxnNsg8UlJ++s7ZuL1mWbdnfQzJcte2goKQQpNKxLEVLDAUEgNpupajwFJbbbnRSpWVJkoSkE9mijSsNmOSW5KWws+i0sd1SWsOecW4x9IFNkUWHQ8t0DXIBpQa2ygsm1syP8Amq4alEdoSP10h9o3QoafwNSShBCtVAjfW2EWMHr3z7IXvSiALq399Is2gZBZNhuFHJDipJFjZoa0SxGYZJ35GwL1sVstLZ+opAKaCG0JbQNyUiwHlG3YZftu2iAbUMrTabHSyd2lvs0q7saO7rfptg0gGLHIR1LtDo+z8dv9L7j5CaAWtDRUOilVyrwFbRhLkyWuyWmUIPSUd3upKcVP7sWhZe13XKjbwpnCGG1liK3tHMqutm/a1c3Rg2Zb6tntNpdSVDUHdUVt8XlxJ7SFKy8Mw091PROcOxZUlpSWFIazX3XT3XvasIw6RhYmsTGlOuLUjQXVre44C1YkwiDsGoTYcZfy6aKAATpxG+mcz6l7JhDCmii2Rab5vbw8KITmAz6mo7RmsoZ2auhl1zX1oHnDb8gN9Ow1tc2v86xBKhcbEnw1pTWIOrbaYSCBfIDmvvp7YrzvKb6Kdr4EVEkOostalXPuNOpQensjb5085LjS3Xs56RQD27r1zZC3OarbzhKzuuQKfVe9pJHyTToDiGUIRqpY1T3ns/wqA+7MShuStLbSxuUq2iknwouKDrufV4IF+kNCbVCvu85o/u3KdlzGlrgsqsltBspwjXfUjGWMSaadajtgxl6lxO66SN5voR3b6mKTJjvehUClKukm5Rw7KdhNFSEOrTnKTv6CdK2UhLUZxzUKyZ127/2FP4U84X4wa2jDh8CKin9L+0f4Gp5ya+1/mkt6U0pZBWUjNan33Jy2pCFGzN7Ad1uNDbSlxWQhJ9GdVE1GZbkqc2q7bUaHL+ulxlOuPJyZ0FZuR2/ioSHVMs+tkNlGnklwvtJXZC1caLLstcRoIunKrLm99ZEP2CXMqnR1lDuqaEvLfbbRcFetldlRnTIed2isriVnTXs/Fb/S+4+QtbIpRl+nUDlHgP1U48h2JMC0W+lCD7LGoxcgrSyhCk5y4ClN91uJ7Ke49A0l0MqYc5mgFXBWqt1OvlxSencnaHL4bqacQFJTIxEPDMLHpOi1M4TtZMd2XfI6y3my5bE+ylYYl5Jfj9Bg7RF3Up10Guo3WpvB0Ptocf
OWQS8i7SVG5vfeTuCaeEVyZzVuzIbkIIIUm+Y6773qbGQlEqUhyxSvRI4VLnxkhS3yoraTwta5+ypiHWmnucqzvFR104A+NGUkBvbw21gDW2bL+ulvyW0yku5rgb7jX76jrMPJZmwX9W1QHWwUNrWopSeHQptpSrXYvv71VHUgIyZxn76SczSFhu28DjUtxCgsCcoXH6CKW9dkEtdBkJ32t/iLVEYQ1GEQAc1SU/RWFtOPaNwoTY7yi9tjnBHtFvZ2GoxS2VpRiCFLsOqMjgv86LLeV2ORohZ6tRnFrQprZZWkJ3BJOo8axBNzk83K04fSN1LTmyltxs37PRoptMJVlH1yL0yuQpKsvXsKgOoIUDnFx3OKH8DcCGkSWldW6gMtRY1gsm4Ub7uNOIEWPdWgkdg+2trHYTICkBJGaxFqQ4kI5wHS5s+GvClyZDYZOXKlF7/ioQ0ypxn/AClnMubupxSoYZyDooDg1pNojMtFtyjqk++ghpOd1xZU6UKAt7KdiqiCI2UaHaBWtR0PsoabaOYqzXzH8Vv9L7j+K+yhwsqW2UhwDVNxvpT8/F8PmtkZc63l5wOFujYeylmRMgus5DkAKtTbS4tuqO5KcihLakrKGlFWoN9LgU/EQ84wtaeitDmUg8KcckzWGntNkqK6f617p7Kzx5rTrpUc6pTpPR4DRNIVIcUuSY7bbnpCpN0jeL9t6mT8Om4cGn3C5aQtaVJubkaA0XHJeBr6Nh6ZzT/coA4hgyWr7w64Tb4KZwQOHZtRUMJXb6oAB+VBqFMjZ9rnUp5R+Vk0W1PYVcjXKtdj/u1GStbJU22BlaRZIIFtKiy8GxCLFcQ1s3EPqWkHUkEFN+3srm7kvk86q9wouun/AIaL68RwFPcH3f8AkrzfIkolSXJCnnVIvlBIAsL9yRQcbeRHcAyhVr6d9GEX8PWrqiQb5svst99LW/LTJK27Kt23pcV0JUhW8K3Gnl4bMwsQiboS84sKT4JNfvvAfc85/wAlS8RnSo70l1nZBDJJAFwSbkDs7KexLDpcJAeSnaIkFQsQMulgeAFFKZWBKVe4Jec0/wBygh/FsG2P1EOuAeGSoGFbXbKYScy7WuSoqPzP5nb/AEvuP516ifCuonwrqJ8K6ifCuojwrqI8K6iPCuonwrqI8K6ifCuonwrqJ8K6ifCuonwrqI8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifD+Jrjj6JiWUrShCw4Uo94vrqbbqjMEltLr+UltZuUhJPu3VmkypAaZcW3ZCyFuEKsnUak93GmlSzZxLfpCaIMpDpkILiUBd9n3d2lvA0I6nnVSHMuRZVdVli9793S8KivPOW9CkrWs92+toyttwEdFQNwanhb7jjvOkozITbKCEdUcN9PIHOkqSvpNvrzFOnbrp76lSw/JEhC3Skl5RT0VGwy3tbTspLR2QGZKVAu2Xc23J4jUVdMdPNs5SHNp0tONuy/fQdefeaYT6jRKVLUd2o191ObZ51D0aGHOiu3S6W+2/q+ysPVtcrCl22aRv6BOp+6lrkImoK3lJaUpwhGm4Zb919355zKefLeYKLRIKLj3X+dAKddCgvOhabXQe7/Go+SRJbU1ex6JuTvJuDrXN3XHFg6KVuJ8KbzFSSheZJTvFRnAXMzLWzTc7x399NsoJKUJyi/kkAlfpl5yfqmwsR4CnPSOOrWbqWu1zw4UttUmSqOpZUpno5Tc3I3Xt76LqXnmwSFLbSbJUR28eysyHpAbzFWyuMtz8/de1NPc4fZU2DlyZTv46g60kre
kXyZHCLelTfcdPs7aYUbgtKzJt7CPvoOKefdSlRUhtZBCT9vE8f9JeUDaP20TSuevgO59OjwoFJBB4j8glljR3eSRupt6E80ym1nEqHreFSmJLgcLelwPyGdnrFVr9lbPPrfr8azu9YKtft/IJZcz5j2DQVzTp7S9r20pUZGfOO0aH809MWX6qxvFPNPpaW4o9Fdt1Bp1SVEHS35BKGU51hd7X9tCPKjMojFOVSkq1+2i499G2fR/yvyC2l7lVs7DZ3+k4UhpG5I/IJddaCnE7jXONkNt9alPoaSl071f6Of/EACwQAQEAAgICAAUCBwEBAQAAAAERACExQVFhEHGB8PGRoSAwUGCxwdFAcOH/2gAIAQEAAT8h+Doy/wBkGl4UjsDuehXzMilIzCF3G9ePhV5EwYn5SafA5HAES6wqIe9DvAPqSonNH+potAZ+c/6z89/1n57/AKz89/1n57/rPz3/AFn57/rPz3/Wfnv+s/Pf9Z+e/wCs2AT6f6cI1yPxg2UXwAfsGFy5ptmvpbXKIspQv32U4wmREolkjCnB69axhV0iWlDvXH7GJNJHFVO9yqzi+mSY3IzY9XJnj2WM3t28H8Hed4qhkmNyM2PWTtDs8YXbtrRipADa4IbkCEnJRL8UhpLVPzf1JP6EpH4SjwHl+WCkETUninJ8OP7NMuXLneX4XLly/H9i/wAfHh0mlhuDyI+uIZIVMKUDmCgMJN21XR2ACpENvOOiwRdo0BA4NcC1XGLBIAMheGyrXVjgJaM4FfQhWdA+EB3uFlQHBdo4F8Y2LPEXCjpifJOFcHqI6HAtIDiUYEgl9HptKAKVNmArLMi0PJXCaAIaADWlwyQxRYA4FIC6FceJIZjKEiDyjdpInXqioDgu1OBfGJ4TxVwUdonoThXHPXKwjRJNEFbHiMdaQRkuChQPbNrwfrj3LZtpHW7sDrBp7Y0JEUbpKwnijoDoseDvE/5DBw5YgEHtuxM6/wDfNSo0eHK96tKGIojyt1OLm47TKhogvJul+mJ8wr6Q2H1zg+zT4XCobQSLoHCJPnd5tJfnwDwNC7TjOWKBmqYK0vJhH2GSnUVHQObj5HhRkgh3INrrpcDWdbN8d4DpZu4YNjga5FYTZ65WYVdIobUZh04Tvq5Q6TXS6HsCTly5+xf4+NJibhHz6in1zr0sgQs9Ii5xz2MDQEVQpFAJzgv1F0GCNNGCNvDXLERO1JHGwNx2TxMQwd6Ddw2wPHw4P9IvME0aTw45P4GuItyAWO1nbcZGaw2QRdIseTZTVTDd4rL4O+WPj2ZjiWjaWm6sTQCQxEaMVxORY29Z++oaBu26RLn6GTkCaNJ4cEVBfDCLchLHazt+lZxhrsk5pfBnQh8Tkv1Ds/LL0w6BsbIKTJ9d4qpEe1BrpXtwrXJbHIGkLwbSfqy2T37KgdW7gJwGk/8Ad1iENOg5MEvi0kDW4MeV1G6yETlCUOgC6NHxO8DL85DDn659t6ZcYGkioXfTuesX+0d69geh8+XHbL5g0iWOZ/3Omg8Ys5HE5OcHYUulCPTvWC/4PLkgupFm69pgxHiaGw5c3zesFkR5LTJjS6BxxLjYiwQlQWbgob5YUZI40bSsVUV5y5+xf4+Lr6bOHuvlk602j1VBQpdIuN9K+RsQFUAOwg5PmVa5qFfakkNBkvqtJGlCAnm+jGsjbOKAHhNUTp+HC40EAirwBu4mnMAqFAFk55dYKsg4ChobXYSbdbwgar/l8P0ckjOG3snmx+cy/KraetnJnjmEx5g2bP1weKKBx3IMrqb3TJ/TGBpC3oLPAxw0UeTyz49jnf8AB3/Qes6+Hb90ZcuXL8Fy5cuXLi6rM1+U/wAfEgQ7EobZoWeUxLToPECCq18jjYE4fUHCubYfRfQ1/lnXaY2VI0NBF9eSujVBJSmhavafChDSNTkPp+g+uXHNYjlqBArC6usK2W8YlfBLV1ybw0AAXaSjDZwBtAbwVTaJNab74Z8sEWzVQE
mvYeVbXuHoyxaqzpqo55UxyQbnpB4V0PHgwU2LcEqNOdwcxfOcG9ZUZ+zz/SUFQInGfb/6Z9j/AKZ9j/pn2P8Apn2P+mfY/wCmfY/6Z9j/AKZ9j/pn2P8Apn2P+maOPUt/3/ATMUQKtLi7iE8ub8wGRbJTfMv87IQWOif9TD7KAKqOhXjSuvDxjHCGtQwX5BjtpyFMCAjsCPcQvv8A+GKojnU1p+uFb/sdE54QPbj2rJAJqPk0osu3I22IZgvM4UjPf9jnA/ENOaGuueufGScV3BfQvZmibKsiSAmm/Ovr/Y11hck7b10aeq+dcBiOIaKt/RXXHlyfcB/lpNs0lkXTgYNI0D2f2KgKOecAqrp/YYSpLaGCVwtt+eC8kiFBtJ5YQszBWVV2t5V/sXT3/wDvW/UkzrN1Tb0nJUlM7yOL9wrCoBtDfnBBpPrbETSJunxn8NRb8H9rg2bk38F7LAMFZMHY73/H38F7zdW2tLkqSnw4MGmNL+STn4C3+BB5TQYEFp9TYE0ibp8O/wCk8ftscDZ0iKvyDAiJoLnaQdajvBOiL29dKDlmzeT2JaAGrQed8+KiPSYL+WYcAG+Sy4cZJcsdWBpgRJwA61iRp67MEgJ+YRqMGUbxVbqSvqop5uRcQvnJviaMnZ5zWV2TCoGFg8Xemas5x1gFKXQB3jho6bNJsobsOtWhCLXsQ0g0iopOXNC5hwKHwBQ2SYSkXEW5+SfUmMGEjaeYYOaRB3nXHtVYrRek15wxMA7A8e3g8ueVr4QRYpslpxhLuMj0Tr6nImcM1xCVcZ8n2B766PNETdyKQrKykA8tAPLi0/6bPdPA2S0x4s3dag3cHYAmNSXubpQC7ALaMUuKhGXVwN/LODyQahAKURx4jCPUkSI7PD4zpx4e52vkLL3N4JCVIktlDW4/ri5uAl3Bs0I7mneX2vGHIIT5rTiLrDj3q24B42V9nKg8IyRSkkR89+sBsxptv9DoIj19FpPb3lXvnN65TlS+mX6ZLaDtAbtLyXdHFVEEXbIiO5qeMYgGvvZAfIUsnMQjw8hRgBYL5SIo4YfgrH4Zf4X9ckQ/6GP3xIotj+9YP4ZYxnBLOnu7A1e0su0QKCpd6cTRjwpLniJtfXGTdqWdYA4IfqDrPt3hgGAWwJ5suWCP+CZw1kunHdvlG/LGE0QepOATE3FKpvYWdXNHM0yaz0eR+bkMFfPgFgC5Kf6QV9DhTBlEu960axB0+wWnm4QvrBqQU+OarWtj83EIV4f1yxkitYkTRah465wvlPNWDqHBs35fgnY/W4Tt3qI35ZfBNsjBbNE1Jsmss+PcDJxSQ02d5cDxRC+BxFSSlKYF6q3UhQ2tm1elxhoYcQLU0ip7ROMN+oCxCWXARvOD76zQIGLqLy88/wBDU+uOxrZ5Hp6xBy/pjh1MB8kyxoc3vOyd/PE6YQKhyztXl9HAGfRihZ1x8mnFM3plZ1ggU1zeB5DOssosFqTfYenrBiPhgkKlYsiY5Xiw1paU14TvLkXVBaBV1DU1LwYtz2qWG1GvRWYlYt5bBDwHMc6wkHzsggquGwOD3X5DHoNoQANUd83I/wAhFFXYAu+gxW+1nA3kSDql93NSkWnt0h9E088YkQlspmvhBPSYxH/Aj/I+E2OVUPdE0gGh8r7uEnNapRQOhLX3itd2wulD05CTsULdxUuBGzIFnuCII8jvA7kgxbAaHhU83GRDokFKCOOX3idOFdEHYG67BzQ/q6xxA4eFTyOJSuPWAWsp2uXIKJcqAJI0V0PAZwwTrHaXIHfa+cj7ArqQRBo4tMcAm3j6KRUTZ5ydklkXCRh1vDsuuqBEnI9ammmsWt8sacajR44mkTCJgEANH9Fazr+f2f1n9ZX3f4e/jP6eAwum58z+pLj2+WcCDeAkoWHjIez7dd1xne9d4atzE2R/CEo8OM+JIpp5KbYLzkQuoADaoI5Zx8HwJstcabF7kyfZQ3RxECzVN4Wa6Bo8KgHtMgvaFF
+EBXpDNDBPyZhaFNPqwXmK3iE2YhFI67waFP8A+tlNXb1gSiJbPOwnyUNecvu8lZ4gBY8m83I+aL4jY8mlxvpzhlBNbQQaPIR+FfoWZQUSgAUecGKU3UmITHwPGbLo7CJoC8g4yrePLly4oOi8GPXwwNUAK2mg7z0AE0cBLx1L6zYkJE39uddje8qpg2RcxML0Y3m/dVTI9yYOrrBB4oK6suB2NnOXgdr0g/0vSQ/AxWsN636zZK1mAvB4wwJwxfLEFAgNoqVbsfM6x1U85nQrpTmVVm8mWz44xKhD2TmjTY0pxlqE5AwiFWoV+AWGn6Kq8ocV2+2QFFAUCiiG1Mgxd5v/AFOAgeR4eDwMPxM1YKGqxvmq7cSJIq7I/I9+YeMMKcYO+O9QoSgduFhZ4yMZFA1enbV+a+cG4dQBJXJss9/LHI4gBGIDgqodXOn4/DH2PyzsAb0Ug/Rj9MlHWrgX9MwKEvyBB+TAJieAAfsYZHMomE8NdsPysOSbTQU5XXgZuf1/Y5rwFn7ZNraahgiKU+bbf9L5rNdnWzyPb1gqyzFE3pRAHy1gY2eIWAXwKfoaHt0sw76y8H/4QSoM4gea87703DlzzW+dVt48uuE7xrZ0IRtMAA1+vNNUFya7QoiPnngFcqq650JW60b7hDNUpTEYO3g2h+6rX5hBUFiaNFPeJqEIVRCKu+GSd2YefbHlqfqNvWHt70m+xHThe95CyWhKlTyHlxrNC1N8FwFfcNBuZoZaYzOR0EHjpx8Bm6nnVknoU95ruKa+IiI4CteXC5M/1YBjN2tEiI4CtTbkut42qKBrcrjWVs/D+xKPLa9Y3ngH18oXblOsh4LWrw5whsOjibYLsR3iu4FN/wCieYcaunZnrWoNg5t8FkblXTwU3zf6b3/AmH8Pf8KfRbWm01741tec3cgN5NcvrgrAv/onw1/BM7/h7zv/AMSRPIK0GN09EeOaZN5VmwL4W+D6pzmjggZooDxXOTluTQIO/ef5FSb+T4k94HLiRVpounJD1jkeg2gOdR1N63dZ/rHBeNpZdXPQQIs1dwUrHnQ5xouyx6H61mLAmmwPEROOwHSQjgfJQ1wWDk7vGs9ynh4/PWc+86xdxCFOQRiO9OjeGDJTrLE4fWJpyxsQxyAtU+ZiTFmnQn5m141/NVYqhm8h2uDdTh/hJLpCq5DQAzupycZd5ceMQtYQBGKTThibdBwgqs0PPeLhb7bAOCtPI3jBJwKKtBOB75w5MMiY30NOkXfPwF3SIEKl2bE+Rg6UypUaA/fPcp4ePz1nPvKEIeyBL6iHtvJCNfI6bK+f74ipe0vqhF0HpR6w5nIkVLQ5Xbn+doMfQWW9fAy5fjG20NpBnfxcj0m3jah9QP4f+oy6vrP8QwK+wnN7zr+RsyPGdoOmPXHhFFTpxIdZUSj4yQgDGhMB8kxJZjTsRnC+M4LEj6baDR1HPuvlxwp43xkaqugaa3mq52tY73OhDzhacoGr3B5/48fD2vA5cht7fhTOM6wsVCgrdMImqdlqVMSWbeTjW+U1pSM53ZfeDGFuhIKHfnXWVgZlKjlZCvFmus53/IWun7Bp5bxFNUc0Ze0a93DxPZIAEwmgCnnKLuXRHIIheS1pcRkvfbNPbBXtzyxVWVL0wl5LTZgaC+A1Z2cnZ891oEzjDUAROjspqNjnKHrGt0eturEmYcVBvAUB6W499pDdQarQ9b4oJ41w6QHQ0HmL3nX9LABDQtivKiiUb5oB1Syn0SvIYzEJeKkjfAvzewT6CJJJ2rcqHHG73Pxx63tsMSb4zhI4EA7DKGyUu0cLjOXBjd9k5CJxvPDOgRUVoH0D5uFRGjyD964fI7lmjb9DHya6y7oSBXfMmasrvaUcM0aZxTmZAkpvrnW0dXvLIBa4Gox3xes0Nr2NWNNa17xZfjnXVAgpoqzTQd35a7qK3ABWWWFmedmOJ3Ygqb5l7xZ5vnbBOJRV417+DLtROi2HeqPR8xLYSvuRKK
8iuhwkWAcHR0oreKj9DJKBLcmYZEYvd8L8szgIaLYgm08ZGA+x5uxOO1eXBiK/PEJQ24m9nM2RcEySAGDWqgu0W8GPRL4Hez1X1Li3PSAip6j6lndMbIZ27NHuZvP0ia6fsGnlvGa1RzR/mRL7uJVdN1sBgOINGv4gXLeaRV4BoF+qjpDwkqASLD/PCMIUiYMF2emjG3FTUoBmPqVImFKu1ngcIqV7yBpV5JSd5c0S2fJ2dronTjpM2h6G8idUk3nHoOabyuAd5b+h7BiqA7ZUItnOGns6a/wsE2hU2QooxBS8N4EqvpzWBgU3hx/Auo0eZTAwUq6w8o4omnDDy6y/nr4XoPi7L1c/RuPDObk4cdeWN98UGpFQaoeWayaCBEgu2pLhP8gIVN5dtSW5v0WaqFASiP1wYrHChuuiiy6sut5ywkAmrW5tiPCLN3+M2Um4LTrt2QZMHmQQERZXQ48BIqRPWJRxEqdu8WYaPMpAYKVdZ4/wqQHb+h5S5rUy7tQ+sfJHZBbgMuXN7o6K24aV56OVCeBeHg1tQPXB4cjYu7tPMgMRJlNos4ud3Ng5yRyblohZQr1y1h9G4JEQdbKdKmtveHRxV1gBBwukMBICI0k/0MSNFIoYBUmxrnGaUTtAJBaaXs848sHwzObACiMTTjKiTgobR+2cUI5Q+Vh24ormmo8T+hbgtlImw+pHh8i4MVbIEGJNEdrpwRShNdFEHhOQzqPHCYttjU45T4WxhvIOrFBEt2+GEYKQMNtUIvHk3VsGBeqjxKvgnKiYiqZiJ0HqqEl3dY4sOJEtiR3HWAFgDBRU9nFuGA9imqLRJd15upHJbBtHSOotinDvWb14E32BgLs0OKDbEW5X7qcnhXEAICA1Z5Z+gcOGc3Jw468sn59QakVBqh5ZTJQIFhHh/kd/yOv7CiZLcDFB8B4e/j97porTrt/AKRnZTw8B8r8v6/8A/g1L9gJ1M/2Ok9mtLLrHnOIwewdx4kT69YE7lUET0ATRsbyM7nSOiEBitBym5VjjwYFGFV0LXfOQyfOZTsAmy83XAIkKmCyIqHb1e8cy5SoNENAZZfOb7M6FSeybp8nGTX9emXrgxycsrvauenMvDYclwnx+x+X4JZEKeaB9ST64SNNliicnCHmOxxU1LDDkOk2q2XXOLuY5H+g9NgeWbxRqLs05H2q8tP0zx/YS7zIxuZvqO8DMfXyINBCV6zl+W1zmQp5d4QF0TV3G1fvkJNTBAjMR8oq+/wCwCmtRfmVQMeiRR6eYj8DTCUfTzAcBVWl+XTTl0fal+bNj0fwMdvhxfHJcA0InnN0aa0uSpKeP5Pr8NkFX6Bvf8ptaDYhVLwBuuSBCODyhE+X8K7QBRnaugwRKRHv4rtAFGdq6DBEpEe/6BrOWZViDnWm6OelwQYpFNEl4eOFOTZkZSinmED0zREeMvxZ5ew2i+PrWuICVE0OynIBa40NKY79jrHXwnNeFwQ9Dv866oy/cB0+rn7wZ1WxNqoENSL26sGvFQV5AB2qgHlxC4qMZdXA38scGAliushG9OL7Gzl6b6WfTG4GYCGhKVOwm/Fx0/AXcQCfNde3WAr0sMh/Tfy5UMF8gJFQSk7736ct9EFEwmV2qL82nStc5GqK6XEKC1qdm8MxtD9DjBsuu9XcxTKfJaKjz1rhljI3lEUxZ9F4H6RWzVW40VgNdb6QdZV5PkCPLHje+fph1BSvE9BEUGUN5Pz3L1pNjLBw+UxocaG2IIiDK8HZ1k0+IMPAIc6Nw3NpsQhohaioNNlwa8FdBQM2nngG8ZcSyTXpbPBTep3i/SGr+AWWWY8MMqFzPIaaOWbML0J40dmuo7mLqIimIAAeLRzNuLZG6qKJ88eGGVC5nkNNHLNmF6E8SOzXUdzBtAimIAAeLRzNuGUcRIiO7gMcutZObRwItpCvY4zaXo4TF9tB41d7wIzipoIUBVpHGbVtplNIHI4JvSxn/AKaMP2JTxrTsV8EwtYZqKi
GpeFm054Tv+2UFUHmNNRXgwkd0d5BDpwnIiYkf/uOhIsUAYnxgh+T+mOvhtQP0SUXAaPyTrABFHNxZekChsp0jsWhe8rMh62aOlqHia5wskPFWDqHBs35cpA29OZuBQkD3m5TNqQCTZ2Tcec0WTpddg2Dir0uMTYTwJBmloL8zjBr2Q9DmfKRvMwaU0kRARdcuX5ufZfOIE5+VVgBssRotGW+Bihu0BuuBHycY0lV5AAvSw+Tl2QyAgitxNiafGM6BHrQv3tnI6+dwLh5H6mSYRenNthEAFZn0P9ZuSlQvF+HEOCMw0TTC52r94egGNHYrsavux+nw+IcKK+TZ/vO5Gn1T0mNQw3pSA9qge3KNjQOX+aWEOKK+TZ/vO5Gn1T0mNRVnpSA9qge8lNj0kFPpD5DGDT82oQ+7r64yQZhwfqYF/kYS8OAfTx/YwA2hVff/AKnf/wCot509dxpesGiRCCcPQeLM6v6fxzwSamWEwQbuiPkaOgxRf6CFQYK/S7RcClSFmK00ddTn4E4C4Wq9vvDfzo99ImxPJswUbqLi3PM/VlBhJRq9l37FXHgB0KoekiCPI94Lv5dI4g4eFTyOK0/pnBX/AGG+cp2qlQtACo7Kd83IozGRZVGwukCQ73kzTKKI0iGHzyZH8pQiRzDROtcZWbnCjjUaPFmBxYAgB6wkTTANUtXRxH3mueW7t0Im3q5RoYPr4APpfhUPLYPpR+cL3c/yCwflc8cbdbcuoCV7zVaeIITgMs2NldhaL7t4PlhAzDh0agOV1ijoZ0F2BNeMVSc2jWBADij9c5cD5NCp8sLpwYAEYol+jlgWnxw3aEOGybMKDC1iBKKEhOmQswKoK+0fuZD/AK6CNREo+RPWNnS6GZAQQgk2eciy9fBwBDDxfncBhwYAaAPGQ/y6CNREo+RPWNnS6GZAQQgk2ec2qvXBwBDDxfncD04hwAdEOPGOCpjAg1AkA4xBY1oUBQImid5upE5NCQJ9VyzCwWpWp5Hp6/8AZXFzB7kr4Wt6zq/xd/y5/L38C0fLyHpzSdZz97B/k9fHv/2bYa3EGWdj67tu7/h9cphwbW1h7Ahvf9j6KxYLfYcyyTvIOOnVU0HVDpwp4dICQV2MUJpWO5/YyxLh+UsnFqHT0HgO8c5HcBjxwpLo1gNGTb22kcP0OchoioFRv1Hff9i95CQHtw8ilWgCKwa1ZOMpV3qVgG8AD/4dXv6f+me/7HvPf9j3nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPf9j3nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPf9j3nv8Ase8AIJF+Uvn1nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPf9j3nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPvD/ee/7HvPf9j3nv+x7z3/Y957/se89/2Pee/wCx7z3/AGPee/7HvPf9j3nv+x7z3/Y957/se89/2PefeH+89/2PefeH+89/2Pee/wCx7z3/AGPee/7HvPf9j3nv+x7z3/Y957/se89/2Pee/wCx7z3/AGPee/7HvPf9j3nv+x7z3/Y957/se8CAF/X4lhPRkCpHkjw7QN6zXUgCQTrwDzfWW2dwEbkGtTnvjBYuGfNr1sRq7/RnOjO3DQfmOHnODElSdug9MfLNYXdQLUQq7lMO7mpnZQjxVTJvR9cfEBS702gH92SJ2wgjY8bWrr5ayknsJ4IgJCVrn04IKPKhZB01RZ6OAyhIxLfTxnX8dD8FMOE0joCrnTuh/wBT+znBCuhT+uv1wr+SJqwKLvWKxhwwLxnQuNCDh9lqh+iDgAeocYXWBrFwIBCA2JiKnjJ9vjHzAav+OMX0MRsWki+Eb2Z5P1L0B7VmPGfWxnSironWU4tEWBTTd9QfcxMaiAwzk0u/nngy6wVL0AOSVohpxE4iHNg8i8mAHaT5EyaZWDteDA1WFC7eNDf2xQo7P8
fPN0jLmIVCwXaEHLilMnxUC9WX55DgB0Rot2vgyB7dM3wumas8mE0QlwUu6nWCaYhcnj2xAIRaJnYt5uvTfd/8aqygUWtvngzoJHH3WW+8DZpWG9A350B9M8E50tNHg0aPBjCXCHQ8jDjNYykz+gw+QBCCcadYfqYT8gHxQ+vsXMprGkhSArlPf7GsoCNFGwLvuAfQwxe6oO801vesP5FAbRAqBXLeYiT1UlD3lEUGr2Fkr3XgyjDRx0z0iCdSYZhvJsDdGkJ2duAKoVJUS2cmmWJc48poVzT2oTdZMGbBImAoHAKpzzrD2tUu6O5CIiwmSGzVHRATYC6DfGTemq05VOQla7U1MM9BFKQr38gwtqcLQXtyjV+h7wZDgpA915YzgnI96P3DN5GTYq3XcDXrFpExbjyLkbN5usTiK7p39feRwLR5swQwbHA22FlsxDs6myABAgZ5fCt7v95SSSVFT0KjOU9sXlmKuSQJHbfr3liQXTTABxqU7I8mVNAjiMTD1iUDs4vPvWAp9dBUWopYAKdJkzcZ9U2g2jXyezGYAxGZP1W+4Y9LQop3jcvpXz3hPwGvFhvh3Lx88EYAeP8A4+2t60lljza+ZiIgkhFh41+mMCokYnSfbz79ZfxaNLvQsDNKMBaQrv5JdP1wJUnkSgv7/wAJI6K7jrfjEGT54Q0vcaXCEhbinl+TiZatXQB4dNLrf+ckbpKoNrvr5ZH29jQ0jqP8dBuX2XtRCzT28ucZLYa4QFJXZzXlYK3ZtcwRSWsFIHOmazXUdPzp1kviyKY1rmaH5Yr2S9V5RXhzZOTBZkwmLqc0jvpMab0eVQf2PnKx9NqygQUpg6PLEEDHVcBALUFl8Cse3vc3dkxV7PECMQJEEQvLETjpzgoUAFCLQ5c/tcAvhBc0BwRSfpgGBrAkp7mGAAapdFncTDmOsCyga6I59CdO/XOJXEyw6n6GBylljkP9YuMxgPK2c6e8lgUFQC9+sfORQd16XzlaQu0BXB0zyLrhziQxukoKDSCw3V7ACFmwfJ6R9HzMW1g05Y9UH1yh+qBSciZEkpEsLheUu+8TKp3zAX57cDUY54NnDH1yBJ6vGU4umC14acdj5piwWsa2X7md/wDi+VBGPh1vH1IjrI6E2dfpmyzxpX1w8pTTl4i+H6UXUTr0ZoL2NCIl+XfFxrkJiwtVTy/wo+QwT8K5DzOchC64PPYazofoh5l0n75dGZDXSePF9YwUx/jD9bksQUIjUDjzvOv4qE3cRowcm73gZ/dKoBHlOcsm3LKrC4PRm3DOvY66EIVLF11h1uAQWdAUN+Os2MIN3ShZZfWEpYAEr7G0B524aM1QN5doaeuwMQ/6jKxgJSXW9OIVPMBtDoKxp8saYdt2SvC6uuG7ziwVIa8QLOqfPNiIDtxB9DMAVQGnV7E2/prAGLTZCRUf2YgSvta0dDtnXHGTbp+QQG2wj4byaKo9wHL642siA5Kvn5xwcD+kMKGwhtdYY1dC6ajtKyJ8964C+Q+lpXGqlToAonkmt8dYCk4K6Ej+uXy/XH8lp43s5xYhO+Hz9eOg0648QtOv1YJ0RdLpXDyJu5ZlomVzo3yKSeKfcGvzct4luTwDopPX9uUWVVXlRvPw/Pw/Pw/Pw3Pw3Pw3Pw/Pw3Pw/Pw/Pw/Pw/Pw/Pw3Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/AAACHX8nrOv5Xf9YCRS8rNw7ByGt95AChDmGApYY6O95yPNADZSAnka3BgiLDUN3r64FVP7YvEXl01vyORAa32SVugjxGCDHyrql/zlKzfuyOS4p7FuUhR0HNu9rd4vv5E0IFUjfLn6YM96s2RcIE6e94WABXFRppbGnc
GZd4/KCjplBNJ4Z4ADhGEAlBPIDlTxjJ3YSrV1HEI3Z1vFHLaaLaqbCag8t1CIUoECB5RUy3nj+ssF1iCCcwKDAFONtMm/AE4NIkU0dLcQUazbTedu9k5QhiMNlYO7GA3wwNLgAAJVDXY6RR9OKidKYQdNje/bh/JrzQIZ1mg0aCiGhqU33hy7f1wHAAgGjLNBFFGwO546a4yH2yoQHToCCCGzm6rXSRpXRRVa2PGiSYWSAdGOM1fC+cfm2YeRHSc9NLzjlsegrK+osIriiYSjOgAoLxon/wAL6zr+TP6rt0K4A9uX5SmwIJsJzecPq1Ep/IRSxOIPG/Od7xS+Ua1xjY5gJsUZAp8zH+PigHXxi39p9c+sLT9N8fv/AIzmSnVOA3959P5Cfxita+f/AMwetogNnrn/AFnLQgQIczf+T+kx880fyHrGjZS3EJuXkuKOe1oD1sPf8iOcdQ1B2nnCxRCJJBIt/THwNlCbdMOJ/n+QZdKXw9OfOQ9Hn5+vzhlyS+Xt/kcgnBetmuH64sW42U/44vvOJmYvfOrD6f8Azn//2gAMAwEAAgADAAAAEAAhjAAAAAAAAAAAAAAAAAAAAAAAAABChAggjgAGkEabZ6/z1uSAAAAAAAAAAAAAAFVHwB78sMjgM6ICbjrb126AAAAAAAAAAAAAAAIZLkpMAoofgHRS6CptU0gAAAAAAAAAAAAAAAAAAAAwwwwwwAANwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOJgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD39AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGu5APZ/uIJbP1JctyP6wAAAAAAAAAAAAAAAAAO8UWyf5pZy2EqPuIi3oqwAAAAAAAAAAAAAAAALDu6L3+Tk9EPyAa7AAAAAAAAAAAAAAAAAAAAAMiqgt7hMe/Vszw+4AAAAAAAAAAAAAAAAAAAAAI/JrcWfA6KAAAAAJiAJKWDNfSQiBhHBDDAAAANgZoH0sUi1yZKhzFrFgkjxDqVvwNosbOXMgAAACgNQJgAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAQ6cc+wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACwKgB6AAAAAAAAABAACACAAAAAAAAAAAAAAAAAdjaX1a8kuqMg2VFIAsjYgiJQAAAAAAAAAAAAOzgAkT9Pd7wETiNzS3jYheUXAAAAAAAAAAAAAIVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC30gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANG9HgO19wAAI5wGA7AaNMVAa8WcVwAAAAAAAAJYYQAK14CAAIYxrfERzQqTs7O8RdgAAAAAAAADDABBHDBDDDDDDDDDDDDDDDDDDDDDDDDDDDAAKCCToiiNnQAAAAAAAAAAAAAAAAAAAAAAAAAAAEwz+EwY0wgAAAAAAAAAAAAAAAAAAAAAAAAAAAAyAAAAKQAAHwwAABjQAAAAAAAAAAAAAAAAAAAASQAAM0gAAMoQAAM4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/xAArEQACAQIDBwQDAQEAAAAAAAABEQAhMUFRoWFxgZGxwfAQINHhQFDxMGD/2gAIAQMBAT8QgeWVANs2jLI/SAAsbtbKxLxSnnvgNDBmQGLkH0iYLArb9bIDg2agEaHdCAA7tWukUZlq78qZmGj2EOQk9I2Vj9Pp8Q4wIHQXJ8YP9xWXEtfv2lTeikqZnFlebKxhvfQ7wFGPKPXCEDOX38HlPPT12mO0EG2wGgdyVFI+OFytztEwRnCWXu6LpTdAops0CGmMybhyt1hBLxpyNORhLIZvVg94WOP0ukAVtmgA6D8AJDUmPh50gRJttjle8iOOcYEHKEk8YRDziepPOIT1cyEFIK
sz1C1gzcPRiwRm1d+ld5cC/KfEAC9JH4wkSJ7gRtWbf0g8hhVZCMZJhkjAAf0iQgCKkhEhCjEGoBpnH/VTRvC+7C2Mb9GEV5VqAe8d95HIkdoSwCLVexL75TAlVHxxK7O9ueEQEuFqVGyAM1x84XgEgBjEptittlJBZLErRxQh3ifN5xhdxVvRfcDARjGnihQPjwz3R/hKTK1OgHURKLA8hLHnxKQcz23zz26QYNu8NRxAA4D4GAWSMtDfUVhmj+Brp9QBgsA8EdQ1tvCBjA9iO8SGwJ0S2G3bKHGF9tlXn1lauZ6n5gglQXw+HCZQa8lQRxcr7a0roou+HUXCIILMkjaWvkLf2MSqfUmh8rKURZQGVQFFIy/CIRZVqAe8GJyJHIkdoqpVvRfMAbSi8NCjKwsPlTF5cPznGBiMVQ6XmRy8W+koDmV503ypLG22A7b+vxKrQBt+MTJhTogsWB1Eo8uMo2716doDyR30jsRitxtfJdhnAsuyxq0FPqNHmXoPQSsWZ0dhgsjsHfQOYNecGt2gGwzuBsYYAhgXzC/sMO48T6JAUcjQSx0mRmONFph/IDM5/A/GIArGAEDmGN1pQHtWj7RAHw7wVM1IjzhHRwHdX5ViKnusCcqx+mJGX0e8EkAMZuCAFnJ8GupgFjaQOJgAhiEGf8h94QAc3peMmIZfR7+hoWSnnP0AZUZ7UyrV4dRbgIGXIxRQrDsMYKydDLeakdCDqlCC2+Vmg7oahSleR6HuoAUbO5v6Rw+9Xyx42+IAh8W/lzPgjpmit9VqoHuJ33F9qayzqIQG+PewuK7SgGLUOpcYoGN3kbDBHjjlK6rILkLZF49o++I5Icw2+cpe+COdZPAtHBBJLgAORbVmY7w9DKa6f3znMRVkeCNasW3odS5crMXsW9WavvKm9TRLvxccT2hc7NZZnBC5ioyCy3ZwVhCGoU1HdCKwqve1+FvmMQBt8CfBe0kH6bPQ0K/QVtfENQvM5eZJwYKMwAp0TQbKPwQKioLnUi+wJc8YBIkmUuDP6C3pY/Q2raD9AQQhegRtboW+cFhFlTey95AXhgBZznenUXgtLTIBcq69BGgZuonzfDJWDdHTGPPib4N1hgDkE7ClbZWMA887Q1utRPgwcuA+UgOFgE1XM4b4VvEAthfRQ2ZEjQnnT6gozAj2QovEi3AHvpHBru9F8w1hBN6NuMLBN3eitzhrCCb0bcYBABchN4AIW+2N4FE2EIb6Pq+EopxjqHf/AGqvLLosW3cInFEblDAoWECEEYSggjCUFiIJXtwgBDUsxbyAAAGETL3635xEGTWx/wAHKEUOXf8AlpcDlEbhqBygqS8QAMqw1bxvp8D/AHtNbAAlmFUiNC8PB+jPC1BFQuCYdiMDcGsYITKSjWOMZV/4iwTx5z0hAoQEesaWeymw8ewjywonHZL8W2UJyMwqfsA2CVEsYIi9AeMGIkRiP8nFEDG93vILwSQvEBljSqp21wwBSpydM0qvHdetsvWpULGqNLZLHN2lYVRpuwQyvlW7gSg2L0LGeCxNLweuNaYlvSECCzfZeHzB7o9JRvR0ggFdzwlBtCiYscpUzehy1KVU1KbaP1+LZGFQGjgJgQC/ooDXlPmMJ/NSJFiB6Cegn5xie0ZMYn7EoxofsALwAfxRAmAQPYxnLD7LrLD2En7r/8QAKxEAAgECBAUEAwEBAQAAAAAAAREAITFBUWGRcYGhwfAQILHRQFDx4TBg/9oACAECAQE/EIF1kV+fzC9FFFF63fVHEyvFPNZRTATse/YwGjxK+fqABmIelfm9Ta8KEA3r0JB6jiYzIPhH8zhRvkgFvXISg1HUAHzARfp/vcDCGJUdQbrlj+ASgEAKht23rKCvQEBhmCrEPB9OVkaeH4RcBAGfncQJ5nXvB6VpWJdVqo6uLRx4RjTszAA0VPNSyFx53gBAAHlQfmoyNpaJvXqSSNzGRXj9t19QuBgZ3vuICQEYdgjZCAKMP97tsI
+51J+RJH4DApZuIt22rvKi4iyZxaXgAO8IEEGx/kzPL/ZcNQj419DaEkmfWyf2caWh7d6py3+JTGyqcKDAC19fRRaCgheV+4TPF/v3CGjD+K0aN6De0FxJSYHX9JpGIcJM66nA8cP0lEaxb7TL23aplKHH+j1tBOxI7QAF0B3AgYYuYA1YcIQcFARj2mDXtfbHKCg7EHoH/ZRJOT6gd+MIJJwvGrpBdpLmWBgnQAAdyq8qwCCrd88oMr28rlGY0UdQYUJBwvKkBW3PKAyhw55cfw1F9biSHyIa3PqAXn+xyTBaWyy00oYI1X7eOBADnfMr8iCkDmW6XwaRghP9RTrnlWNiCOaHYp6NQhgZHwB7QhnYgfLvjywlonG2lC6dYNa06AfUZehTuPdRDhgLkXUwwZnbSlQs3jbu14EEXvAAI0AJyADckPnpUZImRwsLjPtLqoMnOhL3HWkNhz/CIjzgnYkdoKzv8B7wF1URA3D7Qgi8oMQVYQwAx+n1ouIjoNftebSgowkApWLAA7+N5QCaYDv4eEJqeF9IShXNefUNCjCQofxkBcXyTtCQX4jgoERvWGzjBVGgTrfveAjNm3KvEwUANg+IvbNlbmXG1QPQuBNX3hmMAB0rHyaxANIuZfqIwAHBwqOxCGwh6H3DRA5fMCogAjYA2L/kdNbiCK5sIGIzPiR+d4HIEaC7qL4OkNBkO586YfjASHh4PnlAVdCjxvACShk+oHeFQZPqu8oI1lh0gCocO8FYG6vbeijuvuCJDOnm3qwDN52gwJYQmM/zwQwhmua7IQAJyD5QoKNIxVoPortCCMoutoEV5yvoQgMwccKBxB9rqj0PH4N3rGBBdH+oadRl48oNVj2IajQCeoIfJyodl4PMp1SFAcR52jBIfAh9OkqCTc3mr+gMIuQcUtou0y2EL6bC3eUrsw+DD6OXDM5WNtGngeRjkWCngz5PvnCNwHsfg+UYcUdBZxY5VWcpTdl7m+YoQ2rNmCOfYpI5UcFgPLx+cYAA2pvfWnGAMqMtwnRIVFYvmYGtvgQIi7g08DrzhIc7hNRX4CgUUhEJsq8M3RdFE4GuFUW+duSUQQwJPucwFKxLE3ldQ5l58cowywubPYHEmMdGWyx6utIDBxmPutG/bUQ8x9lQ2eb/AKCkB/f9lix4woSOc3NjhArxHVqKxqs+sAgFYviqA21JLvlRQQICDa2+v0BP0xXL0F6X/QgCTapYvf5FuQpEDN6DwQI4AknwQiE7CHD6F+glMxL5Gg2S4wmOcjoHM/RgMwnyq4RWGELmQ3VuUEQBgGoYJxxpQ9IARZvtjKgOPQPtXFQ0AY/aiPElugOwdYIXxi+BHJFxAGAHcrbxwAAkjFu6AgFEJ6NydKcYyncOoJlWuA3A/kCIFWXV0OtJVrgNwP5AXOwLcCwXwoYQK4CzwRI6gDnBEJxDzRdv+2TCzVVQb35xjwW0jUegVRON/mFiBxjAQcZYGMCjHeEgkEtGywQiSK5lHT6bYQgkHVPVeGCpmL83qcZYRnGolhGcvFo4k4mnKAgJYW6juf8Ato5AGL1O/wBfpBgUcHDUuac1o4wEXf8AiAFBgo88y6w5qAL9QsN/Z4ZfHzA7gFxg8ODQcgTyGhZ8/S4BEAkzphIMqfIPRQWnoMYoFNBCvxH/ACU2CeVlwBC1bMFIlPCtFBikMVUbxFARXJthYfFL5+oINTJSld2dlDGggUJpC7NnvilKtnVaosti8sV7KrVoVMCNTXWXEElhPL00ARRp+64TOeEUpRIwe6A1uF9/MOOOPSgyClPXqQYhw/X4NYgOExfz+QGDKOvGX3AS0UwH5X6iAPzWjTLCZZjR40aE/mpD2kLRB9jkIwvYCWhIfijJGA+xCe0Vt9rD91//xAAsEAACAgECBQIGAwEBAAAAAAAAAREhMUFREGFxgaEgkUBQscHR8DBg4fFw/9oACAEBAAE/EODJrikWalaWyWn+oXZf0adlU7FAShcA0BjaE3
4ZskUBS1myOCH/APmGxElo1J9xMARAJiDCCpqFw1+YuA1RJj+MGvXn169evXrxuAOVtzgGWR5SfFKB7MZdkOxn4TATgABgBhAiJXozaECAqQ5JcN4YDGkIMw9cGB9xpRlcJkxkphCUNj5g2iAmRqyxtXpGaJWTT6GIbjhhpJFcajGSmDiUND9Z/IAmR2LGqTJiEkPvsQyhDAmphyQobGqzD3IhrCBEPLfyBjVN1yWWEqGaf8iAt2aT6IjoJqklnc0SnTqzImyMur1ID4E2ib4haCT48igALiQ5RBAAAQZf4Ohf4AyAb/8AIRwxHwAApI2DAAAEqFMAr7EjQQzRohcrADACAt1+rBARcUtLgBS7EiIVAQADkhjt9goCYAMG1hH4BDMAAT6KakAAAPrSoObogQACazeAAAYCeY7sEhFhS0ALPLdBZeygAAcDJ5TKBwAA3HhJlQCAIRZ/UFQYAAApVOBxAgrADIWq6kKAAKE5R8vj3QJPJhmkk1BSMKAAAW3L6YACIBKU/TtpqlLhUnn5HTPnh3fmEgAOcw7DxCQDIiijiCAMUTtox6NACfZpAAEAALxIZgIA2UlmCEQAK8BW1uUnILB2ERAAdXoSdTmSDBBFn60/wYBDjABBIcGAEPEC5lyKABNODwJOBAfevWEEIAJJK4KMx1IObQYqwTWwEQQFJVzNCBIBML9BzXMalm6RsDI7j3YRSsMggDKY4pZ69gIEuPF/AlxAPeIznU0Gj2CQOyCIEAxsjY0RzmV1AHdq1gHAWA4xJyHAHhhhwNXSgRyAAFbYLgLQDVG2axOB2AgI0+OjEaesvcqEZwZqSRkAALqdlgeiAAtgLct8lAG8a5cNMscI34QAIyVpsZgDdB18ReCAAiBELalsBD+IAV36gCEAAyEEJAR+cR5BCIABMdhIQFEaqmICCAJOOqARwJsD8eBSEAIADx5ggBAAANDECIHgAAnhjx2AYBnGk13oCIFkCyDwEbiwBU5KsWADsOYW3ViUgI0jWDICmlc/01TGmTI4YyunCo3sEag5RNnRKF5NxoTWjTT7gHIKjpuK0hIQG3kRdmb8BCbyQCKRi/BHNtnVIjVoa0QvY6dipa/IWupEKU0LBk2NDr8nX5OvyT3nudXk6/J1+Tr8nX5OvydfkhIqXpxSwQZqLHkoHCsDqRhSwkkKNjqK0AEwEEcuACAjLHjdwgAAGfiLyg/gBQZHIXISRxCAWHX02AAAoZobSQcAAAnHCwEYAAI3VPdMkQC81bsX4CgTwBiGiOAAHsoU5JiCIDkiaApACQPvOIFVHMPHoj5JGCA21OWvQHKE5QnKE5QnKE5QnKE5QnKE5Qmo5RXs/Qy0MrxEysicAJVBphlTmUZaDy2O2NQX+DLWB17gAkUUOCADVD2QMtjzZNPZg4O3RpC4VHGfnblUiN2S4fj+j6E4puHAJKu2hASaqUaf0WIIGVGIWdBEpNxIKhAAPcjEhqn9FAmg3MFwZSCkH4AF8AQBAACGF45MTGOYf0UaDzKJ2FdA78TEAACEAQARurCQADQqbTcw5/0PQjIaSLDAWy36MHeyZMgAAESBRxe8An8BrqAGnuBpXxE/LGthUW884DcO3gbvuXFZgLQQeRlk5oswWDKClQLFJOoCUccmRaH2L+opgTvREbI58RgDbnZtUzUqBrwAAsBAqUVdjyuGvDQjVoQk2obfuHFRgIhobUjpER0xkxoAoui0H8eIkVIeWxchliEjUBKGVUlSIGpl8i1roQ/kj+4J/eH6+oCCEMDgp2SJlgHgHQ4bDyBfRACljDPWVCGkIkgfVe9VwMAADB71cpjQ8wAA3GGSYAxhAAB8zLIwlCDSADSBmfQThggEOoCGIF8dhECFgEiIFFoABn1TnBFCAAnfjqxQAIJUUpvyOdYAGxXjYLZdlZG1FdiC+QaByGwBgvHLj7CgB2njxE1ZCNREtGNqdQgAJHgIogrbqhh0dPwSJIPbA/aSRVh3YGgAPf8A72
lBmoELh5xFwYgF3RXbwCASABmPLmkwBBAefGxXCHYvmFNubQpjkaAM0zxulqYykAAOatK0frQGQHfcXGBqrAGGSAUpdEYASSV8DvCgJiABCMqsUIAA6L9TT3qAFoDNcCb/ACLQnt7LCONhJ+BMeorb6TO49QypApFpMBs1AQIYs7ymwRqDcCHdKkaQBAJIP4JyggCAGTU7jVac5kAl0061dg7WE9mMejJjYjKgBOwAAxcl0KAAAAy+CIDQIJckmm0QuAIdGEqxkG/yNqhQ4JAevDxNAI3QdQ5OlQ+3QPGwtIYdRNgGNuV4SpoaADQZfWD6bItQoADtswQrNWwSG20H2176hKABjXKmGR1RLgAQGWTW4kloNvAdJwNSJZEgkhO6IsEsJj/DMVpugUhkaAIPhI9RABpkIEUXwIAHXBAx7MFgABLguS4QIAgFliVmgIAdsw/NgqIwEvoDUUQggo3MkOX8i/M4cOELApadTQqFMOmNqqdwZrnqnBEtnQoOQqAIUZF0EUCuLO/Sc8xQyJEISdC1atgI2DyAJJLAjBsI4KxLLgClgcQAREz9zCxB3gjuFhY0VVIpaSSNEZ0Jpj7VMCph1AJD5B4IPqIBO10aQCxfUCBend0YqAG4pDUaOtoEAKGNbAnpnaABPjFwMD9hgAgezgg5AADKzqordZENhIJq0Ku+dsgJXoJAMdLX0KiKoAjmipSwA0OIeZIoKHvcgiGG+eFAM4pBBQLSPHkJD/AUABbuuithkDKCQr7fJAI/IICJIO0EAaa3AArfrWe4Pkag2JiskQIM1KBAvWfER4JQMksrMkkCVnQPB0Bdi1NuliCBMD/ihxEkgFJM0qEPArUoMYVWN+fgZEtABIn0ORshuvkUqk3Jo/Am02ROQ8RMIT2tCjPCFxqBtbCdUJDTmtuDSeR4dWqLkEQkitO5Sb8jcldUG2Gb/kk9dBJv2ERZ59ELbjr8kfowlXQQIGqaYXt0ZwEAMSGQuk/xQQafik7qRQIA/tUAwjcRqAKDt8TUBBgPcTRKKtJL2PTQHVeYrxRjJKCDjAmm/OMBG0QTGT7xnjkQBjPsZQm6gFgjVwbCAZgABLBU/VCjCH1x6CeD0iSdJVVZMQNjEQUFWSSjCS1QNgHOt+pgDkCgwDhIemcZgIeDpAgcV7/MWYCSZ0CMMjGaRhPVl0fmCjC/hgwAoyre7YRICalPSNIbJPsLCSu5/atWb0IYDyzosyhoAEyRNkJm40pl4FwizWluRAbBLm76ZkiIb7/KdCaMLL+OpyAlMJKjV3OAFyGKw9X5O5EBjYEQog1cBgwABWCpYH0ARAQBZaDhh3HAEAD2gRktMGwTsphsU5Q4ABBABKx/SEwugIoBjiAkUAQIQY4WsKQmdSIYAQggmE1KWC2+EWfK3CZBSD7EeQUyABcsCTldfQA2EJph76QCA+BRVPeFVexgj9mIwGWBaj9pkoBI2Jn6IDVwCu4wERosCBAi6svXsjL9AWSDDkfQs78ASIpg8LBcd0ACvDj5ToIF+iBRBgSstVA1RHVrwAE7iRoxPYj7JIGwDZZc2kgaTmOqB3vTEOG0CwgBpc2qGNg3NIA2VEUZv1xOgqRlxhQWp68TD8HIAHANDNwAAACYkzAUCH5AAj1ZomxuoAHIooRkqBAEloogBAIh6rA6qmhIhP1LtMH0EwLM7bJMmAIGg8RA5EEAdtoizNXq0kZIAibLNWtURQAC62niNkg9lL3yMIgAA/8ABsyRgGQClI8wCdADg/gUN4dIJHcRm3dQCACP0GVpwKQGBZ7oRMOIAFsLQlp/pIAQs3lafKq6E9xUnMjUdUJZi2TPsxIbfI0yTrwjJAi+MVZIbxicDtrbaISgSkaoquNjrxSMgHrhbelpy+hEz0NPRFyZYsELRnM2HJiMGyUkSpwMTJzoPVtZTRz4xkO5GnBq1yQ8rOP5kvIIu3EB7AAfUr8QsBR9gAzsAIkBCheqGAQhdwdfOAmx0/4BcwSLxB
IA8JsmTYSkiyQwB+oVAvFBJHeH/ZCUsCkihry1sQDJNh+IKAGQAQlgt5pAbgtQGk+JBP8AEAkP1UpxEgSClBiX1nImxEtGCHIDAyAW2q8eMp38AcJJ4zwlbk8yb4Nw2J/G/NaHmBABNNK1wlbk2LuZwCQ5gESAKD9BSSY8pYawJGAGRfVhjuKiFhhKrAGGGmX1AJ8qJdgI26YCbfKbwLNGsAG0pEjd2xsvuSoAMI8aQIAH4A0T/EAbVU4SUAO4ZsQToQCwCORUoKQoBkUFlxEIi2AP+mAY+SA8Ctw5pHaIPSCLE3wN07L0etwHACbJW5K3RMoaXWeU8Ohds6jfrieOABvLhPP1ZDu50x+ACARBvBjsGYGi24CAIoVQowU5wLO5ygsawrRSQh1Ang5s/TgVqToxALiUZSUrLgGlI8lKAE4rKOWapMZHY8nCti6D5uHDohOIgABi1CKF1HFDA7HT1Q30UQh3pJ5KJQE8rcQIX+AMAa9nspwAAY1t3o8ggjhP3q7snUBDeV3AsNAQxoBty3ZCzVHMAyptvSG6KV0ACqVFMuk3hqORwKA78oIJoDD+3VJ0wBAA0f8AUCUYQA5WzxyYAECA+E0Ly+5IAI2ykPFwRBuPAADTgIgJAB0z+u2ACCAvacZYoYcjWBQYJz0WuFECMAiMsfeIEAABfYVb4xmGaGSMg1LMqjA9VMiCwM16BQS2CE+TncjAIXU8jBSADkKSo1Opc70iDAIKu8lLVQsAv4sRGQmrIEHBWWX96cgWm9JxNyYIACAQceUYAnQABtIAYgAD0H5gv+RwBRpMWWl6mwFAAhXk7JUjzgABAD4SzhB4gbgCQXE6ZJAADN9IBIigAAGS7lSCt1E1CQz9ok1Y+wnCAEjQhAgAADyS81Uq6BArbL2KgCq67BT52ABznOBAAB4W3Kn6go3rCCM4gRG69OgopDgIEEANq1iBoYAEgpPgWypEx9gTbeschAlDl9n8OYYAL60EoAAOgFlgiCAD7sTe1zEABBGZREHcAIut1fYCgACIllIYw8wLNcENA2AAWLHVe1QSA1sJaMXQ05GIHGpIxXUXEOYAH2DltVABoDXrTwoNCd0aECpNkMIAELoySIETzAFa4AggkC18VYCAgEYEtyJCACC2eUesgBOQAcv3IByWW4AA2T9GCCgTAAkPcnBEYvYAAOZQV1IZgKKEmtVRcw5gAdmUopTdyDkAUq4JAGQggAGjr6D9iBABNCT29BUDUIDeWnQ8zgAAHUTLPGugAAnV7DGTjFkBkA6K8KSSQAEHVXQrigGXJgqgBkSX8xHcXtwyuq8QgEFxA5DIJkBZCy0MO2bWTAABTEe5iJAOdIdVHZwBtfqF5BAbC5KUFDXIPgCyUhAfkAgGpA04aGQQlQaVAIAOZnQn4a5hhgDAAhGGquWG4AAh0z1pIDAgBoYDyBGLOwIQLo8l4w5I1A6L96NCiXARgHyAUQAFxOBGPLCYmIgu3XygAGVgAvbiWdoJan7i/MmVYlCAChq4EEABiY4aRAieYA3fVJGrAhpqGaemOEUIsjg1VCULi1Shq/54WxHrj1R6YWy9MfEt2LL2HEAYCeDli/8ArIiUBvYnmaDEZg87BV9wTK3+fZQ/XDoL+QDQvpivyAESJ7BAZsOl7HvASvsBQHkACQ6OGVqIBAAUn+jA6FAEB188swMg9IAAVR1JAJ/0ALgXN88xDAB0fwiziUH7wyFFL0NPnr4tf/fgYABZXtTFbCG4RNk0+h3HEK4c00AiqOXVf2CQAgvDejDAgBDoFIYyicc4H7AAeH9JpfQCCp0OAlVvn7TmuObXALAIk5gRpZFIkOEQKICO/g8wCABUlCGeSZNBuqoiD99aJAwMhbfP4X5BxuMO2e+oDBK3F3MjEMTK4IM3bYOtQjidDOoDTg2ktBVf9cEvYQIrhUpHUelgUVGAn04YKi4rTsWT1E+jDFF0GGQNqEabYRMekAM3d7iXcgxLeglJrxzd3uJdyD
Et6CUmvyBRu8q4AOlzAAO3LOqIE6MsAAuF7bBP3AABKI+HIAyEKHNIJeMgXkAQACa6OPoaw6cO1KZBAB7hAQ1nGQb/AHIHvpcsRAQAATh5JvkIAEHIGgLhDsXzEeXYAqOjIFJFZHBZIHh+9Z/4Al7xRAilFeoA39zeaAUWRgCN64gE7A57iWBUnEPAWEBDAEsK001oJJMm5BxgAFVLIWGrAfgQWs+oAIaVjFWQNFeTZ2qK3CAgAMJ4REFFQcgYkCet/IMFgADMtipcdoOQQbDRevjCgADoQlaHACWLmgg0N6pzEB7go5MrC8cIcLAppiGpZpBkQARwEihZuSYhIZCdGcL9KG+xjsERcU/BSAHQn4bVY5gvcgKVDxAUOgQWGljWq19UchgGwJ2aKCHFsjkn4bVY5gvcgKVDxAUOkQWGljWQSnUchgGwTFzAANjYgAI9ufCLgANPQ9IIMpm04G9UsvfEAAV5c0CR8VQQAL6IbdDUlZyQBn7ccSUAxwCB4gqACAEJUn95CUncAIZPrwDAAwELh6SBLHoaCuFDesGAICVoxLJNbntzgzooCGwDNohQYBOhATIDwDQBoJITuiAVKaD0DVp7IB7q6mxXdzCAAALIs1HjgQAByY/jgiQB8y7xIOzgBR1tsKgwJJGQH1SmCiBoAAAgIpp0gM0IpBA5Ylp7vkEAh7sAvoRDQEc3YROYj7hzYlNgxiw1AChVTayyDKYJxm5g42yMMM38cPSQahgWi5QhjSOoAFtptY8iPqAeRlz9IRL6yEOJRRqX2BO5K/mbKX7ICAQCY7T9MFDn6AiX1kIcSijUvsCdyX5vWUv3QECIZjnCGf7Mk8eKEhjAaUmDKkCDblWhtMgQhSOXxQSWQeK7gwMixqoiKZEf8AoIh0F2l0REDJbsxQ8KADWGtHIEBSAPyVAs40xhBopMm8aiok1EUAznsBozEH/KFQAjBYJRBnSzGtARV/JvWKAQUCQwVS0KpUACEqQrMDdEVAFVFtgeAAB0AnJ2Z4ROUmwSAhNOggDBW1YKUEAv7gBAg2D9HIkFMw7nOESSYVFwHB2gZuAEjfg9ZOwATTNOAoT7AGnUNORVOCJKSWxMGagbsayM5t5WwXUBiGDiBsSYAoBDHMEqQEn9AJBA+fqdEDBBu3M4eQEEczwQwwA5CHg1ZKktQRsJDSA5goIbgqX9zTKMRAcACwTgqAAAACA07UwqZ6gzkOmNgDyAYpqwUhPYCFAR/LAoqEABGDjyUAwi0HYl0xsAeQDFNWCkJ7AQoCKmH81UIACyGZxjBiw0BMsZmBOIAo2RDywA2CVjfCNLQ44DuwLl9SjiklkDT4lOR4EWaqlMZND1AnBncHnA9a9DfBV8FEwJalS+EqTKMGWdCL7GkG5SRTQ8C1Q4IrcuP8MoocK1SHM8qGaIeUOU3KfZL6wFamTYpLjqUkNSRgOL2SJ5aFQ5FS+fxOgsC7mU9APC/NDvtM9AQbuMHAjUHkH9GewkwU8rZCDcJy4jCIEAHC4gRAAAGn9FW5QkpdESSuHFuABhICAABGUMIAFROJgQJ0GijWTK/ojbhLcdBN2Pgpsejgf8CAAE7oZZmOahbSURhf8Ahzcbczc0vhHr06dOnXr169evXr169evXprCGwfHw9evXr169enXr069enSp069evXr06devTr06t+DU6VOnXr169evXr169evXr1igSi5cVaFWSBQgEKnt3qFEBoGjvSw8AMBDZHdMYAIJdWLPciQBAz8F9A0A5gEKEWkhSXIAJPFT/89QIBgWFwYQhna1DOPcKZRGwwJAGJqmYYZgAAJv0rEwCDYiDFdlaSZydAvS87wmgaRliDD0OwQLtVzEPfc6BMQPybyAhZJoJQRtkJDerphZzAdyNP69BGAsJQGaDZ0RiEVbpACty9AmHypzqAC3CIjlBLsaA6xHcKEBpSzAbB3CPP1PxoshqJGgCSHQc3AGn+aEIfPIGPi0KJ/u3JAQB+kDYEBY
CTM01ZBMWrYJOwNWIRUjaI1AYq4s11Zach9W3K2CgAAaDOOIH6EA2cEtSgAgNRX/3TBCuYQdtPAEuecN21ER5MIUDx0NEswJsRrsPPwW1q2AQi30EM4ogsIiM8ptj3A0QXpImsmRoAFDDsZRqBqhQBYdmzaJQk/ooDQVgQpQCDaghYXBqvlcuuhaBMgT44gor/AIAdXqOMdijcn0CEXJBEgBqE6jX0+d4TWr6CEGVr2XdxgDVQQSFQABORAdgg2Wch6JAABAABoAwgAAAdh4SgCA/1wmyUDNAOOCggKEAAGwDAIAAAfCO0q6qZWwWnoARDAAJFCAAAAqoxCgCRTLqMHECAhgAAAIAgIAE+BADAmNOk2DjkpQQAAQEAAABMo6BHogxVAADAMgIAAOAB3ZAAQAB9yFuIkJIJAAECAgAFAABAuOuoikowR6EnOgggAWAoEgASKWAIAxliMIsNV8C8MQ0jZTgTlRnkDVF1AyNOxew38eyQoAIrOEHH9hIE6x8eUF0PABrLW6DHypj0mqxYaPUKyGRzOleT6iCCDdK7I0x8/UcmKj3/AOZ7gOxuLokV/wCRsbupdXXX6IoK1Po87wlC5MrAIAM0sQAC0AAQMwoADyQEHkNBOb44pIgwAgAWAkAEAIBwQwgAAdiXvgjeH0HLirVEgEBAACUAIAAAAAgTjaXM4S7RAY4HxggAANhBAAgAEJQAIQMHLRBiHoCgAgQAAIEDAAAbSOBArs5DuTg2QMAAkAEgAEio4kP9gDgcDoDAADxKgQIgABqh5wMVk35ABqabFycASxVAAHAwoAEACIwCSKBAQzzgALAUBCBAAAQBTE9IZwDuIhyv4F4Ll7u9xNeGfGpVawZmoDsA4xcl6Kj7RAcEoiYw0IJ3AvDW2wnYIgFUQPjwC5Ho0G9GLuMvQdRG4kKxICcIH3E8LuGc5CEYDECT0HvNohIOAiHFkG2JVp3Ni/NM6S0zL/SBYejzvCS4MkMMLVsjpCj2cxYAYdHmCfCiEADABJpBDwJQAA+RA5gQA0QCCypk2gODhYkh4AdKSiCEQAFpWMJh9AEGXJEl/wBxgEABKkSDQBbSAcuGAJGANZLJCfMu8Cueip+hCgdDtgAAmMKW3CrJobAZBr2tkFzAFfAlJkB6EoqNEJCkoMONAAAPiQm5gIIEOoQKAQzJbKtJLUfcGP18hIAF4vdXEwIAirJpsOQpacWkwNOKCh7BngQgweBAEviUAggUvU+RPECBOfjYWyIUzF/wed+azfkEsX9GaaaYYYaYaaaaaYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaKYFQkWOPc14zmpHnmZRkes8G8ZHhGr5xgJ2qOHNYIPE5gIASgEAEpIzGQoAAMFkfTkZlkteQh0+BHvMFFkIGTxAwASUw+8GQ3kLTAClzH2Af6Vw8DDAAAaUpr6lQDAWOAO4ZBAAAQdgEngQIAJC0NVyYGEJ3AB608OCHUgEAaCR5D2NECATX2lAAiDgAETOWQhugNZ3+cMqq1GZsAkAGkws7QCAkWBdugtSErOoAFSz5JHQROwARHBUYByADIQQdqMAWAcad4fUdhNSkai6MCjIKQ7ByMpanYuOO0BgDECQNuEpTEF0H4AR0WjiYKrBEMgQWiV5zeQQAC0RAoo/QgCw3G10BiszcwAqLtQTAUewaGXrJIMTjAAHz0ISFBKmP6JDj+GUxQlT/BkYW/zR52TE6N+30FoQlCEFpTiRbp+vQiTto0KYSxKLyZIjyMJwI3iAaKADQaerNDhAAPwsAApyrl5FQAn102uGGGX3GlODXFK0i4yuUdZb2DN/KME1Cgn+0vjGkaBD0wK63dQ8hWvWvozr2QtAJ3nRdjBA7dsTawAivXY0Vq0Do+GIIQF0Rs6h1Y05letsblCMrEYHNK0I97ewknLDIOh8pA6hDuUR/BD0If8AGm5h/wAGX/cv/9k=')
# Hint: content_type is a MIME type and it should be "image/jpeg"
# The content is a base64 encoded image, one way of storing images in a database
def image_file(request):
    """Serve the module-level ``image`` payload as a JPEG HTTP response.

    NOTE(review): per the comments above, ``image`` appears to hold a
    base64-encoded string rather than raw JPEG bytes — confirm clients
    accept this before relying on the view.
    """
    response = HttpResponse(image, content_type="image/jpeg")
    return response
| 3,186.526316 | 59,988 | 0.969724 |
b3ed55ba5734f4f22ed1b6a1e701fcec0aedee2c | 4,374 | py | Python | docusign_esign/models/app_store_receipt.py | dennisdejong1995/docusign-esign-python-client | 4255e9b9e8f89f5c89671a7feee4f86d32317abb | [
"MIT"
] | null | null | null | docusign_esign/models/app_store_receipt.py | dennisdejong1995/docusign-esign-python-client | 4255e9b9e8f89f5c89671a7feee4f86d32317abb | [
"MIT"
] | null | null | null | docusign_esign/models/app_store_receipt.py | dennisdejong1995/docusign-esign-python-client | 4255e9b9e8f89f5c89671a7feee4f86d32317abb | [
"MIT"
] | null | null | null | # coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from docusign_esign.client.configuration import Configuration
class AppStoreReceipt(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'product_id': 'str',
'receipt_data': 'str'
}
attribute_map = {
'product_id': 'productId',
'receipt_data': 'receiptData'
}
def __init__(self, _configuration=None, **kwargs): # noqa: E501
"""AppStoreReceipt - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._product_id = None
self._receipt_data = None
self.discriminator = None
setattr(self, "_{}".format('product_id'), kwargs.get('product_id', None))
setattr(self, "_{}".format('receipt_data'), kwargs.get('receipt_data', None))
@property
def product_id(self):
"""Gets the product_id of this AppStoreReceipt. # noqa: E501
# noqa: E501
:return: The product_id of this AppStoreReceipt. # noqa: E501
:rtype: str
"""
return self._product_id
@product_id.setter
def product_id(self, product_id):
"""Sets the product_id of this AppStoreReceipt.
# noqa: E501
:param product_id: The product_id of this AppStoreReceipt. # noqa: E501
:type: str
"""
self._product_id = product_id
@property
def receipt_data(self):
"""Gets the receipt_data of this AppStoreReceipt. # noqa: E501
Reserved: TBD # noqa: E501
:return: The receipt_data of this AppStoreReceipt. # noqa: E501
:rtype: str
"""
return self._receipt_data
@receipt_data.setter
def receipt_data(self, receipt_data):
"""Sets the receipt_data of this AppStoreReceipt.
Reserved: TBD # noqa: E501
:param receipt_data: The receipt_data of this AppStoreReceipt. # noqa: E501
:type: str
"""
self._receipt_data = receipt_data
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AppStoreReceipt, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AppStoreReceipt):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
    """Inverse of __eq__ (which already handles non-AppStoreReceipt operands)."""
    return not self.__eq__(other)
| 28.776316 | 140 | 0.584134 |
b36cb8522d81bb7a5c37e97fa9f75621b4769f47 | 1,286 | py | Python | tensorflow_datasets/image_classification/rock_paper_scissors_test.py | shikhar2707/datasets | c034a193967d6d72152196708a5638e546e320f4 | [
"Apache-2.0"
] | 7 | 2020-04-21T01:28:30.000Z | 2021-06-13T07:23:33.000Z | tensorflow_datasets/image_classification/rock_paper_scissors_test.py | shikhar2707/datasets | c034a193967d6d72152196708a5638e546e320f4 | [
"Apache-2.0"
] | null | null | null | tensorflow_datasets/image_classification/rock_paper_scissors_test.py | shikhar2707/datasets | c034a193967d6d72152196708a5638e546e320f4 | [
"Apache-2.0"
] | 4 | 2020-04-25T15:48:46.000Z | 2020-11-22T20:35:01.000Z | # coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for Rock, Paper, Scissors data module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets.image_classification import rock_paper_scissors
import tensorflow_datasets.testing as tfds_test
# NOTE(review): relaxes the builder's fixed image dimensions to variable
# height/width (3 channels kept) — presumably so the small fake images in the
# test fixtures are accepted; confirm against the rock_paper_scissors builder.
rock_paper_scissors._IMAGE_SHAPE = (None, None, 3)  # pylint: disable=protected-access
class RockPaperScissorsTest(tfds_test.DatasetBuilderTestCase):
  """Test case for the RockPaperScissors dataset builder.

  Purely declarative: this class defines no methods of its own, only the
  attributes the `DatasetBuilderTestCase` base class consumes.
  """
  # Builder class under test.
  DATASET_CLASS = rock_paper_scissors.RockPaperScissors
  # Expected number of examples per split in the fake test data.
  SPLITS = {
      'train': 3,
      'test': 3,
  }
  # Archive names expected from download_and_extract — NOTE(review): confirm
  # these match the fixture filenames DatasetBuilderTestCase looks up.
  DL_EXTRACT_RESULT = ['rps_train.zip', 'rps_test.zip']
if __name__ == '__main__':
  # Allow running this test module directly via the TFDS testing entry point.
  tfds_test.test_main()
| 30.619048 | 86 | 0.769829 |
464568ec3b957e961b32777549a6b15af86b8ae8 | 4,927 | py | Python | tests/m2m_recursive/tests.py | iMerica/dj-models | fbe4a55ac362f9355a2298f58aa0deb0b6082e19 | [
"BSD-3-Clause"
] | 5 | 2019-02-15T16:47:50.000Z | 2021-12-26T18:52:23.000Z | tests/m2m_recursive/tests.py | iMerica/dj-models | fbe4a55ac362f9355a2298f58aa0deb0b6082e19 | [
"BSD-3-Clause"
] | null | null | null | tests/m2m_recursive/tests.py | iMerica/dj-models | fbe4a55ac362f9355a2298f58aa0deb0b6082e19 | [
"BSD-3-Clause"
] | 2 | 2021-08-09T02:29:09.000Z | 2021-08-20T03:30:11.000Z | from operator import attrgetter
from djmodels.test import TestCase
from .models import Person
class RecursiveM2MTests(TestCase):
    """Tests for self-referential many-to-many relations on Person.

    ``friends`` behaves symmetrically (links added from either side are
    visible from both), while ``idols``/``stalkers`` form a directional
    self-relation exposed through a related name.
    """

    def setUp(self):
        created = [
            Person.objects.create(name=n)
            for n in ("Anne", "Bill", "Chuck", "David")
        ]
        self.a, self.b, self.c, self.d = created
        # Anne is friends with Bill and Chuck.
        self.a.friends.add(self.b, self.c)
        # David is friends with Anne and Chuck - added from David's side.
        self.d.friends.add(self.a, self.c)

    def _assert_names(self, queryset, expected, ordered=True):
        """Compare a Person queryset against a list of expected names."""
        self.assertQuerysetEqual(queryset, expected, attrgetter("name"), ordered=ordered)

    def test_recursive_m2m_all(self):
        """Every person sees the friendships added from either side."""
        self._assert_names(self.a.friends.all(), ["Bill", "Chuck", "David"], ordered=False)
        self._assert_names(self.b.friends.all(), ["Anne"])
        self._assert_names(self.c.friends.all(), ["Anne", "David"], ordered=False)
        self._assert_names(self.d.friends.all(), ["Anne", "Chuck"], ordered=False)

    def test_recursive_m2m_reverse_add(self):
        """Re-adding an existing friendship from the other side is a no-op."""
        self.b.friends.add(self.a)
        self._assert_names(self.a.friends.all(), ["Bill", "Chuck", "David"], ordered=False)
        self._assert_names(self.b.friends.all(), ["Anne"])

    def test_recursive_m2m_remove(self):
        """Removing a friendship from one side removes it from both."""
        self.b.friends.remove(self.a)
        self._assert_names(self.a.friends.all(), ["Chuck", "David"], ordered=False)
        self._assert_names(self.b.friends.all(), [])

    def test_recursive_m2m_clear(self):
        """Clearing one person's friends also drops the reverse links."""
        self.a.friends.clear()
        self._assert_names(self.a.friends.all(), [])
        self._assert_names(self.c.friends.all(), ["David"])
        self._assert_names(self.d.friends.all(), ["Chuck"])

    def test_recursive_m2m_add_via_related_name(self):
        """Adding through the related name fills only the forward accessor."""
        # David is idolized by Anne - added in the reverse direction.
        self.d.stalkers.add(self.a)
        self._assert_names(self.a.idols.all(), ["David"], ordered=False)
        self._assert_names(self.a.stalkers.all(), [])

    def test_recursive_m2m_add_in_both_directions(self):
        """Adding the same relation twice results in a single relation."""
        self.a.idols.add(self.d)
        self.d.stalkers.add(self.a)
        self._assert_names(self.a.idols.all(), ["David"], ordered=False)
        # assertQuerysetEqual compares as a set, so pin the cardinality too.
        self.assertEqual(self.a.idols.all().count(), 1)

    def test_recursive_m2m_related_to_self(self):
        """A person may relate to themselves; both accessors then agree."""
        self.a.idols.add(self.a)
        self._assert_names(self.a.idols.all(), ["Anne"], ordered=False)
        self._assert_names(self.a.stalkers.all(), ["Anne"])
| 27.071429 | 74 | 0.494621 |
3adb1c39c30837e2516fb2b85d701821a996f29c | 2,060 | py | Python | bioblend/galaxy/config/__init__.py | kxk302/bioblend | a0af6725228c1c6bce6bda2994cd5ec3199d12eb | [
"MIT"
] | null | null | null | bioblend/galaxy/config/__init__.py | kxk302/bioblend | a0af6725228c1c6bce6bda2994cd5ec3199d12eb | [
"MIT"
] | null | null | null | bioblend/galaxy/config/__init__.py | kxk302/bioblend | a0af6725228c1c6bce6bda2994cd5ec3199d12eb | [
"MIT"
] | null | null | null | """
Contains possible interaction dealing with Galaxy configuration.
"""
from bioblend.galaxy.client import Client
class ConfigClient(Client):
    """Client for instance-level configuration endpoints of a Galaxy server."""

    module = "configuration"

    def __init__(self, galaxy_instance):
        super().__init__(galaxy_instance)

    def get_config(self):
        """
        Get a list of attributes about the Galaxy instance. More attributes
        will be present if the user is an admin.

        :rtype: list
        :return: A list of attributes.
          For example::

            {'allow_library_path_paste': False,
             'allow_user_creation': True,
             'allow_user_dataset_purge': True,
             'allow_user_deletion': False,
             'enable_unique_workflow_defaults': False,
             'ftp_upload_dir': '/SOMEWHERE/galaxy/ftp_dir',
             'ftp_upload_site': 'galaxy.com',
             'library_import_dir': 'None',
             'logo_url': None,
             'support_url': 'https://galaxyproject.org/support',
             'terms_url': None,
             'user_library_import_dir': None,
             'wiki_url': 'https://galaxyproject.org/'}
        """
        return self._get()

    def get_version(self):
        """
        Get the current version of the Galaxy instance.

        :rtype: dict
        :return: Version of the Galaxy instance.
          For example: ``{'extra': {}, 'version_major': '17.01'}``
        """
        version_url = "{}/version".format(self.gi.url)
        return self._get(url=version_url)

    def whoami(self):
        """
        Return information about the current authenticated user.

        :rtype: dict
        :return: Information about the current authenticated user.
          For example::

            {'active': True,
             'deleted': False,
             'email': 'user@example.org',
             'id': '4aaaaa85aacc9caa',
             'last_password_change': '2021-07-29T05:34:54.632345',
             'model_class': 'User',
             'username': 'julia'}
        """
        whoami_url = "{}/whoami".format(self.gi.url)
        return self._get(url=whoami_url)
| 29.428571 | 80 | 0.56165 |
31a8b8a1531ff29b6ff6bbcfc1f7c5b17cfb7400 | 501 | py | Python | tests/test_named_values.py | KolesnichenkoDS/PhysPy | 09452288a660a0c18fe0fef52b8e8f0d4d23ed8c | [
"MIT"
] | null | null | null | tests/test_named_values.py | KolesnichenkoDS/PhysPy | 09452288a660a0c18fe0fef52b8e8f0d4d23ed8c | [
"MIT"
] | null | null | null | tests/test_named_values.py | KolesnichenkoDS/PhysPy | 09452288a660a0c18fe0fef52b8e8f0d4d23ed8c | [
"MIT"
] | null | null | null | from unittest import TestCase
import physpy
class TestNamedValue(TestCase):
    """Tests for physpy.value() approximation of measured quantities."""

    def test_one_value_approx(self):
        '''calculate an approximate value for one measurement'''
        measured = physpy.value('x', [42], 0)
        self.assertEqual(measured.approx(), 42)

    def test_ten_values_approx(self):
        '''calculate an approximate value for ten measurements'''
        # Ten alternating measurements: 3, 4, 3, 4, ... -> mean of 3.5.
        measured = physpy.value('x', [3, 4] * 5, 0)
        self.assertEqual(measured.approx(), 3.5)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.