hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acfe2d3caaa38637a67afcef9250ffcd4adba436
| 11,637
|
py
|
Python
|
src/repobee_plug/_pluginmeta.py
|
DD2480-Group-18/repobee
|
1dd79bb6ace3c00b920ef0b32664847cd5b12f84
|
[
"MIT"
] | 39
|
2019-04-02T15:53:23.000Z
|
2022-03-07T02:38:41.000Z
|
src/repobee_plug/_pluginmeta.py
|
DD2480-Group-18/repobee
|
1dd79bb6ace3c00b920ef0b32664847cd5b12f84
|
[
"MIT"
] | 788
|
2019-03-31T13:55:53.000Z
|
2022-03-29T20:41:02.000Z
|
src/repobee_plug/_pluginmeta.py
|
slarse/repobee
|
03fcf90dc0244e0274a890d2a897752889c70326
|
[
"MIT"
] | 18
|
2020-06-15T11:49:50.000Z
|
2022-03-06T19:05:53.000Z
|
import argparse
import shlex
import itertools
import inspect
import re
from typing import List, Tuple, Union, Iterator, Any, Optional
import repobee_plug.config
from repobee_plug import exceptions
from repobee_plug import _corehooks
from repobee_plug import _exthooks
from repobee_plug import cli
from repobee_plug.cli import base
from repobee_plug.cli.args import ConfigurableArguments
from repobee_plug.hook import hookimpl
from repobee_plug.cli.args import _Option, _MutuallyExclusiveGroup
# Map of hook-method name -> hook callable, gathered from the core and
# extension hook modules. Used by _PluginMeta._check_names to validate that
# plugin classes only define public methods whose names match a known hook.
_HOOK_METHODS = {
    key: value
    for key, value in itertools.chain(
        _corehooks.__dict__.items(), _exthooks.__dict__.items()
    )
    if callable(value) and not key.startswith("_")
}
class _PluginMeta(type):
"""Metaclass used for converting methods with appropriate names into
hook methods. It ensures that all public methods have the name of a hook
method.
Checking signatures is handled by pluggy on registration.
"""
def __new__(cls, name, bases, attrdict):
"""Check that all public methods have hook names, convert to hook
methods and return a new instance of the class. If there are any
public methods that have non-hook names,
:py:function:`repobee_plug.exception.HookNameError` is raised.
Checking signatures is delegated to ``pluggy`` during registration of
the hook.
"""
if cli.Command in bases or cli.CommandExtension in bases:
attrdict = _process_cli_plugin(bases, attrdict)
methods = cls._extract_public_methods(attrdict)
cls._check_names(methods)
hooked_methods = {
name: hookimpl(method) for name, method in methods.items()
}
attrdict.update(hooked_methods)
return super().__new__(cls, name, bases, attrdict)
@staticmethod
def _check_names(methods):
hook_names = set(_HOOK_METHODS.keys())
method_names = set(methods.keys())
if not method_names.issubset(hook_names):
raise exceptions.HookNameError(
f"public method(s) with non-hook name: {', '.join(method_names - hook_names)}"
)
@staticmethod
def _extract_public_methods(attrdict):
return {
key: value
for key, value in attrdict.items()
if callable(value)
and not key.startswith("_")
and key not in ["command", "attach_options"]
}
def _process_cli_plugin(bases, attrdict) -> dict:
    """Process a CLI plugin's attribute dict and return a new one with all
    CLI-related attributes (settings, generated hook functions, option
    plumbing) filled in.
    """
    # Work on a copy so the caller's attrdict is left untouched.
    processed = dict(attrdict)

    is_command = cli.Command in bases
    is_extension = cli.CommandExtension in bases
    if is_command and is_extension:
        raise exceptions.PlugError(
            "A plugin cannot be both a Command and a CommandExtension"
        )

    if is_command:
        # Commands get default settings when none were declared.
        settings = processed.get("__settings__", cli.command_settings())
        processed["__settings__"] = settings
        _check_base_parsers(settings.base_parsers or [], processed)
    elif is_extension:
        if "__settings__" not in processed:
            raise exceptions.PlugError(
                "CommandExtension must have a '__settings__' attribute"
            )

    handle_processed_args = _generate_handle_processed_args_func()
    processed[handle_processed_args.__name__] = handle_processed_args
    processed["attach_options"] = _attach_options

    # NOTE: configurability is probed on the *original* attrdict, matching
    # the original implementation.
    if list(_get_configurable_arguments(attrdict)):

        def get_configurable_args(self) -> ConfigurableArguments:
            """Return this plugin's configurable argument names and the
            config section they are read from."""
            return ConfigurableArguments(
                config_section_name=self.__settings__.config_section_name
                or self.__plugin_name__,
                argnames=list(
                    _get_configurable_arguments(self.__class__.__dict__)
                ),
            )

        processed[get_configurable_args.__name__] = get_configurable_args

    return processed
def _check_base_parsers(
    base_parsers: List[base.BaseParser], attrdict: dict
) -> None:
    """Check that the base parser list fulfills all requirements."""
    if base.BaseParser.REPO_DISCOVERY not in base_parsers:
        return
    # REPO_DISCOVERY requires both the STUDENTS parser and an ``api``
    # parameter on the command function, see
    # https://github.com/repobee/repobee/issues/716 for details
    if base.BaseParser.STUDENTS not in base_parsers:
        raise exceptions.PlugError(
            "REPO_DISCOVERY parser requires STUDENT parser"
        )
    if "api" not in inspect.signature(attrdict["command"]).parameters:
        raise exceptions.PlugError(
            "REPO_DISCOVERY parser requires command function to use api "
            "argument"
        )
def _get_configurable_arguments(attrdict: dict) -> List[str]:
    """Returns a list of configurable argument names."""
    return [
        option_name
        for option_name, option in _extract_flat_cli_options(attrdict)
        if getattr(option, "configurable", False)
    ]
def _extract_cli_options(
    attrdict,
) -> List[Tuple[str, Union[_Option, _MutuallyExclusiveGroup]]]:
    """Returns any members that are CLI options as a list of tuples on the form
    (member_name, option).
    """
    options = []
    for member_name, member in attrdict.items():
        if cli.is_cli_arg(member):
            options.append((member_name, member))
    return options
def _extract_flat_cli_options(
    attrdict,
) -> Iterator[Tuple[str, Union[_Option, _MutuallyExclusiveGroup]]]:
    """Like _extract_cli_options, but flattens nested options such as mutex
    groups.
    """
    flattened = (
        _flatten_arg(arg_tup) for arg_tup in _extract_cli_options(attrdict)
    )
    return itertools.chain.from_iterable(flattened)
def _attach_options(self, config: repobee_plug.config.Config, parser):
    """Attach this plugin's CLI options to ``parser``, honoring any values
    configured in ``config``.

    For a :py:class:`cli.CommandExtension` the options are placed in a
    dedicated argument group named after the plugin; for a command they are
    added directly to ``parser``.

    Args:
        config: Loaded configuration to read option values from.
        parser: The argparse parser (or compatible object) to attach to.
    Returns:
        The parser or argument group the options were attached to.
    Raises:
        exceptions.PlugError: If a non-configurable option has a configured
            value in ``config``.
    """
    parser = (
        parser
        if not isinstance(self, cli.CommandExtension)
        else parser.add_argument_group(
            title=self.__plugin_name__,
            description=f"Arguments for the {self.__plugin_name__} plugin",
        )
    )
    # Fall back to the plugin name when no explicit config section is set.
    section_name = (
        self.__settings__.config_section_name or self.__plugin_name__
    )
    opts = _extract_cli_options(self.__class__.__dict__)
    for (arg_name, opt) in opts:
        configured_value = config.get(section_name, arg_name)
        # A configured value for an option that was not declared
        # configurable is a plugin authoring error.
        if configured_value and not getattr(opt, "configurable", None):
            raise exceptions.PlugError(
                f"Plugin '{self.__plugin_name__}' does not allow "
                f"'{arg_name}' to be configured"
            )
        # Mutex groups have no converter; only plain options are converted.
        converted_value = (
            _convert_configured_value(opt, configured_value)
            if isinstance(opt, _Option)
            else None
        )
        _add_option(arg_name, opt, converted_value, parser)
    return parser
def _convert_configured_value(
opt: _Option, configured_value: Optional[Any]
) -> Optional[Any]:
"""Try to fetch a configured value from the config, respecting the
converter of the option and also handling list-like arguments.
Returns:
The configured value, or none if there was no configured value.
"""
if (
configured_value
and opt.argparse_kwargs
and re.match(r"\+|\*|\d+", str(opt.argparse_kwargs.get("nargs")))
):
individual_args = shlex.split(configured_value)
converter = opt.converter if opt.converter else lambda x: x
return tuple(map(converter, individual_args))
else:
return configured_value
def _generate_handle_processed_args_func():
    """Generate the ``handle_processed_args`` hook function for CLI plugins.

    The generated function stores the parsed argparse namespace on the
    plugin instance and mirrors each of the plugin's CLI options as an
    instance attribute, honoring an argparse ``dest`` override when one is
    declared.
    """

    def handle_processed_args(self, args):
        self.args = args

        flattened_args = _extract_flat_cli_options(self.__class__.__dict__)
        for name, arg in flattened_args:
            # Fix: ``argparse_kwargs`` may be None for options declared
            # without extra argparse arguments (cf. ``opt.argparse_kwargs
            # or {}`` in _add_option); guard before looking up "dest" so we
            # don't raise TypeError on ``in None``.
            argparse_kwargs = arg.argparse_kwargs or {}
            dest = argparse_kwargs.get("dest", name)
            if dest in args:
                parsed_arg = getattr(args, dest)
                setattr(self, dest, parsed_arg)

    return handle_processed_args
def _flatten_arg(arg_tup):
    """Flatten a (name, option) tuple: mutex groups are expanded into their
    member options, a plain option becomes a single-element list.
    """
    _, arg = arg_tup
    assert cli.is_cli_arg(arg)
    if not isinstance(arg, _MutuallyExclusiveGroup):
        return [arg_tup]
    return itertools.chain.from_iterable(
        _flatten_arg(member) for member in arg.options
    )
def _add_option(
    name: str,
    opt: Union[_Option, _MutuallyExclusiveGroup],
    configured_value: Optional[Any],
    parser: Union[argparse.ArgumentParser, argparse._MutuallyExclusiveGroup],
) -> None:
    """Add an option to the parser based on the cli option.

    Args:
        name: Attribute name of the option on the plugin class; used as the
            argparse ``dest`` for options and as the name of positionals.
        opt: The CLI option (or mutually exclusive group) to translate.
        configured_value: Value from the config file, or None. A configured
            value becomes the default and makes a required option optional.
        parser: Parser (or mutex group) to add the argument to.
    """
    if isinstance(opt, _MutuallyExclusiveGroup):
        # Recurse into the group's members. Note that the same
        # ``configured_value`` is handed to every member of the group.
        mutex_parser = parser.add_mutually_exclusive_group(
            required=opt.required
        )
        for (mutex_opt_name, mutex_opt) in opt.options:
            _add_option(
                mutex_opt_name, mutex_opt, configured_value, mutex_parser
            )
        return
    elif opt.argument_type == cli.args._ArgumentType.IGNORE:
        # Explicitly ignored arguments are never exposed on the CLI.
        return
    assert isinstance(opt, _Option)
    args = []
    kwargs = dict(opt.argparse_kwargs or {})
    if opt.converter:
        kwargs["type"] = opt.converter
    kwargs["help"] = opt.help or ""
    if opt.argument_type in [
        cli.args._ArgumentType.OPTION,
        cli.args._ArgumentType.FLAG,
    ]:
        if opt.short_name:
            args.append(opt.short_name)
        assert isinstance(opt.long_name, str)
        args.append(opt.long_name)
        kwargs["dest"] = name
        if not opt.argument_type == cli.args._ArgumentType.FLAG:
            # configured value takes precedence over default
            kwargs["default"] = configured_value or opt.default
            # required opts become not required if configured
            kwargs["required"] = not configured_value and opt.required
    elif opt.argument_type == cli.args._ArgumentType.POSITIONAL:
        args.append(name)
    parser.add_argument(*args, **kwargs)
class Plugin(metaclass=_PluginMeta):
    """Base class that all RepoBee plugin classes must inherit from in
    order to be picked up by RepoBee.

    Every public method of a subclass must be a hook method: a public
    method with a recognized hook name is converted on class creation,
    while one with an unrecognized name raises an error immediately.
    Private methods (prefixed with ``_``) carry no restrictions.

    Method *signatures* are not validated until the plugin class is
    registered with :py:const:`repobee_plug.manager` (an instance of
    :py:class:`pluggy.manager.PluginManager`) — an incorrect signature only
    surfaces as a runtime exception when the hook is called. A test that
    registers the plugin with the manager is therefore a good way to catch
    signature mistakes early.

    RepoBee instantiates a plugin class exactly once, when the plugin is
    loaded, so any state stored on the instance persists for the whole run
    of a RepoBee command. That makes plugin classes a natural home for
    tasks that rely on command line options or configuration values, and
    for implementing extension commands.
    """

    def __init__(self, plugin_name: str):
        """
        Args:
            plugin_name: Name of the plugin that this instance belongs to.
        """
        # Read by the CLI machinery (e.g. _attach_options) to label option
        # groups and config sections.
        self.__plugin_name__ = plugin_name
| 35.051205
| 94
| 0.673713
|
acfe2e87235e07521b7b597f671efe0219576cfe
| 6,320
|
py
|
Python
|
sis-api/core/repository/quote.py
|
maxbilbow/7054CEM-sis
|
1c5067c9afc38e340fcce046048f8ae21d267365
|
[
"MIT"
] | null | null | null |
sis-api/core/repository/quote.py
|
maxbilbow/7054CEM-sis
|
1c5067c9afc38e340fcce046048f8ae21d267365
|
[
"MIT"
] | null | null | null |
sis-api/core/repository/quote.py
|
maxbilbow/7054CEM-sis
|
1c5067c9afc38e340fcce046048f8ae21d267365
|
[
"MIT"
] | null | null | null |
import dataclasses
import time
from typing import Optional, List
from core.model.driver_details import DriverDetails
from core.model.driver_history import DriverHistory
from core.model.home_details import HomeDetails
from core.model.home_quote_sections import HomeQuoteSections
from core.model.insurance_policy import InsuranceType
from core.model.profile import Profile
from core.model.quote import Quote
from core.model.quote_sections import QuoteSections
from core.model.vehicle_details import VehicleDetails
from core.model.vehicle_quote_sections import VehicleQuoteSections
from core.model.vehicle_usage import VehicleUsage
from core.repository import mysql
from core.repository.mysql import DbSession
from core.repository.user_profile import UserProfileRepository
from core.utils.deserialization import deserialize
class QuoteRepository:
    """Data-access layer for insurance quotes (motor and home).

    Each method opens its own MySQL session, performs its inserts/updates,
    and commits before returning. Section rows (driver, vehicle, home
    details) are persisted and loaded alongside the quote itself.
    """

    @staticmethod
    def new_motor_quote(user_id: int, profile: Profile):
        """Create and persist a blank motor quote for ``user_id``, seeding
        driver details from the user's profile. Returns the re-loaded
        quote."""
        quote = Quote(id=None, user_id=user_id, type=InsuranceType.Motor, sections=QuoteSections())
        with mysql.session() as s:
            quote_id = s.on_table("quote").insert(quote)
            driver_history = DriverHistory()
            driver_history.id = s.on_table("driver_history").insert(driver_history)
            driver_details = DriverDetails(quote_id, personal_details=profile.personal_details,
                                           driver_history=driver_history)
            sections = VehicleQuoteSections(
                quote_id=quote_id, driver_details=driver_details, vehicle_details=VehicleDetails(quote_id),
                vehicle_usage=VehicleUsage(quote_id)
            )
            # Re-bind the quote with its generated id and section objects.
            quote = dataclasses.replace(quote, sections=sections, id=quote_id)
            s.on_table("quote_sections").insert(sections)
            s.on_table("driver_details").insert(driver_details)
            s.on_table("vehicle_details").insert(sections.vehicle_details)
            s.on_table("vehicle_usage").insert(sections.vehicle_usage)
            s.commit()
        return QuoteRepository.find_by_id(quote.id)

    @staticmethod
    def new_home_quote(user_id: int, profile: Profile):
        """Create and persist a blank home quote for ``user_id``, seeding
        the home address from the user's profile.

        NOTE(review): unlike new_motor_quote this returns the in-memory
        quote rather than re-loading it (see the trailing comment)."""
        quote = Quote(id=None, user_id=user_id, type=InsuranceType.Home, sections=QuoteSections())
        with mysql.session() as s:
            quote_id = s.on_table("quote").insert(quote)
            sections = HomeQuoteSections(
                quote_id=quote_id, personal_details=profile.personal_details,
                home_details=HomeDetails(quote_id, address=profile.personal_details.address)
            )
            quote = dataclasses.replace(quote, sections=sections, id=quote_id)
            s.on_table("home_details").insert(sections.home_details)
            s.on_table("quote_sections").insert(sections)
            s.commit()
        return quote  # QuoteRepository.find_by_id(quote_id)

    @staticmethod
    def find_by_id(id: int) -> Optional[Quote]:
        """Load a quote and all of its section rows by primary key, or None
        if no such quote exists."""
        with mysql.session() as s:
            quote_matcher = "quote_id", id
            quote = s.on_table("quote").find_by(id).fetchone()
            if quote is None:
                return None
            sections = s.on_table("quote_sections").find_by(quote_matcher).fetchone()
            if quote["type"] == InsuranceType.Motor.name:
                # Motor quotes: resolve driver, vehicle and usage rows.
                sections["driver_details"] = s.on_table("driver_details").find_by(quote_matcher).fetchone()
                personal_details_id = sections["driver_details"]["personal_details_id"]
                driver_history_id = sections["driver_details"]["driver_history_id"]
                sections["driver_details"]["personal_details"] = UserProfileRepository.find_personal_details(s,
                                                                                                            personal_details_id)
                sections["driver_details"]["driver_history"] = \
                    s.on_table("driver_history").find_by(driver_history_id).fetchone()
                sections["vehicle_details"] = s.on_table("vehicle_details").find_by(quote_matcher).fetchone()
                sections["vehicle_usage"] = s.on_table("vehicle_usage").find_by(quote_matcher).fetchone()
            else:
                # Home quotes only need the personal details resolved here.
                sections["personal_details"] = UserProfileRepository.find_personal_details(s, sections[
                    "personal_details_id"])
            quote["sections"] = sections
            return deserialize(quote, Quote)

    @staticmethod
    def find_by_userid(user_id: int) -> List[Quote]:
        """Return all quotes belonging to ``user_id``. Only the top-level
        quote rows are deserialized here; section rows are not loaded."""
        rows = mysql.find_all_by(table_name="quote", key="user_id", key_value=user_id)
        return [deserialize(row, Quote) for row in rows]

    @staticmethod
    def update(quote: Quote) -> Quote:
        """Persist ``quote`` and all of its sections, refreshing the
        ``updated`` timestamp (milliseconds since epoch). Returns the
        re-loaded quote."""
        quote = dataclasses.replace(quote, updated=int(time.time() * 1000))
        with mysql.session() as s:
            s.on_table("quote").update(quote)
            if quote.type is InsuranceType.Motor:
                sections: VehicleQuoteSections = quote.sections
                s.on_table("driver_history").update(sections.driver_details.driver_history)
                s.on_table("personal_details").update(sections.driver_details.personal_details)
                s.on_table("driver_details").update(sections.driver_details)
                s.on_table("vehicle_details").update(sections.vehicle_details)
                s.on_table("vehicle_usage").update(sections.vehicle_usage)
                s.on_table("address").update(sections.driver_details.personal_details.address)
            else:
                sections: HomeQuoteSections = quote.sections
                s.on_table("home_details").update(sections.home_details)
                s.on_table("personal_details").update(sections.personal_details)
                s.on_table("address").update(sections.personal_details.address)
            s.on_table("quote_sections").update(quote.sections)
            s.commit()
        return QuoteRepository.find_by_id(quote.id)

    @staticmethod
    def delete(id: int):
        """Delete the quote row with primary key ``id``."""
        with mysql.session() as s:
            s.on_table("quote").delete(id)
            s.commit()

    @staticmethod
    def get_personal_details(s: DbSession, personal_details_id: int) -> Optional[dict]:
        """Look up a personal_details row by id.

        NOTE(review): as written this returns None on a miss and then also
        falls off the end (implicitly None) on a hit — a trailing
        ``return personal_details`` appears to be missing or truncated;
        confirm against the original source before relying on this method.
        """
        personal_details = s.on_table("personal_details").find_by(personal_details_id).fetchone()
        if personal_details is None:
            return None
| 50.56
| 129
| 0.664873
|
acfe30089f1a181284507a6c3fadd02e1db86d09
| 3,468
|
py
|
Python
|
newrelic_lambda_cli/subscriptions.py
|
salvatorious/newrelic-lambda-cli
|
22fb46930bbf138d2ad65cf46040b185072d775a
|
[
"Apache-2.0"
] | null | null | null |
newrelic_lambda_cli/subscriptions.py
|
salvatorious/newrelic-lambda-cli
|
22fb46930bbf138d2ad65cf46040b185072d775a
|
[
"Apache-2.0"
] | null | null | null |
newrelic_lambda_cli/subscriptions.py
|
salvatorious/newrelic-lambda-cli
|
22fb46930bbf138d2ad65cf46040b185072d775a
|
[
"Apache-2.0"
] | null | null | null |
import botocore
import click
from .cli.cliutils import failure
from .functions import get_function
def get_subscription_filters(session, function_name):
    """Return all log subscription filters on the function's log group, or
    an empty list when the lookup fails (e.g. the log group is missing).
    """
    log_group_name = "/aws/lambda/%s" % function_name
    try:
        response = session.client("logs").describe_subscription_filters(
            logGroupName=log_group_name
        )
    except botocore.exceptions.ClientError:
        return []
    return response.get("subscriptionFilters", [])
def create_subscription_filter(session, function_name, destination_arn):
    """Create the New Relic subscription filter on the function's log
    group, forwarding only NR_LAMBDA_MONITORING payloads to the ingestion
    function at ``destination_arn``.
    """
    log_group_name = "/aws/lambda/%s" % function_name
    logs = session.client("logs")
    return logs.put_subscription_filter(
        logGroupName=log_group_name,
        filterName="NewRelicLogStreaming",
        filterPattern="NR_LAMBDA_MONITORING",
        destinationArn=destination_arn,
    )
def remove_subscription_filter(session, function_name):
    """Delete the New Relic subscription filter from the function's log
    group.
    """
    logs = session.client("logs")
    log_group_name = "/aws/lambda/%s" % function_name
    return logs.delete_subscription_filter(
        logGroupName=log_group_name, filterName="NewRelicLogStreaming"
    )
def create_log_subscription(session, function_name):
    """Ensure the New Relic log subscription filter exists on the function.

    Looks up the ``newrelic-log-ingestion`` function as the destination,
    warns when a foreign subscription filter is present, and creates (or
    recreates, if its pattern is empty) the New Relic filter as needed.
    """
    destination = get_function(session, "newrelic-log-ingestion")
    if destination is None:
        failure(
            "Could not find 'newrelic-log-ingestion' function. Is the New Relic AWS "
            "integration installed?"
        )
        return
    destination_arn = destination["Configuration"]["FunctionArn"]

    # Fix: drop the pass-through list comprehension and stop shadowing the
    # ``filter`` builtin.
    subscription_filters = list(
        get_subscription_filters(session, function_name)
    )
    newrelic_filters = [
        subscription_filter
        for subscription_filter in subscription_filters
        if subscription_filter["filterName"] == "NewRelicLogStreaming"
    ]
    if len(subscription_filters) > len(newrelic_filters):
        # NOTE(review): ``click.echo``'s ``color`` parameter is a boolean
        # ANSI toggle, not a color name; ``click.secho(..., fg="blue")``
        # was probably intended — confirm before changing output behavior.
        click.echo(
            "WARNING: Found a log subscription filter that was not installed by New "
            "Relic. This may prevent the New Relic log subscription filter from being "
            "installed. If you know you don't need this log subscription filter, you "
            "should first remove it and rerun this command. If your organization "
            "requires this log subscription filter, please contact New Relic at "
            "serverless@newrelic.com for assistance with getting the AWS log "
            "subscription filter limit increased.",
            color="blue",
        )
    if not newrelic_filters:
        click.echo("Adding New Relic log subscription to '%s'" % function_name)
        create_subscription_filter(session, function_name, destination_arn)
    else:
        click.echo(
            "Found log subscription for '%s', verifying configuration" % function_name
        )
        # An empty pattern does not match the expected NR_LAMBDA_MONITORING
        # payloads, so recreate the filter with the correct pattern.
        newrelic_filter = newrelic_filters[0]
        if newrelic_filter["filterPattern"] == "":
            remove_subscription_filter(session, function_name)
            create_subscription_filter(session, function_name, destination_arn)
def remove_log_subscription(session, function_name):
    """Remove the New Relic log subscription filter from the function, if
    one is present; otherwise just report that there was nothing to do.
    """
    newrelic_filters = [
        subscription_filter
        for subscription_filter in get_subscription_filters(session, function_name)
        if subscription_filter["filterName"] == "NewRelicLogStreaming"
    ]
    if newrelic_filters:
        click.echo("Removing New Relic log subscription from '%s'" % function_name)
        remove_subscription_filter(session, function_name)
    else:
        click.echo(
            "No New Relic subscription filters found for '%s', skipping" % function_name
        )
| 38.533333
| 88
| 0.684833
|
acfe30cf926557dd7281aee077c7d1a5f2f80f62
| 3,912
|
py
|
Python
|
imix/models/vqa_models/oscar/run_nlvr.py
|
linxi1158/iMIX
|
af87a17275f02c94932bb2e29f132a84db812002
|
[
"Apache-2.0"
] | 23
|
2021-06-26T08:45:19.000Z
|
2022-03-02T02:13:33.000Z
|
imix/models/vqa_models/oscar/run_nlvr.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | null | null | null |
imix/models/vqa_models/oscar/run_nlvr.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | 9
|
2021-06-10T02:36:20.000Z
|
2021-11-09T02:18:16.000Z
|
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2020 Microsoft Corporation. Licensed under the MIT license.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import logging
import torch
from transformers import BertConfig
from .modeling.modeling_bert import ImageBertForMultipleChoice, ImageBertForSequenceClassification
from .utils.task_utils import processors
from imix.models.builder import VQA_MODELS
from ..base_model import BaseModel
import sys
sys.path.insert(0, '.')
logger = logging.getLogger(__name__)
@VQA_MODELS.register_module()
class OSCAR_NLVR(BaseModel):
    """Oscar/BERT-based model wrapper for the NLVR task, registered with
    the ``VQA_MODELS`` registry.

    Expects ``kwargs['params']`` to carry the (argparse-style) arguments
    used to build the BERT configuration and to choose between the
    sequence-classification and multiple-choice heads.
    """

    def __init__(self, **kwargs):
        super().__init__()
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        args = kwargs['params']
        # Prepare GLUE task
        task_name = args.task_name.lower()
        if task_name not in processors:
            raise ValueError('Task not found: %s' % (task_name))
        num_labels = args.num_labels
        logger.info('Task Name: {}, #Labels: {}'.format(task_name, num_labels))
        self.model_type = args.model_type.lower()
        config = BertConfig.from_pretrained(
            args.config_name if args.config_name else args.model_name_or_path,
            num_labels=num_labels,
            finetuning_task=task_name)
        # discrete code: copy image-feature and classifier-head settings
        # from the run arguments onto the BERT config.
        config.img_feature_dim = args.img_feature_dim
        config.img_feature_type = args.img_feature_type
        config.code_voc = args.code_voc
        config.hidden_dropout_prob = args.drop_out
        config.loss_type = args.loss_type
        config.use_layernorm = args.use_layernorm
        config.classifier = args.classifier
        config.cls_hidden_scale = args.cls_hidden_scale
        config.num_choice = args.num_choice
        # ``use_pair`` switches from the sequence-classification head to
        # the multiple-choice head.
        model_class = ImageBertForSequenceClassification
        if args.use_pair:
            model_class = ImageBertForMultipleChoice
        self.model = model_class.from_pretrained(
            args.model_name_or_path, from_tf=bool('.ckpt' in args.model_name_or_path), config=config)
        # total_params = sum(p.numel() for p in self.model.parameters())
        # logger.info('Model Parameters: {}'.format(total_params))
        self.img_feature_dim = args.img_feature_dim

    def forward_train(self, data, **kwargs):
        """Train the model.

        ``data`` is assumed to be a tuple of tensors laid out as
        (input_ids, attention_mask, token_type_ids, labels, img_feats) —
        inferred from the ``inputs`` mapping below; confirm against the
        data loader.
        """
        batch = tuple(t.to(self.device) for t in data)
        inputs = {
            'input_ids': batch[0],
            'attention_mask': batch[1],
            'token_type_ids': batch[2] if self.model_type in ['bert', 'xlnet'] else None,  # XLM don't use segment_ids
            'labels': batch[3],
            'img_feats': None if self.img_feature_dim == -1 else batch[4]
        }
        outputs = self.model(**inputs)
        logits = outputs[0]
        val, idx = logits.max(1)
        # Count of correct argmax predictions in this batch.
        batch_score = torch.sum(idx == batch[3].view(-1))
        batch_size = batch[0].size(0)
        model_output = {
            'scores': logits,
            'target': batch[3],
            'batch_score': batch_score,
            'batch_size': batch_size,
        }
        return model_output

    def forward_test(self, data, **kwargs):
        """Evaluation reuses the training forward pass unchanged."""
        model_output = self.forward_train(data, **kwargs)
        return model_output
| 37.615385
| 118
| 0.674591
|
acfe3195aad6fefedc02c0d6b9ad5719a36781a1
| 46
|
py
|
Python
|
tests/components/alarmdecoder/__init__.py
|
tbarbette/core
|
8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
tests/components/alarmdecoder/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
tests/components/alarmdecoder/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""The tests for AlarmDecoder integration."""
| 23
| 45
| 0.73913
|
acfe31b626ab0bc0289652e0d6b9f6bb551bcd38
| 38,561
|
py
|
Python
|
tests/dashboard/test_product.py
|
adonig/saleor
|
41f2148da45acd8309faeda5331ca5fc93a0d201
|
[
"BSD-3-Clause"
] | 6
|
2019-01-06T08:39:20.000Z
|
2022-03-04T18:07:47.000Z
|
tests/dashboard/test_product.py
|
adonig/saleor
|
41f2148da45acd8309faeda5331ca5fc93a0d201
|
[
"BSD-3-Clause"
] | 3
|
2020-02-11T23:51:31.000Z
|
2022-02-10T22:42:12.000Z
|
tests/dashboard/test_product.py
|
adonig/saleor
|
41f2148da45acd8309faeda5331ca5fc93a0d201
|
[
"BSD-3-Clause"
] | 1
|
2021-04-22T12:10:35.000Z
|
2021-04-22T12:10:35.000Z
|
import json
from unittest.mock import MagicMock, Mock
from django.forms import HiddenInput
from django.forms.models import model_to_dict
from django.urls import reverse
from prices import Money, MoneyRange, TaxedMoney, TaxedMoneyRange
from tests.utils import get_redirect_location
from saleor.dashboard.product import ProductBulkAction
from saleor.dashboard.product.forms import ProductForm, ProductVariantForm
from saleor.product.forms import VariantChoiceField
from saleor.product.models import (
AttributeValue, Collection, Product, Attribute, ProductImage,
ProductType, ProductVariant)
from ..utils import create_image
def test_view_product_list_with_filters(admin_client, product_list):
    """Filtering the product list by name returns all matching products."""
    url = reverse('dashboard:product-list')
    data = {
        'price_max': [''], 'price_min': [''], 'is_featured': [''],
        'name': ['Test'], 'sort_by': [''], 'is_published': ['']}
    response = admin_client.get(url, data)
    assert response.status_code == 200
    assert list(response.context['filter_set'].qs) == product_list
def test_view_product_list_with_filters_sort_by(admin_client, product_list):
    """Sorting by name ascending/descending orders the filtered queryset."""
    url = reverse('dashboard:product-list')
    data = {
        'price_max': [''], 'price_min': [''], 'is_featured': [''],
        'name': ['Test'], 'sort_by': ['name'], 'is_published': ['']}
    response = admin_client.get(url, data)
    assert response.status_code == 200
    assert list(response.context['filter_set'].qs) == product_list
    # Reversed sort order should yield the reversed product list.
    data['sort_by'] = ['-name']
    url = reverse('dashboard:product-list')
    response = admin_client.get(url, data)
    assert response.status_code == 200
    assert list(response.context['filter_set'].qs) == product_list[::-1]
def test_view_product_list_with_filters_is_published(
        admin_client, product_list, category):
    """The is_published filter keeps only the published products of a
    category (fixture products 0 and 2)."""
    url = reverse('dashboard:product-list')
    data = {
        'price_max': [''], 'price_min': [''], 'is_featured': [''],
        'name': ['Test'], 'sort_by': ['name'], 'category': category.pk,
        'is_published': ['1']}
    response = admin_client.get(url, data)
    assert response.status_code == 200
    result = list(response.context['filter_set'].qs)
    assert result == [product_list[0], product_list[2]]
def test_view_product_list_with_filters_no_results(admin_client, product_list):
    """A name filter that matches nothing yields an empty queryset."""
    url = reverse('dashboard:product-list')
    data = {
        'price_max': [''], 'price_min': [''], 'is_featured': [''],
        'name': ['BADTest'], 'sort_by': [''], 'is_published': ['']}
    response = admin_client.get(url, data)
    assert response.status_code == 200
    assert list(response.context['filter_set'].qs) == []
def test_view_product_list_pagination(admin_client, product_list, settings):
    """Paging without any filters keeps the filter set unbound on every
    page."""
    settings.DASHBOARD_PAGINATE_BY = 1
    url = reverse('dashboard:product-list')
    data = {'page': '1'}
    response = admin_client.get(url, data)
    assert response.status_code == 200
    assert not response.context['filter_set'].is_bound_unsorted
    data = {'page': '2'}
    response = admin_client.get(url, data)
    assert response.status_code == 200
    assert not response.context['filter_set'].is_bound_unsorted
def test_view_product_list_pagination_with_filters(
        admin_client, product_list, settings):
    """Pagination respects active filters and sort order: with one product
    per page, page N shows the N-th sorted product."""
    settings.DASHBOARD_PAGINATE_BY = 1
    url = reverse('dashboard:product-list')
    data = {
        'page': '1', 'price_max': [''], 'price_min': [''], 'is_featured': [''],
        'name': ['Test'], 'sort_by': ['name'], 'is_published': ['']}
    response = admin_client.get(url, data)
    assert response.status_code == 200
    assert list(response.context['products'])[0] == product_list[0]
    data['page'] = '2'
    response = admin_client.get(url, data)
    assert response.status_code == 200
    assert list(response.context['products'])[0] == product_list[1]
def test_view_product_details(admin_client, product):
    """The product detail view exposes sale price, purchase cost and
    margin in the template context."""
    price = TaxedMoney(net=Money(10, 'USD'), gross=Money(10, 'USD'))
    sale_price = TaxedMoneyRange(start=price, stop=price)
    purchase_cost = MoneyRange(start=Money(1, 'USD'), stop=Money(1, 'USD'))
    url = reverse('dashboard:product-details', kwargs={'pk': product.pk})
    response = admin_client.get(url)
    assert response.status_code == 200
    context = response.context
    assert context['product'] == product
    assert context['sale_price'] == sale_price
    assert context['purchase_cost'] == purchase_cost
    assert context['margin'] == (90, 90)
def test_view_product_toggle_publish(db, admin_client, product):
    """POSTing the publish toggle flips is_published and returns the new
    state as JSON; a second POST flips it back."""
    url = reverse('dashboard:product-publish', kwargs={'pk': product.pk})
    expected_response = {'success': True, 'is_published': False}
    response = admin_client.post(url)
    assert response.status_code == 200
    assert json.loads(response.content.decode('utf8')) == expected_response
    product.refresh_from_db()
    assert not product.is_published
    admin_client.post(url)
    product.refresh_from_db()
    assert product.is_published
def test_view_product_select_type_display_modal(admin_client):
    """The select-product-type modal is served successfully."""
    response = admin_client.get(reverse('dashboard:product-add-select-type'))
    assert response.status_code == 200
def test_view_product_select_type(admin_client, product_type):
    """Selecting a product type redirects to the add-product form for that
    type."""
    url = reverse('dashboard:product-add-select-type')
    data = {'product_type': product_type.pk}
    response = admin_client.post(url, data)
    assert get_redirect_location(response) == reverse(
        'dashboard:product-add', kwargs={'type_pk': product_type.pk})
    assert response.status_code == 302
def test_view_product_select_type_by_ajax(admin_client, product_type):
    """Selecting a product type via AJAX returns the redirect URL as JSON
    instead of issuing an HTTP redirect."""
    url = reverse('dashboard:product-add-select-type')
    data = {'product_type': product_type.pk}
    response = admin_client.post(
        url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    assert response.status_code == 200
    resp_decoded = json.loads(response.content.decode('utf-8'))
    assert resp_decoded.get('redirectUrl') == reverse(
        'dashboard:product-add', kwargs={'type_pk': product_type.pk})
def test_view_product_create(admin_client, product_type, category):
    """POSTing valid form data creates one product and redirects to its
    details page."""
    url = reverse('dashboard:product-add', kwargs={'type_pk': product_type.pk})
    data = {
        'name': 'Product', 'description': 'This is product description.',
        'price': 10, 'category': category.pk, 'variant-sku': '123',
        'variant-quantity': 2}
    response = admin_client.post(url, data)
    assert response.status_code == 302
    product = Product.objects.first()
    assert get_redirect_location(response) == reverse(
        'dashboard:product-details', kwargs={'pk': product.pk})
    assert Product.objects.count() == 1
def test_view_product_edit(admin_client, product):
url = reverse('dashboard:product-update', kwargs={'pk': product.pk})
data = {
'name': 'Product second name', 'description': 'Product description.',
'price': 10, 'category': product.category.pk, 'variant-sku': '123',
'variant-quantity': 10}
response = admin_client.post(url, data)
assert response.status_code == 302
product.refresh_from_db()
assert get_redirect_location(response) == reverse(
'dashboard:product-details', kwargs={'pk': product.pk})
assert product.name == 'Product second name'
def test_view_product_delete(db, admin_client, product):
url = reverse('dashboard:product-delete', kwargs={'pk': product.pk})
response = admin_client.post(url)
assert response.status_code == 302
assert not Product.objects.filter(pk=product.pk)
def test_view_product_not_deleted_before_confirmation(
        db, admin_client, product):
    """A GET on the delete view only shows the confirmation page.

    Deletion must happen on POST; after a GET the product still exists.
    """
    url = reverse('dashboard:product-delete', kwargs={'pk': product.pk})
    response = admin_client.get(url)
    assert response.status_code == 200
    # refresh_from_db would raise DoesNotExist if the row were gone, but the
    # original test never asserted survival; make the intent explicit.
    product.refresh_from_db()
    assert Product.objects.filter(pk=product.pk).exists()
def test_view_product_bulk_update_publish(admin_client, product_list):
url = reverse('dashboard:product-bulk-update')
products = [product.pk for product in product_list]
data = {'action': ProductBulkAction.PUBLISH, 'products': products}
response = admin_client.post(url, data)
assert response.status_code == 302
assert get_redirect_location(response) == reverse('dashboard:product-list')
for p in product_list:
p.refresh_from_db()
assert p.is_published
def test_view_product_bulk_update_unpublish(admin_client, product_list):
    """Bulk UNPUBLISH redirects to the product list and unpublishes all.

    The original test posted the form but verified nothing; mirror the
    assertions of the PUBLISH counterpart.
    """
    url = reverse('dashboard:product-bulk-update')
    products = [product.pk for product in product_list]
    data = {'action': ProductBulkAction.UNPUBLISH, 'products': products}
    response = admin_client.post(url, data)
    assert response.status_code == 302
    assert get_redirect_location(response) == reverse('dashboard:product-list')
    for product in product_list:
        product.refresh_from_db()
        assert not product.is_published
def test_view_ajax_products_list(admin_client, product):
url = reverse('dashboard:ajax-products')
response = admin_client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert response.status_code == 200
resp_decoded = json.loads(response.content.decode('utf-8'))
assert resp_decoded.get('results') == [
{'id': product.id, 'text': str(product)}]
def test_view_product_type_list(admin_client, product_type):
url = reverse('dashboard:product-type-list')
response = admin_client.get(url)
assert response.status_code == 200
assert len(response.context['product_types']) == 1
def test_view_product_type_list_with_filters(admin_client, product_type):
url = reverse('dashboard:product-type-list')
data = {'name': ['Default Ty'], 'sort_by': ['']}
response = admin_client.get(url, data)
assert response.status_code == 200
assert product_type in response.context['filter_set'].qs
assert len(response.context['filter_set'].qs) == 1
def test_view_product_type_create(
admin_client, color_attribute, size_attribute):
url = reverse('dashboard:product-type-add')
data = {
'name': 'Testing Type',
'product_attributes': [color_attribute.pk],
'variant_attributes': [size_attribute.pk],
'has_variants': True,
'weight': ['3.47']}
response = admin_client.post(url, data)
assert response.status_code == 302
assert get_redirect_location(response) == reverse(
'dashboard:product-type-list')
assert ProductType.objects.count() == 1
def test_view_product_type_create_invalid(
admin_client, color_attribute, size_attribute):
url = reverse('dashboard:product-type-add')
# Don't allow same attribute in both fields
data = {
'name': 'Testing Type',
'product_attributes': [size_attribute.pk],
'variant_attributes': [color_attribute.pk, size_attribute.pk],
'has_variants': True,
'weight': ['3.47']}
response = admin_client.post(url, data)
assert response.status_code == 200
assert ProductType.objects.count() == 0
def test_view_product_type_create_missing_variant_attributes(
admin_client, color_attribute, size_attribute):
url = reverse('dashboard:product-type-add')
data = {
'name': 'Testing Type',
'product_attributes': [color_attribute.pk],
'variant_attributes': [size_attribute.pk],
'has_variants': False,
'weight': ['3.47']}
response = admin_client.post(url, data)
assert response.status_code == 200
assert ProductType.objects.count() == 0
def test_view_product_type_create_variantless(
admin_client, color_attribute, size_attribute):
url = reverse('dashboard:product-type-add')
data = {
'name': 'Testing Type',
'product_attributes': [color_attribute.pk],
'variant_attributes': [],
'has_variants': False,
'weight': ['3.47']}
response = admin_client.post(url, data)
assert response.status_code == 302
assert get_redirect_location(response) == reverse(
'dashboard:product-type-list')
assert ProductType.objects.count() == 1
def test_view_product_type_create_variantless_invalid(
admin_client, color_attribute, size_attribute):
url = reverse('dashboard:product-type-add')
# Don't allow variant attributes when no variants
data = {
'name': 'Testing Type',
'product_attributes': [color_attribute.pk],
'variant_attributes': [size_attribute.pk],
'has_variants': False,
'weight': ['3.47']}
response = admin_client.post(url, data)
assert response.status_code == 200
assert ProductType.objects.count() == 0
def test_view_product_type_edit_to_no_variants_valid(admin_client, product):
product_type = ProductType.objects.create(
name='New product type', has_variants=True)
product.product_type = product_type
product.save()
url = reverse(
'dashboard:product-type-update', kwargs={'pk': product_type.pk})
# When all products have only one variant you can change
# has_variants to false
data = {
'name': product_type.name,
'product_attributes': product_type.product_attributes.values_list(
'pk', flat=True),
'variant_attributes': product_type.variant_attributes.values_list(
'pk', flat=True),
'has_variants': False,
'weight': ['3.47']}
response = admin_client.post(url, data)
assert response.status_code == 302
assert get_redirect_location(response) == url
product_type.refresh_from_db()
assert not product_type.has_variants
assert product.variants.count() == 1
def test_view_product_type_edit_to_no_variants_invalid(admin_client, product):
product_type = ProductType.objects.create(
name='New product type', has_variants=True)
product.product_type = product_type
product.save()
product.variants.create(sku='12345')
url = reverse(
'dashboard:product-type-update', kwargs={'pk': product_type.pk})
# Test has_variants validator which prevents turning off when product
# has multiple variants
data = {
'name': product_type.name,
'product_attributes': product_type.product_attributes.values_list(
'pk', flat=True),
'variant_attributes': product_type.variant_attributes.values_list(
'pk', flat=True),
'has_variants': False,
'weight': ['3.47']}
response = admin_client.post(url, data)
assert response.status_code == 200
product_type.refresh_from_db()
assert product_type.has_variants
assert product.variants.count() == 2
def test_view_product_type_delete(db, admin_client, product):
product_type = product.product_type
url = reverse(
'dashboard:product-type-delete', kwargs={'pk': product_type.pk})
response = admin_client.post(url)
assert response.status_code == 302
assert not ProductType.objects.filter(pk=product_type.pk)
def test_view_product_type_not_deleted_before_confirmation(
admin_client, product):
product_type = product.product_type
url = reverse(
'dashboard:product-type-delete', kwargs={'pk': product_type.pk})
response = admin_client.get(url)
assert response.status_code == 200
assert ProductType.objects.filter(pk=product_type.pk)
def test_view_product_variant_details(admin_client, product):
product_type = product.product_type
product_type.has_variants = True
product_type.save()
variant = product.variants.get()
url = reverse(
'dashboard:variant-details',
kwargs={'product_pk': product.pk, 'variant_pk': variant.pk})
response = admin_client.get(url)
assert response.status_code == 200
context = response.context
assert context['product'] == product
assert context['variant'] == variant
assert context['images'].count() == 0
assert context['margin'] == 90
assert context['discounted_price'] == variant.base_price
def test_view_product_variant_details_redirect_to_product(
admin_client, product):
variant = product.variants.get()
url = reverse(
'dashboard:variant-details',
kwargs={'product_pk': product.pk, 'variant_pk': variant.pk})
response = admin_client.get(url)
assert response.status_code == 302
assert get_redirect_location(response) == reverse(
'dashboard:product-details', kwargs={'pk': product.pk})
def test_view_product_variant_create(admin_client, product):
product_type = product.product_type
product_type.has_variants = True
product_type.save()
url = reverse('dashboard:variant-add', kwargs={'product_pk': product.pk})
data = {
'sku': 'ABC', 'price_override': '', 'quantity': 10, 'cost_price': ''}
response = admin_client.post(url, data)
assert response.status_code == 302
variant = product.variants.last()
assert get_redirect_location(response) == reverse(
'dashboard:variant-details',
kwargs={'product_pk': product.pk, 'variant_pk': variant.pk})
assert product.variants.count() == 2
assert variant.sku == 'ABC'
def test_view_product_variant_edit(admin_client, product):
variant = product.variants.get()
url = reverse(
'dashboard:variant-update',
kwargs={'product_pk': product.pk, 'variant_pk': variant.pk})
data = {
'sku': 'ABC', 'price_override': '', 'quantity': 10, 'cost_price': ''}
response = admin_client.post(url, data)
assert response.status_code == 302
variant = product.variants.last()
assert get_redirect_location(response) == reverse(
'dashboard:variant-details',
kwargs={'product_pk': product.pk, 'variant_pk': variant.pk})
assert variant.sku == 'ABC'
def test_view_product_variant_delete(admin_client, product):
variant = product.variants.get()
url = reverse(
'dashboard:variant-delete',
kwargs={'product_pk': product.pk, 'variant_pk': variant.pk})
response = admin_client.post(url)
assert response.status_code == 302
assert not ProductVariant.objects.filter(pk=variant.pk).exists()
def test_view_product_variant_not_deleted_before_confirmation(
admin_client, product):
variant = product.variants.get()
url = reverse(
'dashboard:variant-delete',
kwargs={'product_pk': product.pk, 'variant_pk': variant.pk})
response = admin_client.get(url)
assert response.status_code == 200
assert ProductVariant.objects.filter(pk=variant.pk).exists()
def test_view_variant_images(admin_client, product_with_image):
variant = product_with_image.variants.get()
product_image = product_with_image.images.get()
url = reverse(
'dashboard:variant-images',
kwargs={'product_pk': product_with_image.pk, 'variant_pk': variant.pk})
data = {'images': [product_image.pk]}
response = admin_client.post(url, data)
assert response.status_code == 302
assert get_redirect_location(response) == reverse(
'dashboard:variant-details',
kwargs={'product_pk': product_with_image.pk, 'variant_pk': variant.pk})
assert variant.variant_images.filter(image=product_image).exists()
def test_view_ajax_available_variants_list(
admin_client, product, category, settings):
unavailable_product = Product.objects.create(
name='Test product', price=Money(10, settings.DEFAULT_CURRENCY),
product_type=product.product_type,
category=category, is_published=False)
unavailable_product.variants.create()
url = reverse('dashboard:ajax-available-variants')
response = admin_client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert response.status_code == 200
resp_decoded = json.loads(response.content.decode('utf-8'))
variant = product.variants.get()
assert resp_decoded.get('results') == [
{'id': variant.id, 'text': variant.get_ajax_label()}]
def test_view_product_images(admin_client, product_with_image):
product_image = product_with_image.images.get()
url = reverse(
'dashboard:product-image-list',
kwargs={'product_pk': product_with_image.pk})
response = admin_client.get(url)
assert response.status_code == 200
assert response.context['product'] == product_with_image
assert not response.context['is_empty']
images = response.context['images']
assert len(images) == 1
assert product_image in images
def test_view_product_image_create(
monkeypatch, admin_client, product_with_image):
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
'saleor.dashboard.product.forms.create_product_thumbnails.delay',
mock_create_thumbnails)
url = reverse(
'dashboard:product-image-add',
kwargs={'product_pk': product_with_image.pk})
response = admin_client.get(url)
assert response.status_code == 200
image, image_name = create_image()
data = {'image_0': image, 'alt': ['description']}
response = admin_client.post(url, data, follow=True)
assert response.status_code == 200
assert ProductImage.objects.count() == 2
product_with_image.refresh_from_db()
images = product_with_image.images.all()
assert len(images) == 2
assert image_name in images[1].image.name
assert images[1].alt == 'description'
mock_create_thumbnails.assert_called_once_with(images[1].pk)
def test_view_product_image_edit_same_image_add_description(
monkeypatch, admin_client, product_with_image):
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
'saleor.dashboard.product.forms.create_product_thumbnails.delay',
mock_create_thumbnails)
product_image = product_with_image.images.all()[0]
url = reverse(
'dashboard:product-image-update',
kwargs={
'img_pk': product_image.pk,
'product_pk': product_with_image.pk})
data = {'image_1': ['0.49x0.59'], 'alt': ['description']}
response = admin_client.get(url)
assert response.status_code == 200
response = admin_client.post(url, data, follow=True)
assert response.status_code == 200
assert product_with_image.images.count() == 1
product_image.refresh_from_db()
assert product_image.alt == 'description'
mock_create_thumbnails.assert_called_once_with(product_image.pk)
def test_view_product_image_edit_new_image(
monkeypatch, admin_client, product_with_image):
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
'saleor.dashboard.product.forms.create_product_thumbnails.delay',
mock_create_thumbnails)
product_image = product_with_image.images.all()[0]
url = reverse(
'dashboard:product-image-update',
kwargs={
'img_pk': product_image.pk,
'product_pk': product_with_image.pk})
response = admin_client.get(url)
assert response.status_code == 200
image, image_name = create_image()
data = {'image_0': image, 'alt': ['description']}
response = admin_client.post(url, data, follow=True)
assert response.status_code == 200
assert product_with_image.images.count() == 1
product_image.refresh_from_db()
assert image_name in product_image.image.name
assert product_image.alt == 'description'
mock_create_thumbnails.assert_called_once_with(product_image.pk)
def test_view_product_image_delete(admin_client, product_with_image):
product_image = product_with_image.images.all()[0]
url = reverse(
'dashboard:product-image-delete',
kwargs={
'img_pk': product_image.pk,
'product_pk': product_with_image.pk})
response = admin_client.post(url)
assert response.status_code == 302
assert not ProductImage.objects.filter(pk=product_image.pk)
def test_view_product_image_not_deleted_before_confirmation(
admin_client, product_with_image):
product_image = product_with_image.images.all()[0]
url = reverse(
'dashboard:product-image-delete',
kwargs={
'img_pk': product_image.pk,
'product_pk': product_with_image.pk})
response = admin_client.get(url)
assert response.status_code == 200
assert ProductImage.objects.filter(pk=product_image.pk).count()
def test_view_ajax_reorder_product_images(admin_client, product_with_images):
order_before = [img.pk for img in product_with_images.images.all()]
ordered_images = list(reversed(order_before))
url = reverse(
'dashboard:product-images-reorder',
kwargs={'product_pk': product_with_images.pk})
data = {'ordered_images': ordered_images}
response = admin_client.post(
url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert response.status_code == 200
order_after = [img.pk for img in product_with_images.images.all()]
assert order_after == ordered_images
def test_view_ajax_reorder_product_images_invalid(
        admin_client, product_with_images):
    """Reordering with a pk that is not one of the product's images fails.

    The view must answer 400 with a form error on ``ordered_images``.
    """
    order_before = [img.pk for img in product_with_images.images.all()]
    # BUG FIX: the original used `list(reversed(...)).append(3)`, which is
    # None (list.append returns None), so the view received a missing value
    # instead of the intended invalid ordering. Build the invalid list
    # explicitly; use a pk guaranteed not to belong to this product.
    ordered_images = list(reversed(order_before)) + [max(order_before) + 1]
    url = reverse(
        'dashboard:product-images-reorder',
        kwargs={'product_pk': product_with_images.pk})
    data = {'ordered_images': ordered_images}
    response = admin_client.post(
        url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    assert response.status_code == 400
    resp_decoded = json.loads(response.content.decode('utf-8'))
    assert 'error' in resp_decoded
    assert 'ordered_images' in resp_decoded['error']
def test_view_ajax_upload_image(monkeypatch, admin_client, product_with_image):
mock_create_thumbnails = Mock(return_value=None)
monkeypatch.setattr(
'saleor.dashboard.product.forms.create_product_thumbnails.delay',
mock_create_thumbnails)
product = product_with_image
url = reverse(
'dashboard:product-images-upload', kwargs={'product_pk': product.pk})
image, image_name = create_image()
data = {'image_0': image, 'alt': ['description']}
response = admin_client.post(
url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert response.status_code == 200
assert ProductImage.objects.count() == 2
product_with_image.refresh_from_db()
images = product_with_image.images.all()
assert len(images) == 2
assert image_name in images[1].image.name
mock_create_thumbnails.assert_called_once_with(images[1].pk)
def test_view_attribute_list_no_results(admin_client):
url = reverse('dashboard:attributes')
response = admin_client.get(url)
assert response.status_code == 200
assert response.context['attributes'].object_list == []
def test_view_attribute_list(db, admin_client, color_attribute):
url = reverse('dashboard:attributes')
response = admin_client.get(url)
assert response.status_code == 200
result = response.context['attributes'].object_list
assert len(result) == 1
assert result[0][0] == color_attribute.pk
assert result[0][1] == color_attribute.name
assert len(result[0][3]) == 2
assert not response.context['is_empty']
def test_view_attribute_details(admin_client, color_attribute):
url = reverse(
'dashboard:attribute-details',
kwargs={'pk': color_attribute.pk})
response = admin_client.get(url)
assert response.status_code == 200
assert response.context['attribute'] == color_attribute
def test_view_attribute_details_no_choices(admin_client):
attribute = Attribute.objects.create(slug='size', name='Size')
url = reverse(
'dashboard:attribute-details', kwargs={'pk': attribute.pk})
response = admin_client.get(url)
assert response.status_code == 200
assert response.context['attribute'] == attribute
def test_view_attribute_create(admin_client, color_attribute):
url = reverse('dashboard:attribute-add')
data = {'name': 'test', 'slug': 'test'}
response = admin_client.post(url, data, follow=True)
assert response.status_code == 200
assert Attribute.objects.count() == 2
def test_view_attribute_create_not_valid(admin_client, color_attribute):
url = reverse('dashboard:attribute-add')
data = {}
response = admin_client.post(url, data, follow=True)
assert response.status_code == 200
assert Attribute.objects.count() == 1
def test_view_attribute_edit(color_attribute, admin_client):
url = reverse(
'dashboard:attribute-update',
kwargs={'pk': color_attribute.pk})
data = {'name': 'new_name', 'slug': 'new_slug'}
response = admin_client.post(url, data, follow=True)
assert response.status_code == 200
assert Attribute.objects.count() == 1
color_attribute.refresh_from_db()
assert color_attribute.name == 'new_name'
assert color_attribute.slug == 'new_slug'
def test_view_attribute_delete(admin_client, color_attribute):
url = reverse(
'dashboard:attribute-delete',
kwargs={'pk': color_attribute.pk})
response = admin_client.post(url)
assert response.status_code == 302
assert not Attribute.objects.filter(pk=color_attribute.pk).exists()
def test_view_attribute_not_deleted_before_confirmation(
admin_client, color_attribute):
url = reverse(
'dashboard:attribute-delete',
kwargs={'pk': color_attribute.pk})
response = admin_client.get(url)
assert response.status_code == 200
assert Attribute.objects.filter(pk=color_attribute.pk)
def test_view_attribute_value_create(color_attribute, admin_client):
values = AttributeValue.objects.filter(attribute=color_attribute.pk)
assert values.count() == 2
url = reverse(
'dashboard:attribute-value-add',
kwargs={'attribute_pk': color_attribute.pk})
data = {'name': 'Pink', 'attribute': color_attribute.pk}
response = admin_client.post(url, data, follow=True)
assert response.status_code == 200
values = AttributeValue.objects.filter(attribute=color_attribute.pk)
assert values.count() == 3
def test_view_attribute_value_create_invalid(
color_attribute, admin_client):
values = AttributeValue.objects.filter(attribute=color_attribute.pk)
assert values.count() == 2
url = reverse(
'dashboard:attribute-value-add',
kwargs={'attribute_pk': color_attribute.pk})
data = {}
response = admin_client.post(url, data, follow=True)
assert response.status_code == 200
values = AttributeValue.objects.filter(attribute=color_attribute.pk)
assert values.count() == 2
def test_view_attribute_value_edit(color_attribute, admin_client):
values = AttributeValue.objects.filter(attribute=color_attribute.pk)
assert values.count() == 2
url = reverse(
'dashboard:attribute-value-update',
kwargs={'attribute_pk': color_attribute.pk, 'value_pk': values[0].pk})
data = {'name': 'Pink', 'attribute': color_attribute.pk}
response = admin_client.post(url, data, follow=True)
assert response.status_code == 200
values = AttributeValue.objects.filter(
attribute=color_attribute.pk, name='Pink')
assert len(values) == 1
assert values[0].name == 'Pink'
def test_view_attribute_value_delete(color_attribute, admin_client):
values = AttributeValue.objects.filter(attribute=color_attribute.pk)
assert values.count() == 2
deleted_value = values[0]
url = reverse(
'dashboard:attribute-value-delete',
kwargs={
'attribute_pk': color_attribute.pk, 'value_pk': deleted_value.pk})
response = admin_client.post(url, follow=True)
assert response.status_code == 200
values = AttributeValue.objects.filter(attribute=color_attribute.pk)
assert len(values) == 1
assert deleted_value not in values
def test_view_ajax_reorder_attribute_values(
admin_client, color_attribute):
order_before = [val.pk for val in color_attribute.values.all()]
ordered_values = list(reversed(order_before))
url = reverse(
'dashboard:attribute-values-reorder',
kwargs={'attribute_pk': color_attribute.pk})
data = {'ordered_values': ordered_values}
response = admin_client.post(
url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
order_after = [val.pk for val in color_attribute.values.all()]
assert response.status_code == 200
assert order_after == ordered_values
def test_view_ajax_reorder_attribute_values_invalid(
        admin_client, color_attribute):
    """Reordering with a pk that is not one of the attribute's values fails.

    The view must answer 400 with a form error on ``ordered_values``.
    """
    order_before = [val.pk for val in color_attribute.values.all()]
    # BUG FIX: `list(reversed(...)).append(3)` evaluates to None (list.append
    # returns None), so the original posted a missing value rather than the
    # intended invalid ordering. Build the invalid list explicitly with a pk
    # guaranteed not to belong to this attribute.
    ordered_values = list(reversed(order_before)) + [max(order_before) + 1]
    url = reverse(
        'dashboard:attribute-values-reorder',
        kwargs={'attribute_pk': color_attribute.pk})
    data = {'ordered_values': ordered_values}
    response = admin_client.post(
        url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    assert response.status_code == 400
    resp_decoded = json.loads(response.content.decode('utf-8'))
    assert 'error' in resp_decoded
    assert 'ordered_values' in resp_decoded['error']
def test_get_formfield_name_with_unicode_characters(db):
text_attribute = Attribute.objects.create(
slug='ąęαβδηθλμπ', name='ąęαβδηθλμπ')
assert text_attribute.get_formfield_name() == 'attribute-ąęαβδηθλμπ'
def test_product_variant_form(product):
variant = product.variants.first()
variant.name = ''
variant.save()
example_size = 'Small Size'
data = {'attribute-size': example_size, 'sku': '1111', 'quantity': 2}
form = ProductVariantForm(data, instance=variant)
assert form.is_valid()
form.save()
variant.refresh_from_db()
assert variant.name == example_size
def test_hide_field_in_variant_choice_field_form():
form = VariantChoiceField(Mock())
variants, cart = MagicMock(), MagicMock()
variants.count.return_value = variants.all().count.return_value = 1
variants.all()[0].pk = 'test'
form.update_field_data(variants, discounts=None, taxes=None)
assert isinstance(form.widget, HiddenInput)
assert form.widget.attrs.get('value') == 'test'
def test_product_form_change_attributes(db, product, color_attribute):
product_type = product.product_type
text_attribute = Attribute.objects.create(
slug='author', name='Author')
product_type.product_attributes.add(text_attribute)
color_value = color_attribute.values.first()
new_author = 'Main Tester'
data = {
'name': product.name,
'price': product.price.amount,
'category': product.category.pk,
'description': 'description',
'attribute-author': new_author,
'attribute-color': color_value.pk}
form = ProductForm(data, instance=product)
assert form.is_valid()
product = form.save()
assert product.attributes[str(color_attribute.pk)] == str(color_value.pk)
# Check that new attribute was created for author
author_value = AttributeValue.objects.get(name=new_author)
assert product.attributes[str(text_attribute.pk)] == str(author_value.pk)
def test_product_form_assign_collection_to_product(product):
collection = Collection.objects.create(name='test_collections')
data = {
'name': product.name,
'price': product.price.amount,
'category': product.category.pk,
'description': 'description',
'collections': [collection.pk]}
form = ProductForm(data, instance=product)
assert form.is_valid()
form.save()
assert product.collections.first().name == 'test_collections'
assert collection.products.first().name == product.name
def test_product_form_sanitize_product_description(
product_type, category, settings):
product = Product.objects.create(
name='Test Product', price=Money(10, settings.DEFAULT_CURRENCY),
description='', pk=10, product_type=product_type, category=category)
data = model_to_dict(product)
data['description'] = (
'<b>bold</b><p><i>italic</i></p><h2>Header</h2><h3>subheader</h3>'
'<blockquote>quote</blockquote>'
'<p><a href="www.mirumee.com">link</a></p>'
'<p>an <script>evil()</script>example</p>')
data['price'] = 20
form = ProductForm(data, instance=product)
assert form.is_valid()
form.save()
assert product.description == (
'<b>bold</b><p><i>italic</i></p><h2>Header</h2><h3>subheader</h3>'
'<blockquote>quote</blockquote>'
'<p><a href="www.mirumee.com">link</a></p>'
'<p>an <script>evil()</script>example</p>')
assert product.seo_description == (
'bolditalicHeadersubheaderquotelinkan evil()example')
def test_product_form_seo_description(unavailable_product):
seo_description = (
'This is a dummy product. '
'HTML <b>shouldn\'t be removed</b> since it\'s a simple text field.')
data = model_to_dict(unavailable_product)
data['price'] = 20
data['description'] = 'a description'
data['seo_description'] = seo_description
form = ProductForm(data, instance=unavailable_product)
assert form.is_valid()
form.save()
assert unavailable_product.seo_description == seo_description
def test_product_form_seo_description_too_long(unavailable_product):
description = (
'Saying it fourth made saw light bring beginning kind over herb '
'won\'t creepeth multiply dry rule divided fish herb cattle greater '
'fly divided midst, gathering can\'t moveth seed greater subdue. '
'Lesser meat living fowl called. Dry don\'t wherein. Doesn\'t above '
'form sixth. Image moving earth without forth light whales. Seas '
'were first form fruit that form they\'re, shall air. And. Good of'
'signs darkness be place. Was. Is form it. Whose. Herb signs stars '
'fill own fruit wherein. '
'Don\'t set man face living fifth Thing the whales were. '
'You fish kind. '
'Them, his under wherein place first you night gathering.')
data = model_to_dict(unavailable_product)
data['price'] = 20
data['description'] = description
form = ProductForm(data, instance=unavailable_product)
assert form.is_valid()
form.save()
new_seo_description = unavailable_product.seo_description
assert len(new_seo_description) <= 300
assert new_seo_description.startswith(
'Saying it fourth made saw light bring beginning kind over herb '
'won\'t creepeth multiply dry rule divided fish herb cattle greater '
'fly divided midst, gathering can\'t moveth seed greater subdue. '
'Lesser meat living fowl called. Dry don\'t wherein. Doesn\'t above '
'form sixth. Image moving earth without')
assert (
new_seo_description.endswith('...') or new_seo_description[-1] == '…')
| 34.583857
| 79
| 0.697907
|
acfe31c1edbeace803f7ff5ad82ed4a44633bead
| 2,888
|
py
|
Python
|
maskrcnn_benchmark/modeling/roi_heads/box_head/box_head.py
|
cxq1/paddle_VinVL
|
f9136871c43b033cd209ddc7579fa986208e37db
|
[
"MIT"
] | null | null | null |
maskrcnn_benchmark/modeling/roi_heads/box_head/box_head.py
|
cxq1/paddle_VinVL
|
f9136871c43b033cd209ddc7579fa986208e37db
|
[
"MIT"
] | null | null | null |
maskrcnn_benchmark/modeling/roi_heads/box_head/box_head.py
|
cxq1/paddle_VinVL
|
f9136871c43b033cd209ddc7579fa986208e37db
|
[
"MIT"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
# Copyright (c) 2021 Microsoft Corporation. Licensed under the MIT license.
import paddle
from torch import nn
from .roi_box_feature_extractors import make_roi_box_feature_extractor
from .roi_box_predictors import make_roi_box_predictor
from .inference import make_roi_box_post_processor
from .loss import make_roi_box_loss_evaluator
class ROIBoxHead(paddle.nn.Layer):
    """
    Generic Box Head class.

    Pipeline: pool/extract per-proposal features, predict class logits and
    box regression deltas, then either post-process (inference) or compute
    classification/regression losses (training). The concrete feature
    extractor, predictor, post-processor and loss evaluator are all built
    from the config by factory functions.
    """

    def __init__(self, cfg, in_channels):
        # in_channels: channel count of the incoming feature maps, forwarded
        # to the feature extractor; the predictor consumes the extractor's
        # output channels.
        super(ROIBoxHead, self).__init__()
        self.feature_extractor = make_roi_box_feature_extractor(cfg, in_channels)
        self.predictor = make_roi_box_predictor(
            cfg, self.feature_extractor.out_channels)
        self.post_processor = make_roi_box_post_processor(cfg)
        self.loss_evaluator = make_roi_box_loss_evaluator(cfg)

    def forward(self, features, proposals, targets=None):
        """
        Arguments:
            features (list[Tensor]): feature-maps from possibly several levels
            proposals (list[BoxList]): proposal boxes
            targets (list[BoxList], optional): the ground-truth targets.

        Returns:
            x (Tensor): the result of the feature extractor
            proposals (list[BoxList]): during training, the subsampled proposals
                are returned. During testing, the predicted boxlists are returned
            losses (dict[Tensor]): During training, returns the losses for the
                head. During testing, returns an empty dict.
        """
        if self.training:
            # Faster R-CNN subsamples during training the proposals with a fixed
            # positive / negative ratio; no_grad because subsampling is pure
            # bookkeeping and must not enter the autograd graph.
            with paddle.no_grad():
                proposals = self.loss_evaluator.subsample(proposals, targets)

        # extract features that will be fed to the final classifier. The
        # feature_extractor generally corresponds to the pooler + heads
        x = self.feature_extractor(features, proposals)
        # final classifier that converts the features into predictions
        class_logits, box_regression = self.predictor(x)

        if not self.training:
            # Inference path: decode boxes / apply NMS etc. and return an
            # empty loss dict per the documented contract.
            result = self.post_processor((class_logits, box_regression),
                                         proposals, x)
            return x, result, {}

        # Training path: the loss evaluator keeps state from subsample()
        # above, so it must be called after it in the same forward pass.
        loss_classifier, loss_box_reg = self.loss_evaluator(
            [class_logits], [box_regression]
        )
        return (
            x,
            proposals,
            dict(loss_classifier=loss_classifier, loss_box_reg=loss_box_reg),
        )
def build_roi_box_head(cfg, in_channels):
    """Factory for the box head used by the ROI heads.

    By default this builds a :class:`ROIBoxHead`; if that ever proves
    insufficient, register a new class and select it via the config.
    """
    head = ROIBoxHead(cfg, in_channels)
    return head
| 39.027027
| 96
| 0.667244
|
acfe31c62f589b4efebf857fa9dfd0e944f06e0a
| 3,124
|
py
|
Python
|
app/main/views.py
|
markdan58/danmark-blog
|
191e86217cd35e243208108067ee944b20f32115
|
[
"MIT"
] | null | null | null |
app/main/views.py
|
markdan58/danmark-blog
|
191e86217cd35e243208108067ee944b20f32115
|
[
"MIT"
] | 7
|
2021-03-18T21:32:13.000Z
|
2022-03-11T23:34:18.000Z
|
app/main/views.py
|
markdan58/danmark-blog
|
191e86217cd35e243208108067ee944b20f32115
|
[
"MIT"
] | null | null | null |
from flask import render_template,request,redirect,url_for,abort
from ..models import User
from .forms import UpdateProfile
from .. import db,photos
from flask_login import login_required
from flask_login import login_required, current_user
from . import main
from ..auth.forms import LoginForm
from ..models import Newblog, Comment
from .forms import BlogForm, CommentsForm
@main.route('/user/<uname>/update', methods=['GET', 'POST'])
@login_required
def update_profile(uname):
    """Show the profile-update form for ``uname`` and persist a submission.

    Returns a 404 when no such user exists; redirects back to the profile
    page after a successful bio update.
    """
    account = User.query.filter_by(username=uname).first()
    if account is None:
        abort(404)
    form = UpdateProfile()
    # Guard clause: re-render the form until it validates.
    if not form.validate_on_submit():
        return render_template('profile/update.html', form=form)
    account.bio = form.bio.data
    db.session.add(account)
    db.session.commit()
    return redirect(url_for('.profile', uname=account.username))
@main.route('/user/<uname>/update/pic', methods=['POST'])
@login_required
def update_pic(uname):
    """Handle a profile-picture upload for ``uname``.

    Saves the uploaded file via the ``photos`` upload set and stores its
    relative path on the user record, then redirects to the profile page.
    Responds 404 when the user does not exist (consistent with
    ``update_profile`` / ``profile``).
    """
    user = User.query.filter_by(username=uname).first()
    if user is None:
        # Previously a missing user caused an AttributeError (HTTP 500).
        abort(404)
    if 'photo' in request.files:
        filename = photos.save(request.files['photo'])
        # Bug fix: the saved filename was never interpolated — every user
        # ended up with the same literal placeholder path.
        path = f'photos/{filename}'
        user.profile_pic_path = path
        db.session.commit()
    return redirect(url_for('main.profile', uname=uname))
@main.route('/user/<uname>')
@login_required
def profile(uname):
    """Render the profile page for ``uname``; 404 when the user is unknown."""
    account = User.query.filter_by(username=uname).first()
    if account is None:
        abort(404)
    return render_template("Profile/profile.html", user=account)
@main.route('/comments/<int:id>', methods=['GET', 'POST'])
@login_required
def comment(id):
    """List every comment attached to blog post ``id``."""
    post_comments = Comment.query.filter_by(Newblog_id=id).all()
    return render_template('comments.html', comments=post_comments)
@main.route('/post/comments/new/<int:id>', methods=['GET', 'POST'])
@login_required
def new_comment(id):
    """Render the comment form for post ``id`` and save a valid submission."""
    form = CommentsForm()
    # Guard clause: show the form until it validates.
    if not form.validate_on_submit():
        return render_template('new_comment.html', title=' comment',
                               comment_form=form)
    record = Comment(Newblog_id=id, comments=form.comments.data)
    db.session.add(record)
    db.session.commit()
    return redirect("/")
@main.route('/', methods=['GET', 'POST'])
@login_required
def new_blog():
    """Home page: list all blog posts and accept new-post submissions."""
    form = BlogForm()
    posts = Newblog.query.all()
    if form.validate_on_submit():
        entry = Newblog(actual_blog=form.content.data, title=form.title.data)
        # NOTE: the model method is spelled "save_Newbolg" upstream — keep it.
        entry.save_Newbolg()
        return redirect("/")
    return render_template('blogs.html', Newblog_form=form, posts=posts)
@main.route("/sports")
def sports():
return render_template('sports.html', title = 'sports')
@main.route("/contactme")
def contactme():
return render_template('contactme.html', title = 'contactme')
@main.route("/education")
def education():
return render_template('education.html', title = 'education')
@main.route("/interests")
def interests():
return render_template('interests.html', title = 'interests')
@main.route("/vision")
def vision():
return render_template('vision.html', title = 'vision')
| 25.606557
| 79
| 0.685019
|
acfe31e1928f6f7eba0cafb5b8b6ec9973099423
| 42
|
py
|
Python
|
ADVECTOR/__init__.py
|
john-science/ADVECTOR
|
5c5ca7595c2c051f1a088b1f0e694936c3da3610
|
[
"MIT"
] | 7
|
2021-09-07T02:32:00.000Z
|
2022-01-15T11:35:02.000Z
|
ADVECTOR/__init__.py
|
TheOceanCleanupAlgorithms/ADVECT
|
e27ce15da6a2fcbccbe363f8c2415b0122696d1f
|
[
"MIT"
] | 1
|
2021-12-24T15:16:26.000Z
|
2021-12-24T15:16:26.000Z
|
ADVECTOR/__init__.py
|
TheOceanCleanupAlgorithms/ADVECT
|
e27ce15da6a2fcbccbe363f8c2415b0122696d1f
|
[
"MIT"
] | 1
|
2021-12-12T15:13:52.000Z
|
2021-12-12T15:13:52.000Z
|
from ADVECTOR._version import __version__
| 21
| 41
| 0.880952
|
acfe344e00c5feedf76d98c0a7a25f3726169335
| 22,823
|
py
|
Python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/operations/_virtual_network_peerings_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 3
|
2020-06-23T02:25:27.000Z
|
2021-09-07T18:48:11.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/operations/_virtual_network_peerings_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 510
|
2019-07-17T16:11:19.000Z
|
2021-08-02T08:38:32.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/operations/_virtual_network_peerings_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 5
|
2019-09-04T12:51:37.000Z
|
2020-09-16T07:28:40.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkPeeringsOperations(object):
    """VirtualNetworkPeeringsOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_07_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Thin container: the pipeline client and (de)serializers are shared
        # objects injected by the generated service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    # Raw single DELETE request; begin_delete() layers LRO polling on top.
    def _delete_initial(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        # Map well-known HTTP statuses onto specific azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        # Construct URL
        url = self._delete_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200/202/204 are all acceptable for an (asynchronous) delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    def begin_delete(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified virtual network peering.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param virtual_network_peering_name: The name of the virtual network peering.
        :type virtual_network_peering_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
        False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                virtual_network_name=virtual_network_name,
                virtual_network_peering_name=virtual_network_peering_name,
                # Return the raw PipelineResponse so the poller can drive
                # the remainder of the long-running operation.
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Delete has no body to deserialize; only honor a custom cls.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    def get(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.VirtualNetworkPeering"
        """Gets the specified virtual network peering.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param virtual_network_peering_name: The name of the virtual network peering.
        :type virtual_network_peering_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualNetworkPeering, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_07_01.models.VirtualNetworkPeering
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    # Raw single PUT request; begin_create_or_update() layers LRO polling on top.
    def _create_or_update_initial(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering"
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.VirtualNetworkPeering"
        cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(virtual_network_peering_parameters, 'VirtualNetworkPeering')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # Both 200 (updated) and 201 (created) carry the same model body.
        if response.status_code == 200:
            deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.VirtualNetworkPeering"]
        """Creates or updates a peering in the specified virtual network.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param virtual_network_peering_name: The name of the peering.
        :type virtual_network_peering_name: str
        :param virtual_network_peering_parameters: Parameters supplied to the create or update virtual
        network peering operation.
        :type virtual_network_peering_parameters: ~azure.mgmt.network.v2019_07_01.models.VirtualNetworkPeering
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
        False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_07_01.models.VirtualNetworkPeering]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                virtual_network_name=virtual_network_name,
                virtual_network_peering_name=virtual_network_peering_name,
                virtual_network_peering_parameters=virtual_network_peering_parameters,
                # Return the raw PipelineResponse so the poller can drive
                # the remainder of the long-running operation.
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final body once polling completes.
            deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    def list(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.VirtualNetworkPeeringListResult"]
        """Gets all virtual network peerings in a virtual network.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either VirtualNetworkPeeringListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.VirtualNetworkPeeringListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build either the first-page request or a follow-up request
            # for the server-supplied next_link URL.
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Pull the item list and the link to the following page out of
            # one deserialized page of results.
            deserialized = self._deserialize('VirtualNetworkPeeringListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch and validate a single page; ItemPaged drives pagination.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings'} # type: ignore
| 51.870455
| 250
| 0.680103
|
acfe35c4be069e9d2fde87c60b0bc7b97cdc40af
| 12,901
|
py
|
Python
|
chrome/test/chromedriver/test/run_java_tests.py
|
metux/chromium-deb
|
3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
chrome/test/chromedriver/test/run_java_tests.py
|
metux/chromium-deb
|
3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
chrome/test/chromedriver/test/run_java_tests.py
|
metux/chromium-deb
|
3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs the WebDriver Java acceptance tests.
This script is called from chrome/test/chromedriver/run_all_tests.py and reports
results using the buildbot annotation scheme.
For ChromeDriver documentation, refer to http://code.google.com/p/chromedriver.
"""
import optparse
import os
import shutil
import stat
import sys
import xml.dom.minidom as minidom
_THIS_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(1, os.path.join(_THIS_DIR, os.pardir))
import chrome_paths
import test_environment
import util
if util.IsLinux():
sys.path.insert(0, os.path.join(chrome_paths.GetSrc(), 'build', 'android'))
from pylib import constants
class TestResult(object):
  """Outcome of one attempted test case."""

  def __init__(self, name, time, failure):
    """Records the outcome of a single test.

    Args:
      name: the full name of the test.
      time: the amount of time the test ran, in seconds.
      failure: the test error or failure message, or None if the test passed.
    """
    self._full_name = name
    self._elapsed = time
    self._failure_msg = failure

  def GetName(self):
    """Returns the full test name."""
    return self._full_name

  def GetTime(self):
    """Returns how long the test ran, in seconds."""
    return self._elapsed

  def IsPass(self):
    """Returns True when no failure message was recorded."""
    return self._failure_msg is None

  def GetFailureMessage(self):
    """Returns the error/failure message, or None for a passing test."""
    return self._failure_msg
def _Run(java_tests_src_dir, test_filter,
         chromedriver_path, chrome_path, log_path, android_package_key,
         verbose, debug):
  """Run the WebDriver Java tests and return the test results.
  Args:
    java_tests_src_dir: the java test source code directory.
    test_filter: the filter to use when choosing tests to run. Format is same
        as Google C++ Test format.
    chromedriver_path: path to ChromeDriver exe.
    chrome_path: path to Chrome exe.
    log_path: path to server log.
    android_package_key: name of Chrome's Android package.
    verbose: whether the output should be verbose.
    debug: whether the tests should wait until attached by a debugger.
  Returns:
    A list of |TestResult|s.
  """
  # Stage a scratch directory with the layout the Java test jar expects
  # (keystore, javascript resources, third-party JS).
  test_dir = util.MakeTempDir()
  keystore_path = ('java', 'client', 'test', 'keystore')
  required_dirs = [keystore_path[:-1],
                   ('javascript',),
                   ('third_party', 'closure', 'goog'),
                   ('third_party', 'js')]
  for required_dir in required_dirs:
    os.makedirs(os.path.join(test_dir, *required_dir))
  test_jar = 'test-standalone.jar'
  class_path = test_jar
  shutil.copyfile(os.path.join(java_tests_src_dir, 'keystore'),
                  os.path.join(test_dir, *keystore_path))
  util.Unzip(os.path.join(java_tests_src_dir, 'common.zip'), test_dir)
  shutil.copyfile(os.path.join(java_tests_src_dir, test_jar),
                  os.path.join(test_dir, test_jar))
  # Test configuration is passed to the JVM as Java system properties.
  sys_props = ['selenium.browser=chrome',
               'webdriver.chrome.driver=' + os.path.abspath(chromedriver_path)]
  if chrome_path:
    if util.IsLinux() and not util.Is64Bit():
      # Workaround for crbug.com/611886 and
      # https://bugs.chromium.org/p/chromedriver/issues/detail?id=1695
      chrome_wrapper_path = os.path.join(test_dir, 'chrome-wrapper-no-sandbox')
      with open(chrome_wrapper_path, 'w') as f:
        f.write('#!/bin/sh\n')
        f.write('exec %s --no-sandbox --disable-gpu "$@"\n' %
            os.path.abspath(chrome_path))
      st = os.stat(chrome_wrapper_path)
      os.chmod(chrome_wrapper_path, st.st_mode | stat.S_IEXEC)
    else:
      chrome_wrapper_path = os.path.abspath(chrome_path)
    sys_props += ['webdriver.chrome.binary=' + chrome_wrapper_path]
  if log_path:
    sys_props += ['webdriver.chrome.logfile=' + log_path]
  if android_package_key:
    android_package = constants.PACKAGE_INFO[android_package_key].package
    sys_props += ['webdriver.chrome.android_package=' + android_package]
    if android_package_key == 'chromedriver_webview_shell':
      android_activity = constants.PACKAGE_INFO[android_package_key].activity
      android_process = '%s:main' % android_package
      sys_props += ['webdriver.chrome.android_activity=' + android_activity]
      sys_props += ['webdriver.chrome.android_process=' + android_process]
  if test_filter:
    # Test jar actually takes a regex. Convert from glob.
    test_filter = test_filter.replace('*', '.*')
    sys_props += ['filter=' + test_filter]
  jvm_args = []
  if debug:
    # Suspend the JVM on startup and wait for a debugger on port 33081.
    transport = 'dt_socket'
    if util.IsWindows():
      transport = 'dt_shmem'
    jvm_args += ['-agentlib:jdwp=transport=%s,server=y,suspend=y,'
                 'address=33081' % transport]
    # Unpack the sources into the test directory and add to the class path
    # for ease of debugging, particularly with jdb.
    util.Unzip(os.path.join(java_tests_src_dir, 'test-nodeps-srcs.jar'),
               test_dir)
    class_path += ':' + test_dir
  return _RunAntTest(
      test_dir, 'org.openqa.selenium.chrome.ChromeDriverTests',
      class_path, sys_props, jvm_args, verbose)
def _RunAntTest(test_dir, test_class, class_path, sys_props, jvm_args, verbose):
  """Runs a single Ant JUnit test suite and returns the |TestResult|s.
  Args:
    test_dir: the directory to run the tests in.
    test_class: the name of the JUnit test suite class to run.
    class_path: the Java class path used when running the tests, colon delimited
    sys_props: Java system properties to set when running the tests.
    jvm_args: Java VM command line args to use.
    verbose: whether the output should be verbose.
  Returns:
    A list of |TestResult|s, or None if ant itself failed to run.
  """
  def _CreateBuildConfig(test_name, results_file, class_path, junit_props,
                         sys_props, jvm_args):
    # Generates the Ant build.xml text that runs the JUnit suite with the
    # given properties and writes XML-formatted results.
    def _SystemPropToXml(prop):
      key, value = prop.split('=')
      return '<sysproperty key="%s" value="%s"/>' % (key, value)
    def _JvmArgToXml(arg):
      return '<jvmarg value="%s"/>' % arg
    return '\n'.join([
        '<project>',
        ' <target name="test">',
        ' <junit %s>' % ' '.join(junit_props),
        ' <formatter type="xml"/>',
        ' <classpath>',
        ' <pathelement path="%s"/>' % class_path,
        ' </classpath>',
        ' ' + '\n '.join(map(_SystemPropToXml, sys_props)),
        ' ' + '\n '.join(map(_JvmArgToXml, jvm_args)),
        ' <test name="%s" outfile="%s"/>' % (test_name, results_file),
        ' </junit>',
        ' </target>',
        '</project>'])
  def _ProcessResults(results_path):
    # Parses Ant's JUnit XML report into a list of |TestResult|s.
    doc = minidom.parse(results_path)
    tests = []
    for test in doc.getElementsByTagName('testcase'):
      name = test.getAttribute('classname') + '.' + test.getAttribute('name')
      time = test.getAttribute('time')
      failure = None
      error_nodes = test.getElementsByTagName('error')
      failure_nodes = test.getElementsByTagName('failure')
      if error_nodes:
        failure = error_nodes[0].childNodes[0].nodeValue
      elif failure_nodes:
        failure = failure_nodes[0].childNodes[0].nodeValue
      tests += [TestResult(name, time, failure)]
    return tests
  # Keep running after individual failures so a full report is produced.
  junit_props = ['printsummary="yes"',
                 'fork="yes"',
                 'haltonfailure="no"',
                 'haltonerror="no"']
  if verbose:
    junit_props += ['showoutput="yes"']
  ant_file = open(os.path.join(test_dir, 'build.xml'), 'w')
  ant_file.write(_CreateBuildConfig(
      test_class, 'results', class_path, junit_props, sys_props, jvm_args))
  ant_file.close()
  if util.IsWindows():
    ant_name = 'ant.bat'
  else:
    ant_name = 'ant'
  code = util.RunCommand([ant_name, 'test'], cwd=test_dir)
  if code != 0:
    # Ant itself failed; returns None (callers must handle the absence
    # of results).
    print 'FAILED to run java tests of %s through ant' % test_class
    return
  return _ProcessResults(os.path.join(test_dir, 'results.xml'))
def PrintTestResults(results):
  """Prints the given results in a format recognized by the buildbot.

  Args:
    results: list of |TestResult|s as returned by _Run/_RunAntTest.
  Returns:
    The number of failed tests.
  """
  failures = []
  failure_names = []
  for result in results:
    if not result.IsPass():
      failures += [result]
      # Shorten "pkg.Class.test" to "Class.test" for readable build steps.
      failure_names += ['.'.join(result.GetName().split('.')[-2:])]
  print 'Ran %s tests' % len(results)
  print 'Failed %s:' % len(failures)
  util.AddBuildStepText('failed %s/%s' % (len(failures), len(results)))
  for result in failures:
    print '=' * 80
    print '=' * 10, result.GetName(), '(%ss)' % result.GetTime()
    print result.GetFailureMessage()
    # Only annotate the first few failures to keep the build page compact.
    if len(failures) < 10:
      util.AddBuildStepText('.'.join(result.GetName().split('.')[-2:]))
  # Emit a ready-to-paste --filter value for rerunning just the failures.
  print 'Rerun failing tests with filter:', ':'.join(failure_names)
  return len(failures)
def main():
  """Runs the ChromeDriver Java acceptance tests and reports the results.

  Returns:
    1 if the Java test sources are missing, otherwise the number of failed
    tests (0 means success).
  """
  parser = optparse.OptionParser()
  parser.add_option(
      '', '--verbose', action='store_true', default=False,
      help='Whether output should be verbose')
  parser.add_option(
      '', '--debug', action='store_true', default=False,
      help='Whether to wait to be attached by a debugger')
  parser.add_option(
      '', '--chromedriver', type='string', default=None,
      help='Path to a build of the chromedriver library(REQUIRED!)')
  parser.add_option(
      '', '--chrome', type='string', default=None,
      help='Path to a build of the chrome binary')
  parser.add_option(
      '', '--log-path',
      help='Output verbose server logs to this file')
  parser.add_option(
      '', '--chrome-version', default='HEAD',
      help='Version of chrome. Default is \'HEAD\'')
  parser.add_option(
      '', '--android-package', help='Android package key')
  parser.add_option(
      '', '--filter', type='string', default=None,
      help='Filter for specifying what tests to run, "*" will run all. E.g., '
           '*testShouldReturnTitleOfPageIfSet')
  parser.add_option(
      '', '--also-run-disabled-tests', action='store_true', default=False,
      help='Include disabled tests while running the tests')
  parser.add_option(
      '', '--isolate-tests', action='store_true', default=False,
      help='Relaunch the jar test harness after each test')
  options, _ = parser.parse_args()

  options.chromedriver = util.GetAbsolutePathOfUserPath(options.chromedriver)
  if options.chromedriver is None or not os.path.exists(options.chromedriver):
    # Fix: added the missing space between the two concatenated sentences.
    parser.error('chromedriver is required or the given path is invalid. ' +
                 'Please run "%s --help" for help' % __file__)

  if options.android_package:
    if options.android_package not in constants.PACKAGE_INFO:
      parser.error('Invalid --android-package')
    if options.chrome_version != 'HEAD':
      parser.error('Android does not support the --chrome-version argument.')
    environment = test_environment.AndroidTestEnvironment(
        options.android_package)
  else:
    environment = test_environment.DesktopTestEnvironment(
        options.chrome_version)

  try:
    environment.GlobalSetUp()
    # Run passed tests when filter is not provided.
    if options.isolate_tests:
      test_filters = environment.GetPassedJavaTests()
    else:
      if options.filter:
        test_filter = options.filter
      else:
        test_filter = '*'
      if not options.also_run_disabled_tests:
        # Append the disabled-test matchers to the exclusion part of the
        # filter (after the '-' separator), creating it if necessary.
        if '-' in test_filter:
          test_filter += ':'
        else:
          test_filter += '-'
        test_filter += ':'.join(environment.GetDisabledJavaTestMatchers())
      test_filters = [test_filter]
    java_tests_src_dir = os.path.join(chrome_paths.GetSrc(), 'chrome', 'test',
                                      'chromedriver', 'third_party',
                                      'java_tests')
    if (not os.path.exists(java_tests_src_dir) or
        not os.listdir(java_tests_src_dir)):
      java_tests_url = ('https://chromium.googlesource.com/chromium/deps'
                        '/webdriver')
      print ('"%s" is empty or it doesn\'t exist. ' % java_tests_src_dir +
             'Need to map ' + java_tests_url + ' to '
             'chrome/test/chromedriver/third_party/java_tests in .gclient.\n'
             'Alternatively, do:\n'
             ' $ cd chrome/test/chromedriver/third_party\n'
             ' $ git clone %s java_tests' % java_tests_url)
      return 1
    results = []
    # Fix: the loop variable was named `filter`, shadowing the builtin.
    for one_filter in test_filters:
      results += _Run(
          java_tests_src_dir=java_tests_src_dir,
          test_filter=one_filter,
          chromedriver_path=options.chromedriver,
          chrome_path=util.GetAbsolutePathOfUserPath(options.chrome),
          log_path=options.log_path,
          android_package_key=options.android_package,
          verbose=options.verbose,
          debug=options.debug)
    return PrintTestResults(results)
  finally:
    # Always tear down, even when setup or the run itself raised.
    environment.GlobalTearDown()
if __name__ == '__main__':
  # Exit code: number of failed tests, or 1 when the test sources are missing.
  sys.exit(main())
| 37.071839
| 80
| 0.655143
|
acfe37579baaa040f420fcce27c66754ddf57152
| 11,958
|
py
|
Python
|
the_ark/s3_client.py
|
meltmedia/the-ark
|
d559897494e02a2e2048fdc44014f17af89691bb
|
[
"Apache-2.0"
] | null | null | null |
the_ark/s3_client.py
|
meltmedia/the-ark
|
d559897494e02a2e2048fdc44014f17af89691bb
|
[
"Apache-2.0"
] | 51
|
2015-01-27T18:13:41.000Z
|
2022-03-11T23:16:29.000Z
|
the_ark/s3_client.py
|
meltmedia/the-ark
|
d559897494e02a2e2048fdc44014f17af89691bb
|
[
"Apache-2.0"
] | null | null | null |
import boto.s3.connection
import mimetypes
import os
import shutil
import tempfile
import urllib
import urlparse
import logging
from boto.s3.key import Key
from StringIO import StringIO
logger = logging.getLogger(__name__)

# Upper bound on the number of chunks a file may be split into
# (the generated part names, 'part%04d', only have four digits).
MAX_FILE_SPLITS = 9999
# Size of each chunk when splitting a file for multipart upload (~6 MB;
# per _split_file's note, S3 requires parts of at least 5 MB).
DEFAULT_FILE_SPLIT_SIZE = 6291456
# Files at or below this size (~20 MB) are uploaded in a single request.
DEFAULT_MINIMUM_SPLIT_AT_SIZE = 20000000
class S3Client(object):
    """A client that helps user to send and get files from S3"""
    s3_connection = None
    bucket = None

    def __init__(self, bucket):
        """
        Sets the bucket name that will be used throughout.
        :param
            - bucket:   string - The name of the bucket you will be working with
        """
        self.bucket_name = bucket

    def connect(self):
        """Start the amazon connection using the system's boto.cfg file to retrieve the credentials"""
        if self.s3_connection:
            return
        try:
            # - Amazon S3 credentials will use Boto's fall back config, looks for boto.cfg then environment variables
            self.s3_connection = boto.s3.connection.S3Connection(
                is_secure=False)
            self.bucket = self.s3_connection.get_bucket(
                self.bucket_name, validate=False)
        except Exception as s3_connection_exception:
            # - Reset the variables on failure to allow a reconnect
            self.s3_connection = None
            self.bucket = None
            message = "Exception while connecting to S3: {0}".format(s3_connection_exception)
            raise S3ClientException(message)

    def store_file(self, s3_path, file_to_store, filename, return_url=False, mime_type=None,
                   chunk_at_size=DEFAULT_MINIMUM_SPLIT_AT_SIZE):
        """
        Pushes the desired file up to S3 (e.g. log file).
        :param
            - s3_path:        string - The S3 path to the folder in which you'd like to store the file
            - file_to_store:  StringIO or string - The fileIO or file local file path for the file to be sent
            - filename:       string - The name the file will have when on S3. Should include the file extension
            - return_url:     boolean - Whether to return the path to the file on S3
            - mime_type:      string - the mime type the file should be saved as, ex: text/html or image/png
            - chunk_at_size:  int - the size of which the file should be split to multi-upload (default ~ 20 mb)
        :return
            - file_url:       string - The path to the file on S3. This is returned only is return_url is set to true
        """
        self.connect()
        try:
            s3_file = Key(self.bucket)
            s3_file.key = self._generate_file_path(s3_path, filename)
            # --- Set the Content type for the file being sent (so that it downloads properly)
            # - content_type can be 'image/png', 'application/pdf', 'text/plain', etc.
            # BUG FIX: guess_type() returns a (type, encoding) tuple; only the
            # type string belongs in the Content-Type header.
            mime_type = mimetypes.guess_type(filename)[0] if mime_type is None else mime_type
            s3_file.set_metadata('Content-Type', mime_type)
            # - Check if file is a disk file (path string) larger than chunk_at_size;
            #   if so, split it up and use S3's multipart upload.
            multi_part_upload_successful = False
            if isinstance(file_to_store, str) and os.path.getsize(file_to_store) > chunk_at_size:
                split_file_dir = None
                multipart_file = self.bucket.initiate_multipart_upload(key_name=s3_file.key, metadata=s3_file.metadata)
                try:
                    # - Split the file and get it chunky
                    split_file_dir = self._split_file(file_to_store)
                    # - Upload the file parts (part numbers are 1-based)
                    file_count = 0
                    for part_name in os.listdir(split_file_dir):
                        file_count += 1
                        # Close each part once uploaded so handles don't leak.
                        file_part = open(os.path.join(split_file_dir, part_name), 'rb')
                        try:
                            multipart_file.upload_part_from_file(file_part, file_count)
                        finally:
                            file_part.close()
                    # - Complete the upload
                    multipart_file.complete_upload()
                    multi_part_upload_successful = True
                except boto.s3.connection.S3ResponseError as s3_error:
                    logger.warning("A S3 Response error was caught while attempting to chunk and upload the PDF | {}\n"
                                   "Will now attempt to send the file as a whole...".format(s3_error))
                    multipart_file.cancel_upload()
                except Exception as s3_error:
                    logger.warning("Unexpected Error encountered an issue while chunking and uploading the PDF | {}\n"
                                   "Will now attempt to send the file as a whole...".format(s3_error))
                    multipart_file.cancel_upload()
                finally:
                    # - Remove the folder from splitting the file
                    if split_file_dir:
                        shutil.rmtree(split_file_dir)
            # - Upload the file as a whole (also the fallback when the
            #   multipart upload above failed)
            if not multi_part_upload_successful:
                if isinstance(file_to_store, (str, unicode)):
                    s3_file.set_contents_from_filename(file_to_store)
                else:
                    s3_file.set_contents_from_file(file_to_store)
            if return_url:
                file_key = self.bucket.get_key(s3_file.key)
                file_key.set_acl('public-read')
                file_url = file_key.generate_url(0, query_auth=False)
                # - Certain server side permissions might cause a x-amz-security-token parameter to be added to the url
                # Split the url into its pieces
                scheme, netloc, path, params, query, fragment = urlparse.urlparse(file_url)
                # BUG FIX: parse the query string into its own variable instead
                # of clobbering `params` (the path-parameters component), which
                # was previously passed back into urlunparse as a dict.
                query_params = urlparse.parse_qs(query)
                if 'x-amz-security-token' in query_params:
                    del query_params['x-amz-security-token']
                # Rebuild the query without the x-amz-security-token.
                # BUG FIX: doseq=True so the list values from parse_qs are
                # encoded as repeated parameters, not as list reprs.
                query = urllib.urlencode(query_params, doseq=True)
                return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
        except Exception as store_file_exception:
            message = "Exception while storing file on S3: {0}".format(store_file_exception)
            raise S3ClientException(message)

    def get_file(self, s3_path, file_to_get):
        """
        Stores the desired file locally (e.g. configuration file).
        :param
            - s3_path:      string - The S3 path to the folder which contains the file
            - file_to_get:  string - The name of the file you are looking for in the folder
        :return
            - retrieved_file    StringIO - an IO object containing the content of the file retrieved from S3
        """
        self.connect()
        try:
            if self.verify_file(s3_path, file_to_get):
                retrieved_file = StringIO()
                s3_file = self.bucket.get_key(
                    self._generate_file_path(s3_path, file_to_get))
                s3_file.get_contents_to_file(retrieved_file)
                return retrieved_file
            else:
                # NOTE: this is re-wrapped by the handler below, so callers see
                # the "Exception while retrieving file" message.
                raise S3ClientException("File not found in S3")
        except Exception as get_file_exception:
            message = "Exception while retrieving file from S3: {0}".format(get_file_exception)
            raise S3ClientException(message)

    def verify_file(self, s3_path, file_to_verify):
        """
        Verifies a file (e.g. configuration file) is on S3 and returns
        "True" or "False".
        :param
            - s3_path:          string - The S3 path to the folder which contains the file
            - file_to_verify:   string - The name of the file you are looking for in the folder
        :return
            - boolean:  True if .get_key returns an instance of a Key object and False if .get_key returns None:
        """
        self.connect()
        try:
            file_path = self._generate_file_path(s3_path, file_to_verify)
            # get_key returns None when the key does not exist.
            return bool(self.bucket.get_key(file_path))
        except Exception as verify_file_exception:
            message = "Exception while verifying file on S3: {0}".format(verify_file_exception)
            raise S3ClientException(message)

    def _generate_file_path(self, s3_path, file_to_store):
        """
        Ensures that the / situation creates a proper path by removing any double slash possibilities
        :param
            - s3_path:          string - The path to the folder you wish to store the file in
            - file_to_store:    string - The name of the file you wish to store
        :return
            - string:   The concatenated version of the /folder/filename path
        """
        return "{0}/{1}".format(s3_path.strip("/"), file_to_store.strip("/"))

    def get_all_filenames_in_folder(self, path_to_folder):
        """
        Retrieves a list of the files/keys in a folder on S3
        :param
            - path_to_folder:   string - The path to the folder on S3. This should start after the bucket name
        :return
            - key_list: list - The list of keys in the folder
        """
        self.connect()
        s3_folder_path = str(path_to_folder)
        key_list = self.bucket.list(prefix=s3_folder_path)
        return key_list

    def get_most_recent_file_from_s3_key_list(self, key_list):
        """
        Sorts through the list of files in s3 key list object and returns the most recently modified file in the list
        :param
            - key_list: list - The list of files returned from a s3.bucket.list() operation
        :return
            - key   boto.s3.Key - The most recently modified file in the key list
        """
        # NOTE(review): last_modified values are compared as strings; this is
        # chronological only if boto returns ISO-8601 timestamps - confirm.
        most_recent_key = None
        for key in key_list:
            if not most_recent_key or key.last_modified > most_recent_key.last_modified:
                most_recent_key = key
        return most_recent_key

    def _split_file(self, from_file, file_chunk_size=DEFAULT_FILE_SPLIT_SIZE):
        """
        Split a given file into smaller chunks named partXXXX into a temp at a default size of ~ 6 mb. The temp
        folder should be deleted after use.
        WARNING: You cannot split into more than 9999 files.
        :param
            - from_file:        string - the file to split up
            - file_chunk_size:  int - number of Bytes each split should contain (Should be > 5 MB for Amazon S3 minimum)
        :return:
            - temp_dir:     string - temp folder location of split file, use to iterate through the split files
        """
        if os.path.getsize(from_file) > (MAX_FILE_SPLITS * file_chunk_size):
            raise S3ClientException("Could not split the file.\nError: Input file is too large!\n")
        # NOTE(review): the minimum check uses the module default rather than
        # file_chunk_size - presumably intentional (S3 minimum part size).
        elif os.path.getsize(from_file) < DEFAULT_FILE_SPLIT_SIZE:
            raise S3ClientException("Could not split the file.\nError: Input file is too small!\n")
        try:
            temp_dir = tempfile.mkdtemp()
            part_num = 0
            with open(from_file, 'rb') as input_file:
                chunk = input_file.read(file_chunk_size)
                while chunk:
                    part_num += 1
                    # FIX: close each part file deterministically instead of
                    # relying on garbage collection of the open handle.
                    with open(os.path.join(temp_dir, ('part%04d' % part_num)), 'wb') as part_file:
                        part_file.write(chunk)
                    chunk = input_file.read(file_chunk_size)
            return temp_dir
        except Exception as e:
            raise S3ClientException("Could not split the file.\nError: {}\n".format(e))
class S3ClientException(Exception):
    """Raised for any failure while connecting to or transferring with S3."""
    def __init__(self, message):
        # Call the base initializer so args, repr() and pickling behave
        # correctly; the original skipped this, leaving args empty.
        super(S3ClientException, self).__init__(message)
        self.msg = message

    def __str__(self):
        return self.msg
| 45.124528
| 120
| 0.608965
|
acfe3792aabca90be7d7fbe95dc975f507492e3d
| 2,154
|
py
|
Python
|
setup.py
|
ebellocchia/telegram_periodic_msg_bot
|
183e2dae88504c26deb337fc1ad0845cc0cb8cb8
|
[
"MIT"
] | 4
|
2021-10-09T10:49:55.000Z
|
2022-02-06T22:52:48.000Z
|
setup.py
|
ebellocchia/telegram_periodic_msg_bot
|
183e2dae88504c26deb337fc1ad0845cc0cb8cb8
|
[
"MIT"
] | null | null | null |
setup.py
|
ebellocchia/telegram_periodic_msg_bot
|
183e2dae88504c26deb337fc1ad0845cc0cb8cb8
|
[
"MIT"
] | 1
|
2021-12-04T15:11:09.000Z
|
2021-12-04T15:11:09.000Z
|
import os
import setuptools
import re
# Load long description
# Load long description
def load_long_description(desc_file):
    """Return the full text of *desc_file* (e.g. README.md).

    Fix: the file is now opened via a context manager so the handle is
    closed deterministically instead of leaking until garbage collection.
    """
    with open(desc_file, "r") as fin:
        return fin.read()
# Load version
# Load version
def load_version(*path_parts):
    """Extract the package version from a ``_version.py``-style file.

    The file must contain a line of the form ``__version__: str = "X.Y.Z"``.

    Fixes: the original opened the file twice and leaked both handles, and
    used the redundant ``matches and len(matches) > 0`` check.

    :param path_parts: path components joined with os.path.join
    :return: the version string
    :raises RuntimeError: if no version assignment is found
    """
    version_file = os.path.join(*path_parts)
    with open(version_file, "r") as fin:
        file_contents = fin.read().rstrip()
    match = re.search(r'__version__: str = "([^"]+)"', file_contents)
    if match:
        return match.group(1)
    raise RuntimeError(f"Cannot find version string in {version_file}")
# Load requirements
# Load requirements
def load_requirements(req_file):
    """Read *req_file* and return its requirement entries.

    Blank lines and comment lines (starting with "#") are skipped; every
    returned entry is stripped of surrounding whitespace.
    """
    with open(req_file, "r") as fin:
        stripped = (raw_line.strip() for raw_line in fin.read().splitlines())
        return [entry for entry in stripped
                if entry and not entry.startswith("#")]
# Load version
version = load_version("telegram_periodic_msg_bot", "_version.py")

# Package metadata; the download_url tag must match a published git tag
# of the form v<version>.
setuptools.setup(
    name="telegram_periodic_msg_bot",
    version=version,
    author="Emanuele Bellocchia",
    author_email="ebellocchia@gmail.com",
    maintainer="Emanuele Bellocchia",
    maintainer_email="ebellocchia@gmail.com",
    description="Telegram bot for sending periodic messages",
    long_description=load_long_description("README.md"),
    long_description_content_type="text/markdown",
    url="https://github.com/ebellocchia/telegram_periodic_msg_bot",
    download_url="https://github.com/ebellocchia/telegram_periodic_msg_bot/archive/v%s.tar.gz" % version,
    license="MIT",
    install_requires=load_requirements("requirements.txt"),
    packages=setuptools.find_packages(exclude=[]),
    # Ship the bundled English language file with the package.
    package_data={"telegram_periodic_msg_bot": ["lang/lang_en.xml"]},
    keywords="telegram, bot, telegram bot, periodic messages",
    platforms=["any"],
    classifiers=[
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Intended Audience :: Developers",
    ],
    python_requires=">=3.7",
)
| 33.65625
| 105
| 0.687558
|
acfe393e6c93a0199864320e97aa935eba4bef63
| 4,292
|
py
|
Python
|
lib/python2.7/site-packages/networkx/linalg/tests/test_graphmatrix.py
|
nishaero/wifi-userseg-ryu
|
1132f2c813b79eff755bdd1a9e73e7ad3980af7c
|
[
"Apache-2.0"
] | 15
|
2018-04-26T08:17:18.000Z
|
2021-03-05T08:44:13.000Z
|
lib/python2.7/site-packages/networkx/linalg/tests/test_graphmatrix.py
|
nishaero/wifi-userseg-ryu
|
1132f2c813b79eff755bdd1a9e73e7ad3980af7c
|
[
"Apache-2.0"
] | null | null | null |
lib/python2.7/site-packages/networkx/linalg/tests/test_graphmatrix.py
|
nishaero/wifi-userseg-ryu
|
1132f2c813b79eff755bdd1a9e73e7ad3980af7c
|
[
"Apache-2.0"
] | 6
|
2018-04-12T15:49:27.000Z
|
2022-01-27T12:34:50.000Z
|
from nose import SkipTest
import networkx as nx
from networkx.generators.degree_seq import havel_hakimi_graph
class TestGraphMatrix(object):
    # Legacy nose attribute: run `nosetests -a 'not numpy'` to skip this suite.
    numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test

    @classmethod
    def setupClass(cls):
        # Import numpy/scipy lazily and publish the names as module globals so
        # the test methods can use them; skip the whole suite if unavailable.
        global numpy
        global assert_equal
        global assert_almost_equal
        try:
            import numpy
            import scipy
            from numpy.testing import assert_equal,assert_almost_equal
        except ImportError:
            raise SkipTest('SciPy not available.')

    def setUp(self):
        # Build a small fixed graph from a degree sequence (node 4 is
        # isolated) and precompute the expected matrices.
        deg=[3,2,2,1,0]
        self.G=havel_hakimi_graph(deg)
        # Expected oriented incidence matrix (rows = nodes, columns = edges).
        self.OI=numpy.array([[-1, -1, -1, 0],
                             [1, 0, 0, -1],
                             [0, 1, 0, 1],
                             [0, 0, 1, 0],
                             [0, 0, 0, 0]])
        # Expected unweighted adjacency matrix.
        self.A=numpy.array([[0, 1, 1, 1, 0],
                            [1, 0, 1, 0, 0],
                            [1, 1, 0, 0, 0],
                            [1, 0, 0, 0, 0],
                            [0, 0, 0, 0, 0]])
        # Weighted copy of G: every edge carries 'weight' 0.5 and 'other' 0.3.
        self.WG=nx.Graph( (u,v,{'weight':0.5,'other':0.3})
                          for (u,v) in self.G.edges_iter() )
        self.WG.add_node(4)
        self.WA=numpy.array([[0 , 0.5, 0.5, 0.5, 0],
                             [0.5, 0 , 0.5, 0 , 0],
                             [0.5, 0.5, 0 , 0 , 0],
                             [0.5, 0 , 0 , 0 , 0],
                             [0 , 0 , 0 , 0 , 0]])
        # Multigraph variants; MG2 has a duplicated (0, 1) edge.
        self.MG=nx.MultiGraph(self.G)
        self.MG2=self.MG.copy()
        self.MG2.add_edge(0,1)
        self.MG2A=numpy.array([[0, 2, 1, 1, 0],
                               [2, 0, 1, 0, 0],
                               [1, 1, 0, 0, 0],
                               [1, 0, 0, 0, 0],
                               [0, 0, 0, 0, 0]])
        self.MGOI=numpy.array([[-1, -1, -1, -1, 0],
                               [1, 1, 0, 0, -1],
                               [0, 0, 1, 0, 1],
                               [0, 0, 0, 1, 0],
                               [0, 0, 0, 0, 0]])

    def test_incidence_matrix(self):
        "Conversion to incidence matrix"
        # Plain, multi and weighted graphs all share the same incidence
        # structure; weights scale the entries when a weight key is given.
        assert_equal(nx.incidence_matrix(self.G,oriented=True).todense(),self.OI)
        assert_equal(nx.incidence_matrix(self.G).todense(),numpy.abs(self.OI))
        assert_equal(nx.incidence_matrix(self.MG,oriented=True).todense(),self.OI)
        assert_equal(nx.incidence_matrix(self.MG).todense(),numpy.abs(self.OI))
        assert_equal(nx.incidence_matrix(self.MG2,oriented=True).todense(),self.MGOI)
        assert_equal(nx.incidence_matrix(self.MG2).todense(),numpy.abs(self.MGOI))
        assert_equal(nx.incidence_matrix(self.WG,oriented=True).todense(),self.OI)
        assert_equal(nx.incidence_matrix(self.WG).todense(),numpy.abs(self.OI))
        assert_equal(nx.incidence_matrix(self.WG,oriented=True,
                                         weight='weight').todense(),0.5*self.OI)
        assert_equal(nx.incidence_matrix(self.WG,weight='weight').todense(),
                     numpy.abs(0.5*self.OI))
        assert_equal(nx.incidence_matrix(self.WG,oriented=True,weight='other').todense(),
                     0.3*self.OI)
        WMG=nx.MultiGraph(self.WG)
        WMG.add_edge(0,1,attr_dict={'weight':0.5,'other':0.3})
        assert_equal(nx.incidence_matrix(WMG,weight='weight').todense(),
                     numpy.abs(0.5*self.MGOI))
        assert_equal(nx.incidence_matrix(WMG,weight='weight',oriented=True).todense(),
                     0.5*self.MGOI)
        assert_equal(nx.incidence_matrix(WMG,weight='other',oriented=True).todense(),
                     0.3*self.MGOI)

    def test_adjacency_matrix(self):
        "Conversion to adjacency matrix"
        assert_equal(nx.adj_matrix(self.G).todense(),self.A)
        assert_equal(nx.adj_matrix(self.MG).todense(),self.A)
        assert_equal(nx.adj_matrix(self.MG2).todense(),self.MG2A)
        # nodelist restricts the matrix to the requested sub-block.
        assert_equal(nx.adj_matrix(self.G,nodelist=[0,1]).todense(),self.A[:2,:2])
        assert_equal(nx.adj_matrix(self.WG).todense(),self.WA)
        assert_equal(nx.adj_matrix(self.WG,weight=None).todense(),self.A)
        assert_equal(nx.adj_matrix(self.MG2,weight=None).todense(),self.MG2A)
        assert_equal(nx.adj_matrix(self.WG,weight='other').todense(),0.6*self.WA)
| 47.688889
| 89
| 0.520503
|
acfe394c8fe8f340d5820e1f663c583ebe712473
| 6,102
|
py
|
Python
|
config/settings/test.py
|
cgsunkel/data-hub-api
|
a92faabf73fb93b5bfd94fd465eafc3e29aa6d8e
|
[
"MIT"
] | null | null | null |
config/settings/test.py
|
cgsunkel/data-hub-api
|
a92faabf73fb93b5bfd94fd465eafc3e29aa6d8e
|
[
"MIT"
] | 4
|
2021-06-30T10:34:50.000Z
|
2021-06-30T10:34:51.000Z
|
config/settings/test.py
|
cgsunkel/data-hub-api
|
a92faabf73fb93b5bfd94fd465eafc3e29aa6d8e
|
[
"MIT"
] | null | null | null |
import environ

environ.Env.read_env()  # reads the .env file
env = environ.Env()

# NOTE(review): imports are interleaved with statements so the .env file is
# read before the common settings are evaluated - presumably intentional;
# confirm before reordering.
from config.settings.common import *

# The automatic connection configuration is disabled during tests because the connection is set up
# using different environment variables in the _es_client pytest fixture
SEARCH_CONFIGURE_CONNECTION_ON_READY = False
# We need to prevent Django from connecting signal receivers when the search app is initialised
# to stop them from firing during non-search tests
SEARCH_CONNECT_SIGNAL_RECEIVERS_ON_READY = False
# Test-only apps and search apps used by the test suite's fixtures.
INSTALLED_APPS += [
    'datahub.core.test.support',
    'datahub.documents.test.my_entity_document',
    'datahub.search.test.search_support',
]
SEARCH_APPS += [
    'datahub.search.test.search_support.simplemodel.SimpleModelSearchApp',
    'datahub.search.test.search_support.relatedmodel.RelatedModelSearchApp',
]
# Note that the prefix used for indexes created during tests is set dynamically in
# datahub/search/conftest.py (so that tests can be parallelised).
ES_INDEX_PREFIX = 'example-prefix'
ES_INDEX_SETTINGS = {
    **ES_INDEX_SETTINGS,
    'number_of_shards': 1,
    'number_of_replicas': 0,
    # Refresh is the process in Elasticsearch that makes newly-indexed documents available for
    # querying (see
    # https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-refresh.html
    # for more details).
    #
    # Relying on automatic refreshing in tests leads to flakiness, and so all tests that use
    # Elasticsearch explicitly refresh indices after documents have been added to Elasticsearch.
    #
    # This disables automatic refresh in tests to avoid inadvertently relying on it.
    'refresh_interval': -1,
}
DOCUMENT_BUCKET = 'test-bucket'
AV_V2_SERVICE_URL = 'http://av-service/'
OMIS_GENERIC_CONTACT_EMAIL = 'omis@example.com'
OMIS_NOTIFICATION_OVERRIDE_RECIPIENT_EMAIL = ''
OMIS_NOTIFICATION_ADMIN_EMAIL = 'fake-omis-admin@digital.trade.gov.uk'
OMIS_NOTIFICATION_API_KEY = ''
GOVUK_PAY_URL = 'https://payments.example.com/'
# 5 KiB limit keeps the admin CSV-import size tests fast.
INTERACTION_ADMIN_CSV_IMPORT_MAX_SIZE = 5 * 1024
# The default password hasher is intentionally slow and slows downs tests
# See https://docs.djangoproject.com/en/3.0/topics/testing/overview/#password-hashing
PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.MD5PasswordHasher',
]
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache'
    }
}
# Run Celery tasks synchronously inside the test process.
CELERY_TASK_ALWAYS_EAGER = True
# Stop WhiteNoise emitting warnings when running tests without running collectstatic first
WHITENOISE_AUTOREFRESH = True
WHITENOISE_USE_FINDERS = True
PAAS_IP_WHITELIST = ['1.2.3.4']
DISABLE_PAAS_IP_CHECK = False
# Fake Hawk credentials covering every scope combination exercised in tests.
HAWK_RECEIVER_CREDENTIALS = {
    'some-id': {
        'key': 'some-secret',
        'scopes': (HawkScope.activity_stream,),
    },
    'test-id-with-scope': {
        'key': 'test-key-with-scope',
        'scopes': (next(iter(HawkScope.__members__.values())),),
    },
    'test-id-without-scope': {
        'key': 'test-key-without-scope',
        'scopes': (),
    },
    'test-id-with-multiple-scopes': {
        'key': 'test-key-with-multiple-scopes',
        'scopes': list(HawkScope.__members__.values())[:2],
    },
    'test-id-with-metadata-scope': {
        'key': 'test-key-with-metadata-scope',
        'scopes': (HawkScope.metadata,),
    },
    'public-company-id': {
        'key': 'public-company-key',
        'scopes': (HawkScope.public_company,),
    },
    'data-flow-api-id': {
        'key': 'data-flow-api-key',
        'scopes': (HawkScope.data_flow_api,),
    },
    'omis-public-id': {
        'key': 'omis-public-key',
        'scopes': (HawkScope.public_omis,),
    },
}
DOCUMENT_BUCKETS = {
    'default': {
        'bucket': 'foo',
        'aws_access_key_id': 'bar',
        'aws_secret_access_key': 'baz',
        'aws_region': 'eu-west-2',
    },
    'investment': {
        'bucket': 'foo',
        'aws_access_key_id': 'bar',
        'aws_secret_access_key': 'baz',
        'aws_region': 'eu-west-2',
    },
    'report': {
        'bucket': 'foo',
        'aws_access_key_id': 'bar',
        'aws_secret_access_key': 'baz',
        'aws_region': 'eu-west-2',
    }
}
DIT_EMAIL_INGEST_BLACKLIST = [
    'blacklisted@trade.gov.uk',
]
DIT_EMAIL_DOMAINS = {
    'trade.gov.uk': [['exempt']],
    'digital.trade.gov.uk': [['spf', 'pass'], ['dmarc', 'bestguesspass'], ['dkim', 'pass']],
}
ACTIVITY_STREAM_OUTGOING_URL = 'http://activity.stream/'
ACTIVITY_STREAM_OUTGOING_ACCESS_KEY_ID = 'some-outgoing-id'
ACTIVITY_STREAM_OUTGOING_SECRET_ACCESS_KEY = 'some-outgoing-secret'
DATAHUB_NOTIFICATION_API_KEY = None
DNB_SERVICE_BASE_URL = 'http://dnb.service/api/'
DNB_SERVICE_TOKEN = 'dnbtoken1234'
DATAHUB_SUPPORT_EMAIL_ADDRESS = 'support@datahub.com'
STAFF_SSO_BASE_URL = 'http://sso.test/'
STAFF_SSO_AUTH_TOKEN = 'test-sso-token'
ADMIN_OAUTH2_ENABLED = True
ADMIN_OAUTH2_REQUEST_TIMEOUT = 15
ADMIN_OAUTH2_TOKEN_BYTE_LENGTH = 64
ADMIN_OAUTH2_BASE_URL = ''
ADMIN_OAUTH2_TOKEN_FETCH_PATH = 'http://sso-server/o/token/'
ADMIN_OAUTH2_USER_PROFILE_PATH = 'http://sso-server/o/v1/user/me/'
ADMIN_OAUTH2_AUTH_PATH = 'http://sso-server/o/authorize/'
ADMIN_OAUTH2_CLIENT_ID = 'client-id'
ADMIN_OAUTH2_CLIENT_SECRET = 'client-secret'
ADMIN_OAUTH2_LOGOUT_PATH = 'http://sso-server/o/logout'
CONSENT_SERVICE_BASE_URL = 'http://consent.service/'
CONSENT_SERVICE_HAWK_ID = 'some-id'
CONSENT_SERVICE_HAWK_KEY = 'some-secret'
COMPANY_MATCHING_SERVICE_BASE_URL = 'http://content.matching/'
COMPANY_MATCHING_HAWK_ID = 'some-id'
COMPANY_MATCHING_HAWK_KEY = 'some-secret'
EXPORT_WINS_SERVICE_BASE_URL = 'http://content.export-wins/'
EXPORT_WINS_HAWK_ID = 'some-id'
EXPORT_WINS_HAWK_KEY = 'some-secret'
ALLOW_TEST_FIXTURE_SETUP = True
# django-axes login throttling interferes with the SSO admin flow, so drop it
# from the middleware/backends when SSO admin auth is enabled.
if ADMIN_OAUTH2_ENABLED:
    if 'axes.middleware.AxesMiddleware' in MIDDLEWARE:
        MIDDLEWARE.remove('axes.middleware.AxesMiddleware')
    if 'axes.backends.AxesBackend' in AUTHENTICATION_BACKENDS:
        AUTHENTICATION_BACKENDS.remove('axes.backends.AxesBackend')
BED_USERNAME = 'test-user@digital.trade.gov.uk'
BED_PASSWORD = 'test-password'
BED_TOKEN = 'test-token'
BED_IS_SANDBOX = True
| 31.947644
| 98
| 0.712553
|
acfe3b5b558118b8c54b036f7afce62793140d23
| 2,317
|
py
|
Python
|
epi_forecast_stat_mech/statistical_models/linear_model_test.py
|
HopkinsIDD/EpiForecastStatMech
|
4ba57edff1ece0c56ec6dfa41eac4cfe4a1c66cb
|
[
"Apache-2.0"
] | 4
|
2020-04-11T17:24:00.000Z
|
2022-01-21T09:28:03.000Z
|
epi_forecast_stat_mech/statistical_models/linear_model_test.py
|
HopkinsIDD/EpiForecastStatMech
|
4ba57edff1ece0c56ec6dfa41eac4cfe4a1c66cb
|
[
"Apache-2.0"
] | 2
|
2021-05-03T23:48:22.000Z
|
2022-03-08T12:15:58.000Z
|
epi_forecast_stat_mech/statistical_models/linear_model_test.py
|
HopkinsIDD/EpiForecastStatMech
|
4ba57edff1ece0c56ec6dfa41eac4cfe4a1c66cb
|
[
"Apache-2.0"
] | 3
|
2020-04-13T18:39:56.000Z
|
2020-04-14T02:02:54.000Z
|
# Lint as: python3
"""Tests for epi_forecast_stat_mech.statistical_models.linear_model."""
from absl.testing import parameterized
from epi_forecast_stat_mech.statistical_models import linear_model
from epi_forecast_stat_mech.statistical_models import tree_util
import jax.numpy as jnp
from absl.testing import absltest
class LinearModelTest(parameterized.TestCase):
  """Shape-consistency tests for linear_model.LinearModel."""

  def assertBroadcastableTo(self, x, shape):
    """Asserts that array `x` can be broadcast to `shape`."""
    try:
      jnp.broadcast_to(x, shape)
    except ValueError:
      self.fail(f'The value `x` with shape {x.shape} '
                f'was not broadcastable to shape {shape}.')

  def get_model(self):
    # Factory hook so subclasses can exercise a different model.
    return linear_model.LinearModel()

  @parameterized.parameters(
      dict(covariate_shapes=(5, 5, 5),
           mech_param_shapes=(2, 3, 4),
           num_epidemics=20),
      dict(covariate_shapes=(1, 2, 3, 4, 5),
           mech_param_shapes=(100,),
           num_epidemics=1000),
  )
  def testShapes(
      self, covariate_shapes, mech_param_shapes, num_epidemics):
    """Checks parameter, prior, likelihood and prediction shapes agree."""
    covariate_dim = sum(covariate_shapes)
    prediction_dim = sum(mech_param_shapes)
    params = linear_model.LinearParameters.init(covariate_dim,
                                                prediction_dim)
    # Zero-filled pytrees with one (num_epidemics, c) leaf per shape entry.
    covariates = tree_util.tree_map(lambda c: jnp.zeros([num_epidemics, c]),
                                    covariate_shapes)
    mech_params = tree_util.tree_map(lambda c: jnp.zeros([num_epidemics, c]),
                                     mech_param_shapes)
    self.assertEqual(params.alpha.shape, (covariate_dim, prediction_dim))
    self.assertEqual(params.intercept.shape, (1, prediction_dim))
    model = self.get_model()
    # Prior should be broadcastable to the shape of `params`.
    log_prior = model.log_prior(params)
    packed_params, _ = tree_util.pack(params, axis=0)
    self.assertBroadcastableTo(log_prior, packed_params.shape)
    # Likelihood should be broadcastable to the shape of `mech_params`.
    log_likelihood = model.log_likelihood(params, covariates, mech_params)
    packed_mech_params, _ = tree_util.pack(mech_params)
    self.assertBroadcastableTo(log_likelihood, packed_mech_params.shape)
    prediction = model.predict(params, covariates)
    self.assertBroadcastableTo(prediction, packed_mech_params.shape)
if __name__ == '__main__':
absltest.main()
| 35.646154
| 77
| 0.698317
|
acfe3c43765d53202bdd417e5ea72226c33650dc
| 40,992
|
py
|
Python
|
syncplay/messages_it.py
|
AndrewAmmerlaan/syncplay
|
f880d4773238a31ac7046ff290910a4ff4920d53
|
[
"Apache-2.0"
] | null | null | null |
syncplay/messages_it.py
|
AndrewAmmerlaan/syncplay
|
f880d4773238a31ac7046ff290910a4ff4920d53
|
[
"Apache-2.0"
] | null | null | null |
syncplay/messages_it.py
|
AndrewAmmerlaan/syncplay
|
f880d4773238a31ac7046ff290910a4ff4920d53
|
[
"Apache-2.0"
] | null | null | null |
# coding:utf8
"""Italian dictionary"""
it = {
"LANGUAGE": "Italiano",
# Client notifications
"config-cleared-notification": "Impostazioni iniziali ripristinate. I cambiamenti saranno memorizzati quando salverai una configurazione valida.",
"relative-config-notification": "Caricato i file di configurazione relativi: {}",
"connection-attempt-notification": "Tentativo di connessione a {}:{}", # Port, IP
"reconnection-attempt-notification": "Connessione col server persa, tentativo di riconnesione in corso",
"disconnection-notification": "Disconnesso dal server",
"connection-failed-notification": "Connessione col server fallita",
"connected-successful-notification": "Connessione al server effettuata con successo",
"retrying-notification": "%s, Nuovo tentativo in %d secondi...", # Seconds
"reachout-successful-notification": "Collegamento stabilito con {} ({})",
"rewind-notification": "Riavvolgo a causa della differenza temporale con {}", # User
"fastforward-notification": "Avanzamento rapido a causa della differenza temporale con {}", # User
"slowdown-notification": "Rallento a causa della differenza temporale con {}", # User
"revert-notification": "Ripristino la velocità di riproduzione normale",
"pause-notification": "{} ha messo in pausa", # User
"unpause-notification": "{} ha ripreso la riproduzione", # User
"seek-notification": "{} è passato da {} a {}", # User, from time, to time
"current-offset-notification": "Offset corrente: {} secondi", # Offset
"media-directory-list-updated-notification": "Le cartelle multimediali di Syncplay sono state aggiornate.",
"room-join-notification": "{} è entranto nella stanza: '{}'", # User
"left-notification": "{} ha lasciato la stanza", # User
"left-paused-notification": "{} ha lasciato la stanza, {} ha messo in pausa", # User who left, User who paused
"playing-notification": "{} sta riproducendo '{}' ({})", # User, file, duration
"playing-notification/room-addendum": " nella stanza: '{}'", # Room
"not-all-ready": "Non pronti: {}", # Usernames
"all-users-ready": "Tutti i partecipanti sono pronti ({} utenti)", # Number of ready users
"ready-to-unpause-notification": "Ora sei pronto - premi ancora una volta per riprendere la riproduzione",
"set-as-ready-notification": "Ora sei pronto",
"set-as-not-ready-notification": "Non sei pronto",
"autoplaying-notification": "Riproduzione automatica in {}...", # Number of seconds until playback will start
"identifying-as-controller-notification": "Ti sei identificato come gestore della stanza con password '{}'...",
"failed-to-identify-as-controller-notification": "{} ha fallito l'identificazione come gestore della stanza.",
"authenticated-as-controller-notification": "{} autenticato come gestore della stanza",
"created-controlled-room-notification": "Stanza gestita '{}' creata con password '{}'. Per favore salva queste informazioni per una consultazione futura!\n\nIn managed rooms everyone is kept in sync with the room operator(s) who are the only ones who can pause, unpause, seek, and change the playlist.\n\nYou should ask regular viewers to join the room '{}' but the room operators can join the room '{}' to automatically authenticate themselves.", # RoomName, operatorPassword, roomName, roomName:operatorPassword # TODO: Translate
"file-different-notification": "Il file che stai riproducendo sembra essere diverso da quello di {}", # User
"file-differences-notification": "Il tuo file mostra le seguenti differenze: {}", # Differences
"room-file-differences": "Differenze: {}", # File differences (filename, size, and/or duration)
"file-difference-filename": "nome",
"file-difference-filesize": "dimensione",
"file-difference-duration": "durata",
"alone-in-the-room": "Non ci sono altri utenti nella stanza",
"different-filesize-notification": " (la dimensione del tuo file è diversa da quella degli altri partecipanti!)",
"userlist-playing-notification": "{} sta riproducendo:", # Username
"file-played-by-notification": "File: {} è in riproduzione da:", # File
"no-file-played-notification": "{} non sta riproducendo alcun file", # Username
"notplaying-notification": "Partecipanti che non stanno riproducendo alcun file:",
"userlist-room-notification": "Nella stanza '{}':", # Room
"userlist-file-notification": "File",
"controller-userlist-userflag": "Gestore",
"ready-userlist-userflag": "Pronto",
"update-check-failed-notification": "Controllo automatico degli aggiornamenti di Syncplay {} fallito. Vuoi visitare https://syncplay.pl/ per verificare manualmente la presenza di aggiornamenti?", # Syncplay version
"syncplay-uptodate-notification": "Syncplay è aggiornato",
"syncplay-updateavailable-notification": "Una nuova versione di Syncplay è disponibile. Vuoi visitare la pagina delle release?",
"mplayer-file-required-notification": "Utilizzare Syncplay con mplayer di selezionare il file all'avvio",
"mplayer-file-required-notification/example": "Esempio di utilizzo: syncplay [opzioni] [url|percorso/]nomefile",
"mplayer2-required": "Syncplay non è compatibile con MPlayer 1.x, per favore utilizza mplayer2 or mpv",
"unrecognized-command-notification": "Comando non riconosciuto",
"commandlist-notification": "Comandi disponibili:",
"commandlist-notification/room": "\tr [nome] - cambia stanza",
"commandlist-notification/list": "\tl - mostra la lista di utenti",
"commandlist-notification/undo": "\tu - annulla l'ultima ricerca",
"commandlist-notification/pause": "\tp - attiva o disattiva la pausa",
"commandlist-notification/seek": "\t[s][+-]tempo - salta all'istante di tempo dato, se + o - non è specificato si considera il tempo assoluto in secondi o min:sec",
"commandlist-notification/help": "\th - mostra questo help",
"commandlist-notification/toggle": "\tt - attiva o disattiva la funzionalità \"pronto\"",
"commandlist-notification/create": "\tc [nome] - crea una stanza gestita usando il nome della stanza attuale",
"commandlist-notification/auth": "\ta [password] - autentica come gestore della stanza, utilizzando la password del gestore",
"commandlist-notification/chat": "\tch [message] - invia un messaggio nella chat della stanza",
"commandList-notification/queue": "\tqa [file/url] - add file or url to bottom of playlist", # TO DO: Translate
"commandList-notification/playlist": "\tql - show the current playlist", # TO DO: Translate
"commandList-notification/select": "\tqs [index] - select given entry in the playlist", # TO DO: Translate
"commandList-notification/delete": "\tqd [index] - delete the given entry from the playlist", # TO DO: Translate
"syncplay-version-notification": "Versione di Syncplay: {}", # syncplay.version
"more-info-notification": "Maggiori informazioni a: {}", # projectURL
"gui-data-cleared-notification": "Syncplay ha ripristinato i dati dell'interfaccia relativi ai percorsi e allo stato delle finestre.",
"language-changed-msgbox-label": "La lingua sarà cambiata quando avvierai Syncplay.",
"promptforupdate-label": "Ti piacerebbe che, di tanto in tanto, Syncplay controllasse automaticamente la presenza di aggiornamenti?",
"media-player-latency-warning": "Attenzione: il media player ha impiegato {} secondi per rispondere. Se stai avendo problemi di sincronizzazione, chiudi delle applicazioni per liberare le risorse di sistema e, se ciò non dovesse avere alcun effetto, prova un altro media player.", # Seconds to respond
"mpv-unresponsive-error": "mpv non ha risposto per {} secondi, quindi sembra non funzionare correttamente. Per favore, riavvia Syncplay.", # Seconds to respond
# Client prompts
"enter-to-exit-prompt": "Premi Invio per uscire\n",
# Client errors
"missing-arguments-error": "Alcuni argomenti obbligatori non sono stati trovati. Fai riferimento a --help",
"server-timeout-error": "Connessione col server scaduta",
"mpc-slave-error": "Non è possibile avviare MPC in modalità slave!",
"mpc-version-insufficient-error": "La tua versione di MPC è troppo vecchia, per favore usa `mpc-hc` >= `{}`",
"mpc-be-version-insufficient-error": "La tua versione di MPC è troppo vecchia, per favore usa `mpc-be` >= `{}`",
"mpv-version-error": "Syncplay non è compatibile con questa versione di mpv. Per favore usa un'altra versione di mpv (es. Git HEAD).",
"mpv-failed-advice": "The reason mpv cannot start may be due to the use of unsupported command line arguments or an unsupported version of mpv.", # TODO: Translate
"player-file-open-error": "Il player non è riuscito ad aprire il file",
"player-path-error": "Il path del player non è configurato correttamente. I player supportati sono: mpv, mpv.net, VLC, MPC-HC, MPC-BE e mplayer2",
"hostname-empty-error": "Il campo hostname non può essere vuoto",
"empty-error": "Il campo {} non può esssere vuoto", # Configuration
"media-player-error": "Errore media player: \"{}\"", # Error line
"unable-import-gui-error": "Non è possibile importare le librerie di interfaccia grafica. Hai bisogno di PySide per poter utilizzare l'interfaccia grafica.",
"unable-import-twisted-error": "Non è possibile importare Twisted. Si prega di installare Twisted v16.4.0 o superiore.",
"arguments-missing-error": "Alcuni argomenti obbligatori non sono stati trovati. Fai riferimento a --help",
"unable-to-start-client-error": "Impossibile avviare il client",
"player-path-config-error": "Il percorso del player non è configurato correttamente. I player supportati sono: mpv, mpv.net, VLC, MPC-HC, MPC-BE e mplayer2.",
"no-file-path-config-error": "Deve essere selezionato un file prima di avviare il player",
"no-hostname-config-error": "Il campo hostname non può essere vuoto",
"invalid-port-config-error": "La porta deve essere valida",
"empty-value-config-error": "Il campo {} non può essere vuoto", # Config option
"not-json-error": "Non è una stringa con codifica JSON\n",
"hello-arguments-error": "Not enough Hello arguments\n", # DO NOT TRANSLATE
"version-mismatch-error": "La versione del client è diversa da quella del server\n",
"vlc-failed-connection": "Impossibile collegarsi a VLC. Se non hai installato syncplay.lua e stai usando l'ultima versione di VLC, fai riferimento a https://syncplay.pl/LUA/ per istruzioni. Syncplay and VLC 4 are not currently compatible, so either use VLC 3 or an alternative such as mpv.", # TO DO: TRANSLATE
"vlc-failed-noscript": "VLC ha segnalato che lo script di interfaccia syncplay.lua non è stato installato. Per favore, fai riferimento a https://syncplay.pl/LUA/ per istruzioni.",
"vlc-failed-versioncheck": "Questa versione di VLC non è supportata da Syncplay.",
"vlc-initial-warning": 'VLC does not always provide accurate position information to Syncplay, especially for .mp4 and .avi files. If you experience problems with erroneous seeking then please try an alternative media player such as <a href="https://mpv.io/">mpv</a> (or <a href="https://github.com/stax76/mpv.net/">mpv.net</a> for Windows users).', # TODO: Translate
"feature-sharedPlaylists": "playlist condivise", # used for not-supported-by-server-error
"feature-chat": "chat", # used for not-supported-by-server-error
"feature-readiness": "pronto", # used for not-supported-by-server-error
"feature-managedRooms": "stanze gestite", # used for not-supported-by-server-error
"not-supported-by-server-error": "La feature {} non è supportata da questo server..", # feature
"shared-playlists-not-supported-by-server-error": "Le playlist condivise potrebbero non essere supportata dal server. È necessario un server con Syncplay {}+ per assicurarsi che funzionino correttamente, tuttavia il server sta utilizzando Syncplay {}.", # minVersion, serverVersion
"shared-playlists-disabled-by-server-error": "Le playlist condivise sono state disabilitate nella configurazione del server. Per utilizzarle, dovrai collegarti a un altro server.",
"invalid-seek-value": "Valore di ricerca non valido",
"invalid-offset-value": "Valore di offset non valido",
"switch-file-not-found-error": "Impossibile selezionare il file '{0}'. Syncplay osserva solo le cartelle multimediali specificate.", # File not found
"folder-search-timeout-error": "La ricerca nelle cartelle multimediali è stata interrotta perché l'analisi di '{}' sta impiegando troppo tempo. Ciò accade se si aggiunge nella lista di ricerca una cartella con troppe sottocartelle. Per riabilitare la selezione automatica dei file seleziona File->Imposta cartelle multimediali nella barra dei menù e rimuovi questa cartella, o sostituiscila con una sottocartella appropriata. Se la cartella è idonea, è possibile riabilitarla selezionando File->Imposta cartelle multimediali e premendo 'OK'.", # Folder
"folder-search-first-file-timeout-error": "La ricerca dei media in '{}' è stata interrotta perché l'accesso alla cartella sta impiegando troppo tempo. Ciò accade se questa si trova in un disco di rete oppure se hai impostato il blocco della rotazione del disco rigido dopo un certo periodo di inattività. Per riabilitare la selezione automatica dei file seleziona File->Imposta cartelle multimediali, quindi rimuovi la cartella oppure risolvi il problema (es. cambiando le impostazioni di risparmio energetico).", # Folder
"added-file-not-in-media-directory-error": "Hai selezionato un file in '{}', che non è impostata come cartella multimediale. Puoi aggiungerla come cartella multimediale selezionando File->Imposta cartelle multimediali nella barra dei menù.", # Folder
"no-media-directories-error": "Nessuna cartella multimediale è stata configurata. Per permettere il corretto funzionamento delle playlist condivise e la selezione automatica dei file, naviga in File->Imposta cartelle multimediali e specifica dove Syncplay deve ricercare i file multimediali.",
"cannot-find-directory-error": "Impossibile trovare la cartella multimediale '{}'. Per aggiornare la lista delle cartelle multimediali seleziona File->Imposta cartelle multimediali dalla barra dei menù e specifica dove Syncplay deve ricercare i file multimediali.",
"failed-to-load-server-list-error": "Impossibile caricare la lista dei server pubblici. Per favore, visita https://www.syncplay.pl/ con il tuo browser.",
# Client arguments
"argument-description": 'Programma per sincronizzare la riproduzione di media player multipli attraverso la rete.',
"argument-epilog": 'Se non è specificata alcuna opzione saranno utilizzati i valori _config',
"nogui-argument": 'non mostrare l\'interfaccia grafica',
"host-argument": 'indirizzo del server',
"name-argument": 'username desiderato',
"debug-argument": 'modalità debug',
"force-gui-prompt-argument": 'mostra la finestra di configurazione',
"no-store-argument": 'non salvare i valori in .syncplay',
"room-argument": 'stanza di default',
"password-argument": 'password del server',
"player-path-argument": 'percorso dell\'eseguibile del tuo player',
"file-argument": 'file da riprodurre',
"args-argument": 'opzioni del player, se hai bisogno di utilizzare opzioni che iniziano con - anteponi un singolo \'--\'',
"clear-gui-data-argument": 'ripristina il percorso e i dati impostati tramite interfaccia grafica e salvati come QSettings',
"language-argument": 'lingua per i messaggi di Syncplay (de/en/ru/it/es/pt_BR/pt_PT)',
"version-argument": 'mostra la tua versione',
"version-message": "Stai usando la versione di Syncplay {} ({})",
"load-playlist-from-file-argument": "loads playlist from text file (one entry per line)", # TODO: Translate
# Client labels
"config-window-title": "Configurazione di Syncplay",
"connection-group-title": "Impostazioni di connessione",
"host-label": "Indirizzo del server: ",
"name-label": "Username (opzionale):",
"password-label": "Password del server (se necessaria):",
"room-label": "Stanza di default: ",
"roomlist-msgbox-label": "Edit room list (one per line)", # TODO: Translate
"media-setting-title": "Impostazioni del media player",
"executable-path-label": "Percorso del media player:",
"media-path-label": "Percorso del video (opzionale):",
"player-arguments-label": "Opzioni del player (se necessarie):",
"browse-label": "Sfoglia",
"update-server-list-label": "Aggiorna lista",
"more-title": "Mostra altre impostazioni",
"never-rewind-value": "Mai",
"seconds-suffix": " sec",
"privacy-sendraw-option": "Invio semplice",
"privacy-sendhashed-option": "Invio cifrato",
"privacy-dontsend-option": "Non inviare",
"filename-privacy-label": "Nome del file:",
"filesize-privacy-label": "Dimensione del file:",
"checkforupdatesautomatically-label": "Controlla automaticamente gli aggiornamenti di Syncplay",
"autosavejoinstolist-label": "Add rooms you join to the room list", # TO DO: Translate
"slowondesync-label": "Rallenta in caso di sfasamento minimo (non supportato su MPC-HC/BE)",
"rewindondesync-label": "Riavvolgi in caso di grande sfasamento (consigliato)",
"fastforwardondesync-label": "Avanzamento rapido in caso di ritardo (consigliato)",
"dontslowdownwithme-label": "Non rallentare o riavvolgere gli altri utenti (sperimentale)",
"pausing-title": "Pausa",
"pauseonleave-label": "Metti in pausa quando gli altri utenti lasciano la stanza (es. disconnessione)",
"readiness-title": "Stato iniziale di 'pronto'",
"readyatstart-label": "Imposta sempre il mio stato come \"pronto\" a guardare",
"forceguiprompt-label": "Non mostrare la finestra di configurazione di Syncplay a ogni avvio", # (Inverted)
"showosd-label": "Abilita i messaggi OSD",
"showosdwarnings-label": "Mostra gli avvisi (es. file differenti, utenti non pronti)",
"showsameroomosd-label": "Mostra gli eventi della tua stanza",
"shownoncontrollerosd-label": "Mostra gli eventi dei non gestori nelle stanze gestite",
"showdifferentroomosd-label": "Mostra gli eventi di altre stanze",
"showslowdownosd-label": "Mostra le notifiche di rallentamento / riavvolgimento",
"language-label": "Lingua:",
"automatic-language": "Predefinita ({})", # Default language
"showdurationnotification-label": "Avvisa in caso di mancata corrispondenza della durata del file",
"basics-label": "Generali",
"readiness-label": "Play/Pausa",
"misc-label": "Varie",
"core-behaviour-title": "Comportamento principale della stanza",
"syncplay-internals-title": "Funzionamento di Syncplay",
"syncplay-mediasearchdirectories-title": "Cartelle contenenti i file multimediali",
"syncplay-mediasearchdirectories-label": "Cartelle contenenti i file multimediali (un solo percorso per riga)",
"sync-label": "Sincronia", # don't have better options as the label won't fit in the panel.
"sync-otherslagging-title": "Se gli altri partecipanti non sono sincronizzati...",
"sync-youlaggging-title": "Se tu sei non sei sincronizzato...",
"messages-label": "Messaggi",
"messages-osd-title": "Impostazioni On-Screen Display",
"messages-other-title": "Altre impostazioni dello schermo",
"chat-label": "Chat",
"privacy-label": "Privacy", # Currently unused, but will be brought back if more space is needed in Misc tab
"privacy-title": "Impostazioni privacy",
"unpause-title": "Premendo play, imposta il tuo stato su \"pronto\" e:",
"unpause-ifalreadyready-option": "Riprendi la riproduzione se eri già pronto",
"unpause-ifothersready-option": "Riprendi la riproduzione se eri già pronto o se gli altri partecipanti sono pronti (default)",
"unpause-ifminusersready-option": "Riprendi la riproduzione se eri già pronto o se un numero minimo di partecipanti è pronto",
"unpause-always": "Riprendi sempre la riproduzione",
"syncplay-trusteddomains-title": "Domini fidati (per streaming e i contenuti in rete)",
"chat-title": "Inserimento messaggi di chat",
"chatinputenabled-label": "Abilita la chat su mpv",
"chatdirectinput-label": "Abilita la chat istantanea (evita di dover premere Invio per chattare)",
"chatinputfont-label": "Font dell'input della chat",
"chatfont-label": "Imposta font",
"chatcolour-label": "Imposta colore",
"chatinputposition-label": "Posizione dell'area di inserimento testo in mpv",
"chat-top-option": "In alto",
"chat-middle-option": "Al centro",
"chat-bottom-option": "In basso",
"chatoutputheader-label": "Output messaggi di chat",
"chatoutputfont-label": "Font dell'output della chat",
"chatoutputenabled-label": "Abilita l'output della chat nel media player (al momento solo mpv è supportato)",
"chatoutputposition-label": "Modalità di output",
"chat-chatroom-option": "Stile chatroom",
"chat-scrolling-option": "A scorrimento",
"mpv-key-tab-hint": "[TAB] per attivare le scorciatoie da tastiera e disattivare la chat.",
"mpv-key-hint": "[Invio] per inviare un messaggio. [Esc] per uscire dalla modalità chat.",
"alphakey-mode-warning-first-line": "Puoi utilizzare temporaneamente i vecchi comandi di mpv con i tasti a-z.",
"alphakey-mode-warning-second-line": "Premi [TAB] per ritornare alla modalità chat di Syncplay.",
"help-label": "Aiuto",
"reset-label": "Elimina configurazione",
"run-label": "Avvia Syncplay",
"storeandrun-label": "Salva la configurazione e avvia Syncplay",
"contact-label": "Sentiti libero di inviare un'e-mail a <a href=\"mailto:dev@syncplay.pl\"><nobr>dev@syncplay.pl</nobr></a>, chattare tramite il <a href=\"https://webchat.freenode.net/?channels=#syncplay\"><nobr>canale IRC #Syncplay</nobr></a> su irc.freenode.net, <a href=\"https://github.com/Uriziel/syncplay/issues\"><nobr>segnalare un problema</nobr></a> su GitHub, <a href=\"https://www.facebook.com/SyncplaySoftware\"><nobr>lasciare un like sulla nostra pagina Facebook</nobr></a>, <a href=\"https://twitter.com/Syncplay/\"><nobr>seguirci su Twitter</nobr></a>, o visitare <a href=\"https://syncplay.pl/\"><nobr>https://syncplay.pl/</nobr></a>. Non usare Syncplay per inviare dati sensibili.",
"joinroom-label": "Entra nella stanza",
"joinroom-menu-label": "Entra nella stanza {}",
"seektime-menu-label": "Vai a...",
"undoseek-menu-label": "Annulla vai a...",
"play-menu-label": "Play",
"pause-menu-label": "Pausa",
"playbackbuttons-menu-label": "Mostra i controlli della riproduzione",
"autoplay-menu-label": "Mostra il tasto di riproduzione automatica",
"autoplay-guipushbuttonlabel": "Riproduci quando tutti sono pronti",
"autoplay-minimum-label": "Minimo utenti pronti:",
"sendmessage-label": "Invia",
"ready-guipushbuttonlabel": "Sono pronto a guardare!",
"roomuser-heading-label": "Stanza / Utente",
"size-heading-label": "Dimensione",
"duration-heading-label": "Durata",
"filename-heading-label": "Nome del file",
"notifications-heading-label": "Notifiche",
"userlist-heading-label": "Lista degli utenti nella stanza",
"browseformedia-label": "Seleziona i file multimediali",
"file-menu-label": "&File", # & precedes shortcut key
"openmedia-menu-label": "&Apri file multimediali",
"openstreamurl-menu-label": "Apri indirizzo di &rete",
"setmediadirectories-menu-label": "Imposta &cartelle multimediali",
"loadplaylistfromfile-menu-label": "&Load playlist from file", # TODO: Translate
"saveplaylisttofile-menu-label": "&Save playlist to file", # TODO: Translate
"exit-menu-label": "&Esci",
"advanced-menu-label": "&Avanzate",
"window-menu-label": "&Finestra",
"setoffset-menu-label": "Imposta &offset",
"createcontrolledroom-menu-label": "&Crea stanza gestita",
"identifyascontroller-menu-label": "&Identificati come operatore della stanza",
"settrusteddomains-menu-label": "Imposta &domini fidati",
"addtrusteddomain-menu-label": "Aggiungi {} come dominio fidato", # Domain
"edit-menu-label": "&Modifica",
"cut-menu-label": "&Taglia",
"copy-menu-label": "&Copia",
"paste-menu-label": "&Incolla",
"selectall-menu-label": "&Seleziona tutto",
"playback-menu-label": "&Riproduzione",
"help-menu-label": "&Aiuto",
"userguide-menu-label": "Apri guida &utente",
"update-menu-label": "Controlla la presenza di &aggiornamenti",
"startTLS-initiated": "Tentativo di connessione sicura in corso",
"startTLS-secure-connection-ok": "Connessione sicura stabilita ({})",
"startTLS-server-certificate-invalid": 'Connessione sicura non riuscita. Il certificato di sicurezza di questo server non è valido. La comunicazione potrebbe essere intercettata da una terza parte. Per ulteriori dettagli e informazioni sulla risoluzione del problema, clicca <a href="https://syncplay.pl/trouble">qui</a>.',
"startTLS-server-certificate-invalid-DNS-ID": "Syncplay does not trust this server because it uses a certificate that is not valid for its hostname.", # TODO: Translate
"startTLS-not-supported-client": "Questo client non supporta TLS",
"startTLS-not-supported-server": "Questo server non supporta TLS",
# TLS certificate dialog
"tls-information-title": "Informazioni sul certificato",
"tls-dialog-status-label": "<strong>Syncplay è connesso a {} tramite una connessione codificata.</strong>",
"tls-dialog-desc-label": "La codifica con un certificato digitale mantiene private le informazioni quando vengono<br/>inviate dal/al server {}.",
"tls-dialog-connection-label": "Informazioni codificate usando Transport Layer Security (TLS), versione {} usando gli<br/>algoritmi di cifratura: {}.",
"tls-dialog-certificate-label": "Certificato rilasciato da {} valido fino al {}.",
# About dialog
"about-menu-label": "&Informazioni su Syncplay",
"about-dialog-title": "Informazioni su Syncplay",
"about-dialog-release": "Versione {} release {}",
"about-dialog-license-text": "Rilasciato sotto Apache License, Version 2.0",
"about-dialog-license-button": "Licenza",
"about-dialog-dependencies": "Dipendenze",
"setoffset-msgbox-label": "Imposta offset",
"offsetinfo-msgbox-label": "Offset (vedi https://syncplay.pl/guide/ per istruzioni):",
"promptforstreamurl-msgbox-label": "Apri URL",
"promptforstreamurlinfo-msgbox-label": "Indirizzo di rete",
"addfolder-label": "Aggiungi cartella",
"adduris-msgbox-label": "Aggiungi gli indirizzi alla playlist (uno per riga)",
"editplaylist-msgbox-label": "Imposta playlist (una per riga)",
"trusteddomains-msgbox-label": "Domini a cui è lecito passare automaticamente (uno per riga)",
"createcontrolledroom-msgbox-label": "Crea stanza gestita",
"controlledroominfo-msgbox-label": "Inserisci il nome della stanza gestita\r\n(vedi https://syncplay.pl/guide/ per istruzioni):",
"identifyascontroller-msgbox-label": "Identificati come operatore della stanza",
"identifyinfo-msgbox-label": "Inserisci la password dell'operatore per questa stanza\r\n(vedi https://syncplay.pl/guide/ per istruzioni):",
"public-server-msgbox-label": "Seleziona il server pubblico per questa sessione",
"megabyte-suffix": " MB",
# Tooltips
"host-tooltip": "Hostname o indirizzo IP a cui collegarsi e, se necessario, includere la porta (es. syncplay.pl:8999). La sincronizzazione avviene solo con gli utenti collegati allo stesso server/porta.",
"name-tooltip": "Il nome utente con cui sarai riconosciuto. Nessuna registrazione necessaria, cosi potrai sempre cambiarlo. Se lasciato vuoto, viene scelto un nome casuale.",
"password-tooltip": "La password è necessaria solo in caso di connessione a server privati.",
"room-tooltip": "La stanza in cui entrare dopo la connessione. Può assumere qualsiasi nome, ma ricorda che sarai sincronizzato solo con gli utenti nella stessa stanza.",
"edit-rooms-tooltip": "Edit room list.", # TO DO: Translate
"executable-path-tooltip": "Percorso del media player desiderato (scegliere tra mpv, mpv.net, VLC, MPC-HC/BE or mplayer2).",
"media-path-tooltip": "Percorso del video o stream da aprire. Necessario per mplayer2.",
"player-arguments-tooltip": "Argomenti da linea di comando aggiuntivi da passare al media player scelto.",
"mediasearcdirectories-arguments-tooltip": "Cartelle dove Syncplay cercherà i file multimediali, es. quando usi la funzione click to switch. Syncplay cercherà anche nelle sottocartelle.",
"more-tooltip": "Mostra le impostazioni usate meno frequentemente.",
"filename-privacy-tooltip": "Modalità di invio al server del nome del file attualmente in riproduzione.",
"filesize-privacy-tooltip": "Modalità di invio al server della dimensione del file attualmente in riproduzione.",
"privacy-sendraw-tooltip": "Invia questa informazione in chiaro. Questa è l'impostazione predefinita per la maggior parte delle funzionalità.",
"privacy-sendhashed-tooltip": "Invia una versione cifrata dell'informazione, rendendola meno visibile agli altri client.",
"privacy-dontsend-tooltip": "Non inviare questa informazione al server. Questo garantisce massima privacy.",
"checkforupdatesautomatically-tooltip": "Controlla regolarmente la presenza di nuove versioni di Syncplay.",
"autosavejoinstolist-tooltip": "When you join a room in a server, automatically remember the room name in the list of rooms to join.", # TO DO: Translate
"slowondesync-tooltip": "Riduce temporaneamente la velocità di riproduzione quando c'è bisogno di sincronizzarti con gli altri utenti. Non supportato su MPC-HC/BE.",
"dontslowdownwithme-tooltip": "Gli altri utenti non vengono rallentati se non sei sincronizzato. Utile per i gestori della stanza.",
"pauseonleave-tooltip": "Mette in pausa la riproduzione se vieni disconnesso o se qualcuno lascia la stanza.",
"readyatstart-tooltip": "Imposta il tuo stato su \"pronto\" all'avvio (in caso contrario, sarai su \"non pronto\" finché non cambierai il tuo stato)",
"forceguiprompt-tooltip": "La finestra di configurazione non viene mostrata quando apri Syncplay.",
"nostore-tooltip": "Avvia Syncplay con la configurazione scelta, ma non salva le impostazioni.",
"rewindondesync-tooltip": "Torna indietro quando necessario per ristabilire la sincronizzazione. Disabilitare quest'opzione può causare gravi problemi di sincronizzazione!",
"fastforwardondesync-tooltip": "Avanza rapidamente quando non sei sincronizzato col gestore della stanza (usa una posizione fittizia se 'Non rallentare o riavvolgere gli altri utenti' è abilitato).",
"showosd-tooltip": "Invia i messaggi di Syncplay al media player tramite OSD.",
"showosdwarnings-tooltip": "Mostra gli avvisi in caso di riproduzione di un file differente, se sei l'unico utente nella stanza, se ci sono utenti non pronti, ecc.",
"showsameroomosd-tooltip": "Mostra le notifiche OSD per gli eventi relativi alla stanza in cui si trova l'utente.",
"shownoncontrollerosd-tooltip": "Mostra le notifiche OSD per gli eventi relativi ai non operatori presenti nelle stanze gestite.",
"showdifferentroomosd-tooltip": "Mostra le notifiche OSD per gli eventi relativi alle stanze in cui l'utente non si trova.",
"showslowdownosd-tooltip": "Mostra le notifiche di rallentamento / riavvolgimento in caso di differenza temporale.",
"showdurationnotification-tooltip": "Utile quando manca un segmento di un file con più parti. Può causare dei falsi positivi.",
"language-tooltip": "Lingua da utilizzare in Syncplay.",
"unpause-always-tooltip": "Se riprendi la riproduzione, il tuo stato cambia in \"pronto\" e la riproduzione viene avviata, piuttosto che impostarti solo su pronto.",
"unpause-ifalreadyready-tooltip": "Se riprendi la riproduzione quando non sei \"pronto\", verrai impostato su pronto - ripeti il comando ancora una volta per avviare la riproduzione.",
"unpause-ifothersready-tooltip": "Se riprendi la riproduzione quando non sei \"pronto\" la riproduzione verrà avviata solo se gli altri sono pronti.",
"unpause-ifminusersready-tooltip": "Se riprendi la riproduzione quando non sei \"pronto\", la riproduzione verrà avviata solo se un numero minimo di utenti è \"pronto\".",
"trusteddomains-arguments-tooltip": "Domini verso cui è possibile collegarsi automaticamente quando le playlist condivise sono abilitate.",
"chatinputenabled-tooltip": "Abilita l'input della chat in mpv (premi Invio per chattare, per inviare ed Esc per cancellare)",
"chatdirectinput-tooltip": "Evita di dover premere Invio per aprire l'input della chat in mpv. Premi TAB in mpv per disabilitare temporaneamente questa funzione.",
"font-label-tooltip": "Font usato nell'input della chat in mpv. Non influenza cosa vedono gli altri, vale solo per te.",
"set-input-font-tooltip": "Font usato nell'input della chat in mpv. Non influenza cosa vedono gli altri, vale solo per te.",
"set-input-colour-tooltip": "Colore del font usato nell'input della chat in mpv. Non influenza cosa vedono gli altri, vale solo per te.",
"chatinputposition-tooltip": "Posizione dell'input della chat in mpv quando premi Invio.",
"chatinputposition-top-tooltip": "Posiziona l'input della chat in cima alla finestra di mpv.",
"chatinputposition-middle-tooltip": "Posizione l'input della chat al centro della finestra di mpv.",
"chatinputposition-bottom-tooltip": "Posiziona l'input della chat in basso alla finestra di mpv.",
"chatoutputenabled-tooltip": "Mostra i messaggi di chat nell'OSD (se supportato dal media player).",
"font-output-label-tooltip": "Font dell'output della chat.",
"set-output-font-tooltip": "Font usato per mostrare i messaggi di chat.",
"chatoutputmode-tooltip": "Come sono mostrati i messaggi di chat.",
"chatoutputmode-chatroom-tooltip": "Mostra i nuovi messaggi di chat al di sotto di quelli precedenti.",
"chatoutputmode-scrolling-tooltip": "Scorri il testo della chat da destra a sinistra.",
"help-tooltip": "Apri la guida utente su syncplay.pl.",
"reset-tooltip": "Ripristina le impostazioni iniziali di Syncplay.",
"update-server-list-tooltip": "Scarica la lista dei server pubblici da syncplay.pl.",
"sslconnection-tooltip": "Connessione sicura al server. Clicca per informazioni sul certificato.",
"joinroom-tooltip": "Lascia la stanza attuale e entra in quella specificata.",
"seektime-msgbox-label": "Salta all'istante di tempo specificato (in secondi / min:sec). Usa +/- per una ricerca relativa.",
"ready-tooltip": "Indica quando sei pronto a guardare.",
"autoplay-tooltip": "Avvia la riproduzione automatica quando il numero minimo di utenti è pronto.",
"switch-to-file-tooltip": "Doppio click per passare a {}", # Filename
"sendmessage-tooltip": "Invia il messaggio alla stanza",
# In-userlist notes (GUI)
"differentsize-note": "Dimensione file diversa!",
"differentsizeandduration-note": "Durata e dimensione file diversi!",
"differentduration-note": "Durata diversa!",
"nofile-note": "(Nessun file in riproduzione)",
# Server messages to client
"new-syncplay-available-motd-message": "Stai usando Syncplay {} ma una nuova versione è disponibile presso https://syncplay.pl", # ClientVersion
# Server notifications
"welcome-server-notification": "Benvenuto nel server Syncplay, ver. {0}", # version
"client-connected-room-server-notification": "{0}({2}) connesso alla stanza '{1}'", # username, host, room
"client-left-server-notification": "{0} ha lasciato il server", # name
"no-salt-notification": "NOTA BENE: In futuro, per consentire il corretto funzionamento delle password generate da questo server (per le stanze gestite), aggiungi da linea di comando il seguente argomento prima di avviare il server Syncplay: --salt {}", # Salt
# Server arguments
"server-argument-description": 'Programma per sincronizzare la riproduzione di media player multipli attraverso la rete. Modulo server.',
"server-argument-epilog": 'Se non è specificata alcuna opzione saranno utilizzati i valori _config',
"server-port-argument": 'Porta TCP del server',
"server-password-argument": 'password del server',
"server-isolate-room-argument": 'Mantiene le stanze isolate',
"server-salt-argument": "usare stringhe casuali per generare le password delle stanze gestite",
"server-disable-ready-argument": "disabilita la funzionalità \"pronto\"",
"server-motd-argument": "percorso del file da cui verrà letto il messaggio del giorno",
"server-chat-argument": "abilita o disabilita la chat",
"server-chat-maxchars-argument": "Numero massimo di caratteri in un messaggio di chat (default è {})", # Default number of characters
"server-maxusernamelength-argument": "Numero massimo di caratteri in un nome utente (default è {})",
"server-stats-db-file-argument": "Abilita la raccolta dei dati statistici nel file SQLite indicato",
"server-startTLS-argument": "Abilita il protocollo TLS usando i certificati contenuti nel percorso indicato",
"server-messed-up-motd-unescaped-placeholders": "Il messaggio del giorno ha dei caratteri non 'escaped'. Tutti i simboli $ devono essere doppi ($$).",
"server-messed-up-motd-too-long": "Il messaggio del giorno è troppo lungo - numero massimo di caratteri è {}, {} trovati.",
# Server errors
"unknown-command-server-error": "Comando non riconosciuto {}", # message
"not-json-server-error": "Non è una stringa in codifica JSON {}", # message
"line-decode-server-error": "Non è una stringa utf-8",
"not-known-server-error": "Devi essere autenticato dal server prima di poter inviare questo comando",
"client-drop-server-error": "Il client è caduto: {} -- {}", # host, error
"password-required-server-error": "È richiesta una password",
"wrong-password-server-error": "La password inserita è errata",
"hello-server-error": "Not enough Hello arguments", # DO NOT TRANSLATE
# Playlists
"playlist-selection-changed-notification": "{} ha cambiato il file selezionato nella playlist", # Username
"playlist-contents-changed-notification": "{} ha aggiornato la playlist", # Username
"cannot-find-file-for-playlist-switch-error": "Impossibile trovare il file {} nelle cartelle multimediali per permettere il cambio di file tramite la playlist!", # Filename
"cannot-add-duplicate-error": "Impossibile aggiungere una seconda voce per '{}' alla playlist. Non è possibile avere file duplicati.", # Filename
"cannot-add-unsafe-path-error": "Impossibile caricare automaticamente {} perché non è presente nei domini fidati. Puoi passare all'inserimento manuale facendo doppio click sull'indirizzo nella playlist, oppure aggiungerlo ai domini fidati tramite File->Avanzate->Imposta domini fidati. Cliccando col tasto destro del mouse su un indirizzo puoi impostare il suo dominio come fidato tramite il menù contestuale.", # Filename
"sharedplaylistenabled-label": "Abilita le playlist condivise",
"removefromplaylist-menu-label": "Rimuovi dalla playlist",
"shuffleremainingplaylist-menu-label": "Mescola i file non ancora riprodotti",
"shuffleentireplaylist-menu-label": "Mescola l'intera playlist",
"undoplaylist-menu-label": "Annulla l'ultima modifica alla playlist",
"addfilestoplaylist-menu-label": "Aggiungi un file alla fine della playlist",
"addurlstoplaylist-menu-label": "Aggiungi un indirizzo alla fine della playlist",
"editplaylist-menu-label": "Modifica la playlist",
"open-containing-folder": "Apri la cartella contenente questo file",
"addyourfiletoplaylist-menu-label": "Aggiungi il tuo file alla playlist",
"addotherusersfiletoplaylist-menu-label": "Aggiungi il file di {} alla playlist", # Username
"addyourstreamstoplaylist-menu-label": "Aggiungi il tuo indirizzo alla playlist",
"addotherusersstreamstoplaylist-menu-label": "Aggiungi l'indirizzo di {} alla playlist", # Username # item owner indicator
"openusersstream-menu-label": "Apri l'indirizzo di {}", # [username]
"openusersfile-menu-label": "Apri il file di {}", # [username]'s
"playlist-instruction-item-message": "Trascina qui i file per aggiungerli alla playlist condivisa.",
"sharedplaylistenabled-tooltip": "Gli operatori della stanza possono aggiungere i file a una playlist sincronizzata per garantire che tutti i partecipanti stiano guardando la stessa cosa. Configura le cartelle multimediali alla voce 'Miscellanea'.",
"playlist-empty-error": "Playlist is currently empty.", # TO DO: Translate
"playlist-invalid-index-error": "Invalid playlist index", # TO DO: Translate
}
| 78.528736
| 703
| 0.731899
|
acfe3c77e33314b8e3569f8909fe71e9f7673b46
| 3,683
|
py
|
Python
|
muranodashboard/common/utils.py
|
sbrf-clouddev/murano-dashboard
|
f98e08d827a006db5fd054ac4fb6abba786bb414
|
[
"Apache-2.0"
] | null | null | null |
muranodashboard/common/utils.py
|
sbrf-clouddev/murano-dashboard
|
f98e08d827a006db5fd054ac4fb6abba786bb414
|
[
"Apache-2.0"
] | null | null | null |
muranodashboard/common/utils.py
|
sbrf-clouddev/murano-dashboard
|
f98e08d827a006db5fd054ac4fb6abba786bb414
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
import cPickle as pickle
except ImportError:
import pickle
import bs4
import string
from muranodashboard.dynamic_ui import yaql_expression
import six
import yaql
# WrappingColumn is only available in N-horizon
# This make murano-dashboard compatible with Mitaka-horizon
try:
from horizon.tables import WrappingColumn as Column
except ImportError:
from horizon.tables import Column as Column # noqa
def parse_api_error(api_error_html):
    """Extract a human-readable message from an HTML API error page.

    Returns the <body> text with the leading <h1> heading stripped, or
    None when the document has no non-empty <body>.
    """
    # Name the parser explicitly: bs4 warns when none is given and may
    # otherwise pick different parsers on different machines.
    error_html = bs4.BeautifulSoup(api_error_html, 'html.parser')
    body = error_html.find('body')
    if not body or not body.text:
        return None
    h1 = body.find('h1')
    if h1:
        # Drop the generic heading (e.g. "404 Not Found"); keep the details.
        h1.replace_with('')
    return body.text.strip()
def ensure_python_obj(obj):
    """Translate the literal strings 'True', 'False' and 'None' into the
    corresponding Python constants; any other value is returned untouched.
    """
    literals = {'True': True, 'False': False, 'None': None}
    return literals.get(obj, obj)
class Bunch(object):
    """Bunch dict/object-like container.

    Provides both dictionary-style (``obj['key']``) and attribute-style
    (``obj.key``) access to the same underlying attributes. Iterating a
    Bunch yields its stored *values* (not keys).
    """
    def __init__(self, **kwargs):
        # dict.items() behaves identically on Python 2 and 3, so the six
        # compatibility shim used previously is unnecessary here.
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __getitem__(self, item):
        return getattr(self, item)

    def __setitem__(self, key, value):
        setattr(self, key, value)

    def __delitem__(self, key):
        delattr(self, key)

    def __contains__(self, item):
        return hasattr(self, item)

    def __iter__(self):
        # Mirrors the original six.itervalues() behaviour: values, not keys.
        return iter(self.__dict__.values())
class BlankFormatter(string.Formatter):
    """Formatter that substitutes a default (empty string) for missing keys."""

    def __init__(self, default=''):
        self.default = default

    def get_value(self, key, args, kwargs):
        # Named fields fall back to the default instead of raising KeyError;
        # positional fields keep the stock Formatter behaviour.
        if not isinstance(key, str):
            return string.Formatter.get_value(self, key, args, kwargs)
        return kwargs.get(key, self.default)
class CustomPickler(object):
    """Pickler that filters out non-serializable YAQL engine instances.

    The YAQL engine cannot be pickled and does not need to be cached, so
    any YaqlEngine encountered during serialization is replaced by a
    sentinel persistent id instead of being written out.
    """

    def __init__(self, file, protocol=0):
        inner = pickle.Pickler(file, protocol)
        inner.persistent_id = self.persistent_id
        # Expose the underlying pickler's API directly.
        self.dump = inner.dump
        self.clear_memo = inner.clear_memo

    def persistent_id(self, obj):
        # Returning None means "pickle this object normally".
        if isinstance(obj, yaql.factory.YaqlEngine):
            return "filtered:YaqlEngine"
        return None
class CustomUnpickler(object):
    """Unpickler that restores filtered YAQL engine placeholders.

    The "filtered:YaqlEngine" persistent id written by CustomPickler is
    resolved back to the real YAQL engine instance on load.
    """

    def __init__(self, file):
        inner = pickle.Unpickler(file)
        inner.persistent_load = self.persistent_load
        self.load = inner.load
        # cPickle exposes noload(); the pure-Python Unpickler does not.
        self.noload = getattr(inner, 'noload', None)

    def persistent_load(self, obj_id):
        if obj_id != 'filtered:YaqlEngine':
            raise pickle.UnpicklingError('Invalid persistent id')
        return yaql_expression.YAQL
| 29.701613
| 78
| 0.680152
|
acfe3c95386629a4d9824a440a310f9a8018b176
| 1,007
|
py
|
Python
|
examples/index.py
|
hodgesds/solnado
|
a3a3190fcad724bb0e72e5666643e1556102ec14
|
[
"Apache-2.0"
] | 1
|
2015-11-01T17:49:24.000Z
|
2015-11-01T17:49:24.000Z
|
examples/index.py
|
hodgesds/solnado
|
a3a3190fcad724bb0e72e5666643e1556102ec14
|
[
"Apache-2.0"
] | null | null | null |
examples/index.py
|
hodgesds/solnado
|
a3a3190fcad724bb0e72e5666643e1556102ec14
|
[
"Apache-2.0"
] | null | null | null |
from functools import partial
from solnado import SolrClient
from tornado import ioloop, gen
# Module-level Solr client (default connection settings) shared by the
# coroutines below.
c = SolrClient()
@gen.coroutine
def create_core():
    """Create the Solr core 'foo' and return the server response."""
    task = partial(c.core_create, 'foo')
    result = yield gen.Task(task)
    raise gen.Return(result)
@gen.coroutine
def create_collection():
    """Create the Solr collection 'foo' and return the server response."""
    task = partial(c.create_collection, 'foo')
    result = yield gen.Task(task)
    raise gen.Return(result)
@gen.coroutine
def index_documents(docs):
    """Index *docs* (JSON-serializable dicts) into collection 'foo'.

    commitWithin=0 asks Solr to commit the documents immediately.
    """
    task = partial(c.add_json_documents, 'foo', docs, commitWithin=0)
    result = yield gen.Task(task)
    raise gen.Return(result)
@gen.coroutine
def main_coro():
    """Set up the core and collection, then index two sample documents."""
    yield create_core()
    yield create_collection()
    res = yield index_documents([
        {
            'id':'123',
            'Title': 'A tale of two documents',
        },{
            'id': '456',
            'Title': 'It was the best of times',
        }])
    # Python 2 print statement: shows the raw HTTP response body and code.
    print res.body, res.code
# Drive the whole workflow to completion on the singleton Tornado IOLoop.
ioloop.IOLoop.instance().run_sync(main_coro)
| 18.648148
| 48
| 0.574975
|
acfe3d25b158d26afe31007dbf05d66bdb863e86
| 10,689
|
py
|
Python
|
scripts-history/posecam-threads-zmq-loop.py
|
freewebfish/lightweight-human-pose-estimation-3d-demo.pytorch
|
a75759b2950ecfd60c686b8b58f0184b3c8306d6
|
[
"Apache-2.0"
] | null | null | null |
scripts-history/posecam-threads-zmq-loop.py
|
freewebfish/lightweight-human-pose-estimation-3d-demo.pytorch
|
a75759b2950ecfd60c686b8b58f0184b3c8306d6
|
[
"Apache-2.0"
] | null | null | null |
scripts-history/posecam-threads-zmq-loop.py
|
freewebfish/lightweight-human-pose-estimation-3d-demo.pytorch
|
a75759b2950ecfd60c686b8b58f0184b3c8306d6
|
[
"Apache-2.0"
] | null | null | null |
from argparse import ArgumentParser
import sys
import cv2
import json
import numpy as np
from modules.myconst import *
from modules.draw import Plotter3d
import modules.myutils as mu
from modules.thread_video_streaming import *
from modules.thread_video_processing import *
import zmq
# python demo_unitycam.py --model human-pose-estimation-3d.xml --device CPU --use_openvino --height_size 256 --cam_id 2 --cam_width 640 --cam_height 480
# python demo_unitycam.py --model human-pose-estimation-3d.pth --height_size 256 --cam_id 2 --cam_width 640 --cam_height 480
if __name__ == '__main__':
    # --- Command-line interface -------------------------------------------
    parser = ArgumentParser(description='Pose Camera Demo. '
                                        'Press esc to exit, "p" to (un)pause video or process next image.')
    parser.add_argument('--model',
                        help='Required. Path to checkpoint with a trained model '
                             '(or an .xml file in case of OpenVINO inference).',
                        type=str, required=True)
    parser.add_argument('--video', help='Optional. Path to video file.', type=str, default='')
    parser.add_argument('--cam_id', type=int, default=1)
    parser.add_argument('--cam_width', help='Optional. camera width.', type=int, default=640)
    parser.add_argument('--cam_height', help='Optional. camera height.', type=int, default=480)
    parser.add_argument('--mirror_flip', help='Optional. Do mirror flipping on the image frame.',
                        action='store_true')
    parser.add_argument('--skip_frames', type=int, default=1, help='Optional. Skip frames to speed up the detection.')
    parser.add_argument('--show_frames', help='Optional. Show frames with processed poses.',
                        action='store_true')
    parser.add_argument('--height_size', help='Optional. Network input layer height size.', type=int, default=256)
    parser.add_argument('--frame_scaling_option', help='Optional. Frame scaling option: 0--padding, 1--cropping, 2--frame shape.', type=int, default=0)
    parser.add_argument('--device', help='Optional. The target device to infer on: CPU or GPU (default).',
                        type=str, default='GPU')
    parser.add_argument('--use_openvino',
                        help='Optional. Run network with OpenVINO as inference engine, support CPU, GPU, FPGA, HDDL or MYRIAD.',
                        action='store_true')
    parser.add_argument('--use_tensorrt', help='Optional. Run network with TensorRT as inference engine.',
                        action='store_true')
    parser.add_argument('--extrinsics_path',
                        help='Optional. Path to file with camera extrinsics.',
                        type=str, default=None)
    parser.add_argument('--fx', type=np.float32, default=-1, help='Optional. Camera focal length.')
    args = parser.parse_args()
    video_param, pose_param = mu.check_input_args(args)
    #setup the video capture stream with multiple threads
    if video_param.video_path != '':
        cap = ThreadVideoStreaming(video_param.video_path, video_param.mirror_flip).start()
    else:
        cap = ThreadVideoStreaming(video_param.cam_id, video_param.mirror_flip, \
            video_param.cam_width, video_param.cam_height).start()
    frame_width = cap.width
    frame_height = cap.height
    proc = ThreadVideoProcessing(pose_param).start()
    #setup 3D skeleton plot window
    if video_param.show_frames:
        renderer_param = mu.setup_renderer(frame_width, frame_height, "PoseCam 3D", "PoseCam Frame")
    # Create ZMQ socket to use for sending (and receiving)
    context = zmq.Context()
    socket = context.socket(zmq.REP)
    socket.bind("tcp://*:5555")
    #prepare for loop rolling
    delay = 1
    frame_count = 0
    tic_start = cv2.getTickCount()
    # --- ZMQ request/reply loop -------------------------------------------
    # NOTE(review): fps, TextColor and the msgClient*/msgServer* constants
    # are not defined in this file; they presumably come from the
    # 'from modules.myconst import *' star-import -- confirm.
    while True:
        msgRequest = socket.recv().decode('utf-8')
        if msgRequest == msgClientAlive:
            # Liveness handshake: echo that the server is alive.
            print(f"{TextColor.fg.blue}<- {msgRequest}{TextColor.fg.black}")
            msgReply = msgServerAlive
            try:
                socket.send(msgReply.encode('utf-8'))
                print(f"{TextColor.fg.red}-> {msgReply}{TextColor.fg.black}")
            except:
                print(f"{TextColor.fg.red}>>> In sending {msgReply}, encountered {Exception}@@@ {TextColor.fg.black}")
                break
        elif (msgRequest[0:10] == msgClientParam):
            # Client pushes new capture parameters as JSON; reply with the
            # validated values actually in effect.
            print(f"{TextColor.fg.blue}<- {msgRequest}{TextColor.fg.black}")
            ucam_param = json.loads(msgRequest)
            args.cam_id = ucam_param["cam_id"]
            args.cam_width = ucam_param["cam_width"]
            args.cam_height = ucam_param["cam_height"]
            args.mirror_flip = ucam_param["mirror_flip"]
            args.skip_frames = ucam_param["skip_frames"]
            args.height_size = ucam_param["height_size"]
            video_param, pose_param = mu.check_input_args(args)
            ucam_args = json.dumps({
                "cam_id": video_param.cam_id,
                "cam_width": video_param.cam_width,
                "cam_height": video_param.cam_height,
                "mirror_flip": video_param.mirror_flip,
                "skip_frames": video_param.skip_frames,
                "height_size": pose_param.base_height
            })
            msgReply = ucam_args
            try:
                socket.send(msgReply.encode('utf-8'))
                print(f"{TextColor.fg.red}-> {msgReply}{TextColor.fg.black}")
            except:
                print(f"{TextColor.fg.red}>>> In sending {msgReply}, encountered {Exception}@@@ {TextColor.fg.black}")
                break
        elif (msgRequest == msgClientReady):
            # Client is ready: (re)start the capture and processing threads.
            print(f"{TextColor.fg.blue}<- {msgRequest}{TextColor.fg.black}")
            #setup the video capture stream with multiple threads
            if video_param.video_path != '':
                cap = ThreadVideoStreaming(video_param.video_path, video_param.mirror_flip).start()
            else:
                cap = ThreadVideoStreaming(video_param.cam_id, video_param.mirror_flip, \
                    video_param.cam_width, video_param.cam_height).start()
            frame_width = cap.width
            frame_height = cap.height
            proc = ThreadVideoProcessing(pose_param).start()
            #setup 3D skeleton plot window
            if video_param.show_frames:
                # NOTE(review): unlike the initial setup above, the return
                # value is NOT assigned to renderer_param here -- confirm
                # whether that is intentional.
                mu.setup_renderer(frame_width, frame_height, 'PoseCam 3D', 'PoseCam Frame')
            msgReply = msgServerReady
            try:
                socket.send(msgReply.encode('utf-8'))
                print(f"{TextColor.fg.red}-> {msgReply}{TextColor.fg.black}")
            except:
                print(f"{TextColor.fg.red}>>> In sending {msgReply}, encountered {Exception}@@@ {TextColor.fg.black}")
                break
        elif (msgRequest == msgClientFrame):
            # Client requests pose data for the next frame.
            tic0 = cv2.getTickCount()
            grabbed, frame = cap.read()
            if not grabbed:
                # End of stream: stop worker threads and leave the loop.
                cap.stop()
                proc.stop()
                break
            fps.video = cv2.getTickFrequency() / (cv2.getTickCount() - tic0)
            tic1 = cv2.getTickCount()
            posedata = proc.read()
            fps.openpose = cv2.getTickFrequency() / (cv2.getTickCount() - tic1)
            frame_count += 1
            fps.processing = frame_count / ((cv2.getTickCount() - tic_start) / cv2.getTickFrequency())
            #print(f"frame {frame_count} processed || fps.video = {int(fps.video)} || fps.openpose = {int(fps.openpose)} || fps.processing = {int(fps.processing)}", end="\r")
            tic2 = cv2.getTickCount()
            if video_param.show_frames: mu.render_poses(frame, posedata, renderer_param)
            fps.t_plot = (cv2.getTickCount() - tic2) / cv2.getTickFrequency()
            tic3 = cv2.getTickCount()
            pose_json = mu.jsonify_poses(posedata, frame_width, frame_height)
            fps.t_json = (cv2.getTickCount() - tic3) / cv2.getTickFrequency()
            #total_elapsed_time = fps.t_scale + fps.t_infer + fps.t_parse + fps.t_calc3d + fps.t_plot
            #print(f"elapsed time={total_elapsed_time:.4f}: scale={fps.t_scale:.4f}, infer={fps.t_infer:.4f}, parse={fps.t_parse:.4f}, calc3d={fps.t_calc3d:.4f}, plot={fps.t_plot:.4f}")
            msgReply = pose_json
            tic = cv2.getTickCount()
            try:
                socket.send(msgReply.encode('utf-8'))
                print(f"{TextColor.fg.red}-> frame {frame_count} sent || fps.video = {int(fps.video)} || fps.openpose = {int(fps.openpose)} || fps.processing = {int(fps.processing)} {TextColor.fg.black}", end="\r")
            except:
                print(f"{TextColor.fg.red}>>> In sending frame {frame_count}, encountered {Exception}@@@ {TextColor.fg.black}")
                break
            fps.t_send = (cv2.getTickCount() - tic) / cv2.getTickFrequency()
        else:
            # Unrecognized request type.
            print(f"{TextColor.fg.blue}<- {msgRequest}{TextColor.fg.black}")
            msgReply = msgServerInvalid
            try:
                socket.send(msgReply.encode('utf-8'))
                print(f"{TextColor.fg.red}-> {msgReply}{TextColor.fg.black}")
            except:
                print(f"{TextColor.fg.red}>>> In sending {msgReply}, encountered {Exception}@@@ {TextColor.fg.black}")
                break
        # allow user to pause or exit using keyboard
        key = cv2.waitKey(delay)
        if key == ESC_CODE: break
        if key == P_CODE:
            if delay == 1: delay = 0
            else: delay = 1
        if delay == 0 and video_param.show_frames: # allow to rotate 3D canvas while on pause
            key = 0
            while (key != P_CODE and key != ESC_CODE and key != SPACE_CODE and len(posedata.poses_3d) >0):
                renderer_param.plotter.plot(renderer_param.canvas_3d, posedata.poses_3d, renderer_param.edges)
                cv2.imshow(renderer_param.canvas_3d_window_name, renderer_param.canvas_3d)
                key = cv2.waitKey(33)
            if key == ESC_CODE: break
        else: delay = 1
    # --- Cleanup -----------------------------------------------------------
    if cap: cap.stop()
    if proc: proc.stop()
    if video_param.show_frames: cv2.destroyAllWindows()
    sys.exit()
| 50.419811
| 238
| 0.579661
|
acfe3e3276d49813855d984c4667f3c7ff90d50b
| 3,751
|
py
|
Python
|
shaderjig/axes.py
|
mkvenkit/nocg
|
42311010f5e822b014d5668e2653333430394e40
|
[
"MIT"
] | 3
|
2021-06-04T14:53:22.000Z
|
2021-07-09T14:31:28.000Z
|
shaderjig/axes.py
|
mkvenkit/nocg
|
42311010f5e822b014d5668e2653333430394e40
|
[
"MIT"
] | null | null | null |
shaderjig/axes.py
|
mkvenkit/nocg
|
42311010f5e822b014d5668e2653333430394e40
|
[
"MIT"
] | null | null | null |
"""
axes.py
Python OpenGL program that draws XYZ axes
Author: Mahesh Venkitachalam
"""
import OpenGL
from OpenGL.GL import *
import numpy as np
import math, sys, os
import glutils
import glfw
# Vertex shader: applies the modelview/projection transform and forwards
# the per-vertex colour to the fragment stage.
strVS = """
#version 410 core
in vec3 aVert;
in vec3 aCol;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
out vec4 vColor;
void main() {
// transform vertex
gl_Position = uPMatrix * uMVMatrix * vec4(aVert, 1.0);
vColor = vec4(aCol.rgb, 1.0);
}
"""
# Fragment shader: passes the interpolated vertex colour through.
strFS = """
#version 410 core
in vec4 vColor;
out vec4 fragColor;
void main() {
fragColor = vColor;
}
"""
class Axes3D:
    """Draws RGB-coloured XYZ axes with modern OpenGL.

    The X, Y and Z axes are red, green and blue line segments from the
    origin, rendered through a small dedicated shader program and a VAO
    built once at construction time.
    """

    def __init__(self, axes_len):
        """Compile the shaders and upload the axes geometry.

        :param axes_len: length of each axis line segment.
        """
        # create shader program and look up the matrix uniforms
        self.program = glutils.loadShaders(strVS, strFS)
        glUseProgram(self.program)
        self.pMatrixUniform = glGetUniformLocation(self.program,
                                                   b'uPMatrix')
        self.mvMatrixUniform = glGetUniformLocation(self.program,
                                                    b'uMVMatrix')
        # axis length
        self.axes_len = axes_len
        # Bug fix: the requested length was previously ignored and a
        # hard-coded 15 was passed to create_axes() instead.
        (vertices, colours) = self.create_axes(self.axes_len)
        # set up vertex array object (VAO)
        self.vao = glGenVertexArrays(1)
        glBindVertexArray(self.vao)
        # vertex buffer (4 bytes per float32 component)
        self.vertexBuffer = glGenBuffers(1)
        glBindBuffer(GL_ARRAY_BUFFER, self.vertexBuffer)
        glBufferData(GL_ARRAY_BUFFER, 4 * len(vertices), vertices,
                     GL_STATIC_DRAW)
        # colour buffer
        self.colorBuffer = glGenBuffers(1)
        glBindBuffer(GL_ARRAY_BUFFER, self.colorBuffer)
        glBufferData(GL_ARRAY_BUFFER, 4 * len(colours), colours,
                     GL_STATIC_DRAW)
        # attribute locations
        self.vertLoc = glGetAttribLocation(self.program, b"aVert")
        self.colLoc = glGetAttribLocation(self.program, b"aCol")
        # wire the vertex attribute to its buffer
        glEnableVertexAttribArray(self.vertLoc)
        glBindBuffer(GL_ARRAY_BUFFER, self.vertexBuffer)
        glVertexAttribPointer(self.vertLoc, 3, GL_FLOAT, GL_FALSE, 0, None)
        # wire the colour attribute to its buffer
        glEnableVertexAttribArray(self.colLoc)
        glBindBuffer(GL_ARRAY_BUFFER, self.colorBuffer)
        glVertexAttribPointer(self.colLoc, 3, GL_FLOAT, GL_FALSE, 0, None)
        # unbind VAO
        glBindVertexArray(0)

    def create_axes(self, axes_len):
        """Return (vertices, colours) float32 arrays for three axis lines.

        Each axis is a line from the origin to *axes_len* along one
        coordinate direction; colours are red/green/blue respectively.
        """
        vertices = np.array([0.0, 0.0, 0.0, axes_len, 0.0, 0.0,
                             0.0, 0.0, 0.0, 0.0, axes_len, 0.0,
                             0.0, 0.0, 0.0, 0.0, 0.0, axes_len],
                            np.float32)
        colours = np.array([1.0, 0.0, 0.0, 1.0, 0.0, 0.0,
                            0.0, 1.0, 0.0, 0.0, 1.0, 0.0,
                            0.0, 0.0, 1.0, 0.0, 0.0, 1.0],
                           np.float32)
        return (vertices, colours)

    def render(self, pMatrix, mvMatrix):
        """Draw the axes using the given projection/modelview matrices."""
        glUseProgram(self.program)
        glUniformMatrix4fv(self.pMatrixUniform, 1, GL_FALSE, pMatrix)
        glUniformMatrix4fv(self.mvMatrixUniform, 1, GL_FALSE, mvMatrix)
        glBindVertexArray(self.vao)
        # 6 vertices -> 3 line segments
        glDrawArrays(GL_LINES, 0, 6)
        # unbind VAO and shader
        glBindVertexArray(0)
        glUseProgram(0)
| 25.691781
| 75
| 0.568115
|
acfe3eb7e24c23e366cbc022af0fcb783d32d56e
| 2,942
|
py
|
Python
|
code/src/main/python/codejam/fig_generator.py
|
anonfse/COSAL_Anonymized
|
709906294fd775131f3e019862bbdd554d83773d
|
[
"Unlicense"
] | null | null | null |
code/src/main/python/codejam/fig_generator.py
|
anonfse/COSAL_Anonymized
|
709906294fd775131f3e019862bbdd554d83773d
|
[
"Unlicense"
] | 1
|
2021-11-03T08:28:31.000Z
|
2021-11-03T08:28:31.000Z
|
code/src/main/python/codejam/fig_generator.py
|
anonfse/COSAL_Anonymized
|
709906294fd775131f3e019862bbdd554d83773d
|
[
"Unlicense"
] | 1
|
2022-03-22T14:24:13.000Z
|
2022-03-22T14:24:13.000Z
|
from __future__ import division, print_function
import sys
import os
sys.path.append(os.path.abspath("."))
sys.dont_write_bytecode = True
__author__ = "COSAL"
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
sns.set_style("white")
def plot():
    """Plot cumulative clone counts vs. number of arguments (log y-scale)
    and save the figure to png_clones_vs_args.png."""
    java_functions = np.cumsum([1680, 1572, 439, 321, 168])
    py_functions = np.cumsum([963, 1015, 498, 382, 277])
    mixed_functions = np.cumsum([62, 63, 6, 0, 0])
    # Note: the previous 'funcs' transpose was computed but never used;
    # removed as dead work.
    x = ["1", "2", "3", "4", "5"]
    fig, ax = plt.subplots(figsize=(8, 4))
    ax.plot(np.arange(5), java_functions, '*-', label="Java")
    ax.plot(np.arange(5), py_functions, '*-', label="Python")
    ax.plot(np.arange(5), mixed_functions, '*-', label="Java + Python")
    ax.set_yscale("log")
    plt.ylabel("# Clones (Log Scale)", fontweight="bold", fontsize=16)
    plt.xlabel("# Arguments", fontweight="bold", fontsize=16)
    plt.xticks(np.arange(5), x)
    ax.tick_params(axis='both', which='major', labelsize=14)
    ax.legend(shadow=True, fontsize=16)
    plt.tight_layout()
    plt.savefig("png_clones_vs_args.png")
# plot()
def plot_bar():
    """Plot Java/Python clone counts against lines of code and print the
    cumulative fraction of clones covered up to each LOC bucket.

    Saves the figure to png_clones_vs_loc.png.
    """
    # y = [271,811,843,747,753,502,275,281,383,364,288,261,279,252,227,184,132,101,73,66,69,55,53,46,24,28,31,7,4,36]
    java_lst = [140, 420, 436, 387, 390, 260, 142, 145, 198, 188, 149, 135, 144, 130, 117, 95, 68, 52, 38, 34, 36, 26, 22, 24, 12, 14, 16, 4, 2, 19]
    py_lst = [148, 334, 365, 271, 273, 132, 100, 102, 109, 102, 104, 95, 101, 81, 72, 57, 48, 37, 26, 24, 25, 18, 16, 17, 9, 10, 11, 3, 1, 13]
    j_sum = sum(java_lst)
    p_sum = sum(py_lst)
    # Use range() and list comprehension instead of the Python-2-only
    # xrange and map-object + list concatenation, so the script runs on
    # Python 3 as well (behaviour on Python 2 is unchanged).
    print("### Java")
    for j in range(len(java_lst)):
        print(j, round(sum(java_lst[:j + 1]) / j_sum, 2))
    print("### Python")
    for j in range(len(py_lst)):
        print(j, round(sum(py_lst[:j + 1]) / p_sum, 2))
    x = [str(i) for i in range(1, 30)] + ["30+"]
    indices = np.arange(len(x))
    plt.figure(figsize=(8, 3))
    plt.plot(indices, java_lst, label="Java")
    plt.plot(indices, py_lst, label="Python")
    plt.xticks(indices, x)
    plt.xlabel("Lines of Code", fontweight="bold", fontsize=16)
    plt.ylabel("# Clones", fontweight="bold", fontsize=16)
    plt.tick_params(axis='y', which='major', labelsize=12)
    plt.legend(shadow=True, fontsize=16)
    plt.tight_layout()
    plt.savefig("png_clones_vs_loc.png", bbox_inches='tight')
# Generate the LOC figure when the module is executed.
plot_bar()
# Raw total clone counts per LOC bucket (1..29 lines, then 30+).
lst = [271,811,843,747,753,502,275,281,383,364,288,261,279,252,227,184,132,101,73,66,69,50,43,46,24,28,31,7,4,36]
# java_lst = [int(round(3845/7431 * x)) for x in lst]
java_lst = [140, 420, 436, 387, 390, 260, 142, 145, 198, 188, 149, 135, 144, 130, 117, 95, 68, 52, 38, 34, 36, 26, 22, 24, 12, 14, 16, 4, 2, 19]
# py_lst = [int(round(2691/7431 * x)) for x in lst]
py_lst = [148, 334, 365, 271, 273, 132, 100, 102, 109, 102, 104, 95, 101, 91, 82, 67, 48, 37, 26, 24, 25, 18, 16, 17, 9, 10, 11, 3, 1, 13]
# NOTE(review): presumably a sanity check of the 53%/51% language split
# used to derive java_lst/py_lst -- confirm.
print(0.53 * 3845, 0.51 * 2691)
| 38.207792
| 147
| 0.645819
|
acfe3fc23737003b9e14a7c4c1775889d6ae3983
| 16,751
|
py
|
Python
|
dev/archery/archery/release.py
|
boaz-codota/arrow-datafusion
|
00b4a845dc80aee59c1de36c9450f0b8f9ee2f36
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC0-1.0"
] | 1
|
2019-03-21T16:02:48.000Z
|
2019-03-21T16:02:48.000Z
|
dev/archery/archery/release.py
|
boaz-codota/arrow-datafusion
|
00b4a845dc80aee59c1de36c9450f0b8f9ee2f36
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC0-1.0"
] | 84
|
2022-01-13T09:59:21.000Z
|
2022-03-22T06:42:44.000Z
|
dev/archery/archery/release.py
|
boaz-codota/arrow-datafusion
|
00b4a845dc80aee59c1de36c9450f0b8f9ee2f36
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC0-1.0"
] | 1
|
2018-03-24T13:18:45.000Z
|
2018-03-24T13:18:45.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from collections import defaultdict
import functools
import os
import re
import pathlib
import shelve
import warnings
from git import Repo
from jira import JIRA
from semver import VersionInfo as SemVer
from .utils.source import ArrowSources
from .utils.report import JinjaReport
def cached_property(fn):
    """Turn *fn* into a read-only property whose value is memoized.

    The wrapped function is cached through functools.lru_cache with a
    single slot, so repeated attribute access on the same instance reuses
    the first computed value.
    """
    memoized = functools.lru_cache(maxsize=1)(fn)
    return property(memoized)
class Version(SemVer):
    """Semantic version enriched with JIRA release metadata."""

    # Only the two extra fields are stored on top of the SemVer base.
    __slots__ = ('released', 'release_date')

    def __init__(self, released=False, release_date=None, **kwargs):
        super().__init__(**kwargs)
        self.released = released
        self.release_date = release_date

    @classmethod
    def parse(cls, version, **kwargs):
        """Parse a semver string; extra kwargs become release metadata."""
        return cls(**SemVer.parse(version).to_dict(), **kwargs)

    @classmethod
    def from_jira(cls, jira_version):
        """Build a Version from a jira library version object."""
        return cls.parse(
            jira_version.name,
            released=jira_version.released,
            # Unreleased JIRA versions have no releaseDate attribute.
            release_date=getattr(jira_version, 'releaseDate', None)
        )
class Issue:
    """Lightweight view of a JIRA issue: key, issue type and summary."""

    def __init__(self, key, type, summary):
        self.key = key
        self.type = type
        self.summary = summary

    @classmethod
    def from_jira(cls, jira_issue):
        """Adapt a jira library issue object into an Issue."""
        fields = jira_issue.fields
        return cls(key=jira_issue.key,
                   type=fields.issuetype.name,
                   summary=fields.summary)

    @property
    def project(self):
        # 'ARROW-1234' -> 'ARROW'
        parts = self.key.split('-')
        return parts[0]

    @property
    def number(self):
        # 'ARROW-1234' -> 1234
        parts = self.key.split('-')
        return int(parts[1])
class Jira(JIRA):
    """JIRA client preconfigured for the Apache jira instance.

    Credentials default to the APACHE_JIRA_USER / APACHE_JIRA_PASSWORD
    environment variables; results are adapted to the local Version and
    Issue types.
    """

    def __init__(self, user=None, password=None,
                 url='https://issues.apache.org/jira'):
        user = user or os.environ.get('APACHE_JIRA_USER')
        password = password or os.environ.get('APACHE_JIRA_PASSWORD')
        super().__init__(url, basic_auth=(user, password))

    def project_version(self, version_string, project='ARROW'):
        """Look up a single Version by its string form (raises KeyError)."""
        # query the versions from jira so the result carries the release
        # metadata (released flag, release date)
        versions = {str(v): v for v in self.project_versions(project)}
        return versions[version_string]

    def project_versions(self, project):
        """All parseable project versions as Version objects, newest first."""
        versions = []
        for v in super().project_versions(project):
            try:
                versions.append(Version.from_jira(v))
            except ValueError:
                # ignore invalid semantic versions like JS-0.4.0
                continue
        return sorted(versions, reverse=True)

    def issue(self, key):
        """Fetch a single issue as a local Issue object."""
        return Issue.from_jira(super().issue(key))

    def project_issues(self, version, project='ARROW'):
        """All issues whose fixVersion is *version*, as Issue objects."""
        query = "project={} AND fixVersion={}".format(project, version)
        issues = super().search_issues(query, maxResults=False)
        return list(map(Issue.from_jira, issues))
class CachedJira:
    """Proxy around a Jira client that memoizes method results on disk.

    Results of method calls are persisted in a ``shelve`` database keyed
    by method name and arguments, so repeated queries skip the network.
    Non-callable attributes are passed through untouched.
    """

    def __init__(self, cache_path, jira=None):
        self.jira = jira or Jira()
        self.cache_path = cache_path

    def __getattr__(self, name):
        attr = getattr(self.jira, name)
        if not callable(attr):
            return attr
        return self._cached(name, attr)

    def _cached(self, name, method):
        def wrapper(*args, **kwargs):
            key = str((name, args, kwargs))
            with shelve.open(self.cache_path) as cache:
                if key in cache:
                    return cache[key]
                result = method(*args, **kwargs)
                cache[key] = result
                return result
        return wrapper
# Commit headline grammar: optional "ARROW-123:"/"PARQUET-45:" issue key,
# optional "[Component]" tags, then the free-form summary.
_TITLE_REGEX = re.compile(
    r"(?P<issue>(?P<project>(ARROW|PARQUET))\-\d+)?\s*:?\s*"
    r"(?P<components>\[.*\])?\s*(?P<summary>.*)"
)
_COMPONENT_REGEX = re.compile(r"\[([^\[\]]+)\]")


class CommitTitle:
    """Structured commit headline: issue key, components and summary."""

    def __init__(self, summary, project=None, issue=None, components=None):
        self.project = project
        self.issue = issue
        self.components = components or []
        self.summary = summary

    def __str__(self):
        pieces = []
        if self.issue:
            pieces.append("{}: ".format(self.issue))
        if self.components:
            pieces.extend("[{}]".format(c) for c in self.components)
            pieces.append(" ")
        pieces.append(self.summary)
        return "".join(pieces)

    def __eq__(self, other):
        ours = (self.summary, self.project, self.issue, self.components)
        theirs = (other.summary, other.project, other.issue, other.components)
        return ours == theirs

    def __hash__(self):
        return hash(
            (self.summary, self.project, self.issue, tuple(self.components))
        )

    @classmethod
    def parse(cls, headline):
        """Parse a headline; fall back to a bare summary when unparseable."""
        match = _TITLE_REGEX.match(headline)
        if match is None:
            warnings.warn(
                "Unable to parse commit message `{}`".format(headline)
            )
            return CommitTitle(headline)

        groups = match.groupdict()
        components = _COMPONENT_REGEX.findall(groups.get('components') or '')
        return CommitTitle(
            groups['summary'],
            project=groups.get('project'),
            issue=groups.get('issue'),
            components=components
        )
class Commit:
    """Git commit wrapper exposing parsed title fields alongside git data.

    Attribute lookups try the parsed ``CommitTitle`` first and fall back
    to the underlying git commit object.
    """

    def __init__(self, wrapped):
        self._title = CommitTitle.parse(wrapped.summary)
        self._wrapped = wrapped

    def __getattr__(self, attr):
        source = self._title if hasattr(self._title, attr) else self._wrapped
        return getattr(source, attr)

    def __repr__(self):
        return '<Commit sha={!r} issue={!r} components={!r} summary={!r}>'.format(
            self.hexsha, self.issue, self.components, self.summary)

    @property
    def url(self):
        """Link to this commit on GitHub."""
        return 'https://github.com/apache/arrow/commit/{}'.format(self.hexsha)

    @property
    def title(self):
        """The parsed CommitTitle for this commit."""
        return self._title
class ReleaseCuration(JinjaReport):
    """Console report showing how release commits map onto jira issues."""
    templates = {
        'console': 'release_curation.txt.j2'
    }
    # fields rendered by the template; populated by Release.curate()
    fields = [
        'release',
        'within',
        'outside',
        'nojira',
        'parquet',
        'nopatch'
    ]
class JiraChangelog(JinjaReport):
    """Changelog report rendered from the jira issues of a release."""
    templates = {
        'markdown': 'release_changelog.md.j2',
        'html': 'release_changelog.html.j2'
    }
    # fields rendered by the templates; populated by Release.changelog()
    fields = [
        'release',
        'categories'
    ]
class Release:
    """A planned or published Arrow release backed by jira and git.

    Instances must be created through :meth:`from_jira`, which selects the
    concrete subclass (major / minor / patch) from the version number.
    """

    def __init__(self):
        # direct construction is forbidden; from_jira() uses __new__ instead
        raise TypeError("Do not initialize Release class directly, use "
                        "Release.from_jira(version) instead.")

    def __repr__(self):
        if self.version.released:
            status = "released_at={!r}".format(self.version.release_date)
        else:
            status = "pending"
        return "<{} {!r} {}>".format(self.__class__.__name__,
                                     str(self.version), status)

    @staticmethod
    def from_jira(version, jira=None, repo=None):
        """Create the appropriate Release subclass for *version*.

        Parameters
        ----------
        version : str or Version
            Version string (resolved against jira) or Version object.
        jira : None, str, Jira or CachedJira
            Client to use; a string is documented as a server url.
        repo : None, str, pathlib.Path or git.Repo
            Arrow checkout; defaults to the discovered source tree.
        """
        if jira is None:
            jira = Jira()
        elif isinstance(jira, str):
            # NOTE(review): the error below documents this string as a server
            # url, but Jira()'s first positional parameter is `user` — this
            # likely should be Jira(url=jira); verify against callers.
            jira = Jira(jira)
        elif not isinstance(jira, (Jira, CachedJira)):
            raise TypeError("`jira` argument must be a server url or a valid "
                            "Jira instance")

        if repo is None:
            arrow = ArrowSources.find()
            repo = Repo(arrow.path)
        elif isinstance(repo, (str, pathlib.Path)):
            repo = Repo(repo)
        elif not isinstance(repo, Repo):
            raise TypeError("`repo` argument must be a path or a valid Repo "
                            "instance")

        if isinstance(version, str):
            version = jira.project_version(version, project='ARROW')
        elif not isinstance(version, Version):
            raise TypeError(version)

        # decide the type of the release based on the version number
        if version.patch == 0:
            if version.minor == 0:
                klass = MajorRelease
            elif version.major == 0:
                # handle minor releases before 1.0 as major releases
                klass = MajorRelease
            else:
                klass = MinorRelease
        else:
            klass = PatchRelease

        # bypass __init__ (which raises) to prevent direct instantiation
        obj = klass.__new__(klass)
        obj.version = version
        obj.jira = jira
        obj.repo = repo
        return obj

    @property
    def is_released(self):
        # mirrors the jira "released" flag on the version
        return self.version.released

    @property
    def tag(self):
        """Git tag name used for this release."""
        return "apache-arrow-{}".format(str(self.version))

    @property
    def branch(self):
        # provided by the concrete subclasses
        raise NotImplementedError()

    @property
    def siblings(self):
        """
        Releases to consider when calculating previous and next releases.
        """
        raise NotImplementedError()

    @cached_property
    def previous(self):
        """The release preceding this one among its siblings, or None."""
        # siblings are sorted newest first, so the previous release follows
        position = self.siblings.index(self.version)
        try:
            previous = self.siblings[position + 1]
        except IndexError:
            # first release doesn't have a previous one
            return None
        else:
            return Release.from_jira(previous, jira=self.jira, repo=self.repo)

    @cached_property
    def next(self):
        """The release following this one among its siblings."""
        # siblings are sorted newest first, so the next release precedes
        position = self.siblings.index(self.version)
        if position <= 0:
            raise ValueError("There is no upcoming release set in JIRA after "
                             "version {}".format(self.version))
        upcoming = self.siblings[position - 1]
        return Release.from_jira(upcoming, jira=self.jira, repo=self.repo)

    @cached_property
    def issues(self):
        """Mapping of issue key -> Issue for issues fixed in this release."""
        issues = self.jira.project_issues(self.version, project='ARROW')
        return {i.key: i for i in issues}

    @cached_property
    def commits(self):
        """
        All commits applied between two versions.
        """
        if self.previous is None:
            # first release
            # NOTE(review): an empty lower bound yields the range "..<upper>",
            # which git resolves relative to HEAD rather than the repository
            # root — confirm this is intended for the very first release.
            lower = ''
        else:
            lower = self.repo.tags[self.previous.tag]

        if self.version.released:
            # released: the upper bound is the release tag
            upper = self.repo.tags[self.tag]
        else:
            # pending: use the tip of the (maintenance or master) branch
            try:
                upper = self.repo.branches[self.branch]
            except IndexError:
                warnings.warn("Release branch `{}` doesn't exist."
                              .format(self.branch))
                return []

        commit_range = "{}..{}".format(lower, upper)
        return list(map(Commit, self.repo.iter_commits(commit_range)))

    def curate(self):
        """Classify the branch's commits against the jira release.

        Returns a ReleaseCuration report grouping commits into: within
        (tracked by this release), outside (other fix versions), nojira
        (no issue key), parquet (PARQUET-keyed), plus jira issues that
        have no patch yet (nopatch).
        """
        # handle commits with parquet issue key specially and query them from
        # jira and add it to the issues
        release_issues = self.issues

        within, outside, nojira, parquet = [], [], [], []
        for c in self.commits:
            if c.issue is None:
                nojira.append(c)
            elif c.issue in release_issues:
                within.append((release_issues[c.issue], c))
            elif c.project == 'PARQUET':
                parquet.append((self.jira.issue(c.issue), c))
            else:
                outside.append((self.jira.issue(c.issue), c))

        # remaining jira tickets without a matching commit
        within_keys = {i.key for i, c in within}
        nopatch = [issue for key, issue in release_issues.items()
                   if key not in within_keys]

        return ReleaseCuration(release=self, within=within, outside=outside,
                               nojira=nojira, parquet=parquet, nopatch=nopatch)

    def changelog(self):
        """Build the JiraChangelog report grouping issues by category."""
        release_issues = []

        # get organized report for the release
        curation = self.curate()

        # jira tickets having patches in the release
        for issue, _ in curation.within:
            release_issues.append(issue)
        # jira tickets without patches
        for issue in curation.nopatch:
            release_issues.append(issue)
        # parquet patches in the release
        for issue, _ in curation.parquet:
            release_issues.append(issue)

        # organize issues into the two changelog categories
        issue_types = {
            'Bug': 'Bug Fixes',
            'Improvement': 'New Features and Improvements',
            'New Feature': 'New Features and Improvements',
            'Sub-task': 'New Features and Improvements',
            'Task': 'New Features and Improvements',
            'Test': 'Bug Fixes',
            'Wish': 'New Features and Improvements',
        }
        categories = defaultdict(list)
        for issue in release_issues:
            categories[issue_types[issue.type]].append(issue)

        # sort issues by the issue key in ascending order
        for name, issues in categories.items():
            issues.sort(key=lambda issue: (issue.project, issue.number))

        return JiraChangelog(release=self, categories=categories)
class MaintenanceMixin:
    """
    Utility methods for cherry-picking commits from the main branch.
    """

    def commits_to_pick(self, exclude_already_applied=True):
        """Commits on master belonging to this release's jira issues.

        Returns them oldest-first so cherry-picking preserves the original
        order and minimizes merge conflicts.
        """
        # collect commits applied on the main branch since the root of the
        # maintenance branch (the previous major release)
        if self.version.major == 0:
            # treat minor releases as major releases preceeding 1.0.0 release
            commit_range = "apache-arrow-0.{}.0..master".format(
                self.version.minor - 1
            )
        else:
            commit_range = "apache-arrow-{}.0.0..master".format(
                self.version.major
            )

        # keeping the original order of the commits helps to minimize the merge
        # conflicts during cherry-picks
        commits = map(Commit, self.repo.iter_commits(commit_range))

        # exclude patches that have been already applied to the maintenance
        # branch, we cannot identify patches based on sha because it changes
        # after the cherry pick so use commit title instead
        if exclude_already_applied:
            already_applied = {c.title for c in self.commits}
        else:
            already_applied = set()

        # iterate over the commits applied on the main branch and filter out
        # the ones that are included in the jira release
        patches_to_pick = [c for c in commits if
                           c.issue in self.issues and
                           c.title not in already_applied]

        return reversed(patches_to_pick)

    def cherry_pick_commits(self, recreate_branch=True):
        """Cherry-pick the selected commits onto the maintenance branch.

        Mutates the working repository: checks out (and optionally
        recreates) the maintenance branch, then applies each commit.
        """
        if recreate_branch:
            # delete, create and checkout the maintenance branch based off of
            # the previous tag
            if self.branch in self.repo.branches:
                self.repo.git.branch('-D', self.branch)
            self.repo.git.checkout(self.previous.tag, b=self.branch)
        else:
            # just checkout the already existing maintenance branch
            self.repo.git.checkout(self.branch)

        # cherry pick the commits based on the jira tickets
        for commit in self.commits_to_pick():
            self.repo.git.cherry_pick(commit.hexsha)
class MajorRelease(Release):
    """Major release cut directly from the master branch."""

    @property
    def branch(self):
        return "master"

    @cached_property
    def siblings(self):
        """
        Filter only the major releases.
        """
        # handle minor releases before 1.0 as major releases
        def is_major(v):
            return v.patch == 0 and (v.major == 0 or v.minor == 0)
        return [v for v in self.jira.project_versions('ARROW') if is_major(v)]
class MinorRelease(Release, MaintenanceMixin):
    """Minor release maintained on a per-major maintenance branch."""

    @property
    def branch(self):
        return "maint-{major}.x.x".format(major=self.version.major)

    @cached_property
    def siblings(self):
        """
        Filter the major and minor releases.
        """
        versions = self.jira.project_versions('ARROW')
        return [v for v in versions if v.patch == 0]
class PatchRelease(Release, MaintenanceMixin):
    """Patch release maintained on a per-minor maintenance branch."""

    @property
    def branch(self):
        return "maint-{major}.{minor}.x".format(major=self.version.major,
                                                minor=self.version.minor)

    @cached_property
    def siblings(self):
        """
        No filtering, consider all releases.
        """
        return list(self.jira.project_versions('ARROW'))
| 31.251866
| 79
| 0.595964
|
acfe3fd54d72dd30e0c34b3f48551856e84289a4
| 3,638
|
py
|
Python
|
tests/test_object_utils.py
|
zimeon/ocfl-py
|
3d696d797aa89a90a2a471d907acff07ef4a364d
|
[
"MIT"
] | 14
|
2018-09-10T20:08:04.000Z
|
2022-03-29T18:10:43.000Z
|
tests/test_object_utils.py
|
zimeon/ocfl-py
|
3d696d797aa89a90a2a471d907acff07ef4a364d
|
[
"MIT"
] | 73
|
2019-02-13T20:35:09.000Z
|
2022-03-24T15:21:34.000Z
|
tests/test_object_utils.py
|
zimeon/ocfl-py
|
3d696d797aa89a90a2a471d907acff07ef4a364d
|
[
"MIT"
] | 3
|
2019-02-13T18:39:50.000Z
|
2021-05-04T15:39:04.000Z
|
# -*- coding: utf-8 -*-
"""Object tests."""
import argparse
import unittest
from ocfl.object_utils import remove_first_directory, make_unused_filepath, next_version, add_object_args, add_shared_args, check_shared_args, find_path_type
class TestAll(unittest.TestCase):
    """TestAll class to run tests."""

    def test_remove_first_directory(self):
        """Test remove_first_directory function."""
        self.assertEqual(remove_first_directory(''), '')
        self.assertEqual(remove_first_directory('a'), '')
        self.assertEqual(remove_first_directory('a/b'), 'b')
        # trailing slash is dropped along with the first component
        self.assertEqual(remove_first_directory('a/b/'), 'b')
        self.assertEqual(remove_first_directory('a/b/c'), 'b/c')

    def test_make_unused_filepath(self):
        """Test make_unused_filepath function."""
        # default separator appears to be '__'; third arg overrides it
        self.assertEqual(make_unused_filepath('x/y', []), 'x/y__2')
        self.assertEqual(make_unused_filepath('x/y', {'x/y__2': 1}), 'x/y__3')
        self.assertEqual(make_unused_filepath('x/y', {'x/y': 1}, ''), 'x/y2')
        self.assertEqual(make_unused_filepath('x/y', ['x/y', 'x/y2', 'x/y3'], ''), 'x/y4')

    def test_next_version(self):
        """Test next_version function."""
        # invalid version strings must raise
        self.assertRaises(Exception, next_version, '1')
        self.assertRaises(Exception, next_version, 1)
        self.assertRaises(Exception, next_version, 'v1v')
        # good non-zero padded
        self.assertEqual(next_version('v1'), 'v2')
        self.assertEqual(next_version('v99'), 'v100')
        self.assertEqual(next_version('v1234'), 'v1235')
        # good zero-padded
        self.assertEqual(next_version('v01'), 'v02')
        self.assertEqual(next_version('v00001'), 'v00002')
        self.assertEqual(next_version('v00999'), 'v01000')
        self.assertEqual(next_version('v0998'), 'v0999')
        # overflow: a zero-padded counter cannot grow an extra digit
        self.assertRaises(Exception, next_version, 'v09')
        self.assertRaises(Exception, next_version, 'v0999')

    def test_add_object_args(self):
        """Test (kinda) adding object args."""
        parser = argparse.ArgumentParser()
        add_object_args(parser)
        args = parser.parse_args(['--skip', 'aa'])
        self.assertIn('aa', args.skip)

    def test_add_shared_args(self):
        """Test (kinda) adding shared args."""
        parser = argparse.ArgumentParser()
        add_shared_args(parser)
        args = parser.parse_args(['--version', '-v'])
        self.assertTrue(args.version)
        self.assertTrue(args.verbose)

    def test_check_shared_args(self):
        """Test check of shared args."""
        parser = argparse.ArgumentParser()
        add_shared_args(parser)
        parser.parse_args(['--version', '-v'])
        check_shared_args(parser.parse_args(['-v']))
        # --version alone prints version info and exits
        self.assertRaises(SystemExit, check_shared_args, parser.parse_args(['--version']))

    def test_find_path_type(self):
        """Test find_path_type function (relies on repo fixture paths)."""
        self.assertEqual(find_path_type("extra_fixtures/good-storage-roots/fedora-root"), "root")
        self.assertEqual(find_path_type("fixtures/1.0/good-objects/minimal_one_version_one_file"), "object")
        self.assertEqual(find_path_type("README"), "file")
        self.assertIn("does not exist", find_path_type("this_path_does_not_exist"))
        self.assertIn("nor can parent", find_path_type("still_nope/nope_doesnt_exist"))
        self.assertEqual(find_path_type("ocfl"), "no 0= declaration file")
        self.assertIn("more than one 0= declaration file", find_path_type("extra_fixtures/misc/multiple_declarations"))
        self.assertIn("unrecognized", find_path_type("extra_fixtures/misc/unknown_declaration"))
| 47.246753
| 157
| 0.667675
|
acfe40021930624d0999d265e394e77b744a088e
| 283
|
py
|
Python
|
mindhome_alpha/erpnext/education/doctype/fee_schedule/fee_schedule_dashboard.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:55:29.000Z
|
2021-04-29T14:55:29.000Z
|
mindhome_alpha/erpnext/education/doctype/fee_schedule/fee_schedule_dashboard.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | null | null | null |
mindhome_alpha/erpnext/education/doctype/fee_schedule/fee_schedule_dashboard.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:39:01.000Z
|
2021-04-29T14:39:01.000Z
|
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
def get_data():
    """Dashboard config linking a Fee Schedule to its Fees transactions."""
    transactions = [{'items': ['Fees']}]
    return {
        'fieldname': 'fee_schedule',
        'transactions': transactions,
    }
| 21.769231
| 68
| 0.681979
|
acfe404d3b8824f0a228bba894960a5bdd898e12
| 1,934
|
py
|
Python
|
solidity/python/PerformanceUniTestSale.py
|
frankwei98/contracts
|
18fa00ec598058459d96950523c3fc23d2c00bd6
|
[
"Apache-2.0"
] | 13
|
2018-09-18T09:55:27.000Z
|
2021-01-07T02:35:08.000Z
|
solidity/python/PerformanceUniTestSale.py
|
frankwei98/contracts
|
18fa00ec598058459d96950523c3fc23d2c00bd6
|
[
"Apache-2.0"
] | 47
|
2019-02-13T06:25:37.000Z
|
2021-07-30T05:23:44.000Z
|
solidity/python/PerformanceUniTestSale.py
|
frankwei98/contracts
|
18fa00ec598058459d96950523c3fc23d2c00bd6
|
[
"Apache-2.0"
] | 12
|
2018-09-05T07:13:33.000Z
|
2019-05-28T09:53:06.000Z
|
import Web3Wrapper
import InputGenerator
# Input sweep configuration for the sale-return benchmark; each range is
# sampled uniformly between its minimum and maximum.
# token supply range
MINIMUM_VALUE_SUPPLY = 100
MAXIMUM_VALUE_SUPPLY = 10 ** 34
SAMPLES_COUNT_SUPPLY = 50
# reserve balance range
MINIMUM_VALUE_BALANCE = 100
MAXIMUM_VALUE_BALANCE = 10 ** 34
SAMPLES_COUNT_BALANCE = 50
# reserve weight range (presumably parts-per-million — verify against contract)
MINIMUM_VALUE_WEIGHT = 100000
MAXIMUM_VALUE_WEIGHT = 900000
SAMPLES_COUNT_WEIGHT = 10
# sale amount range
MINIMUM_VALUE_AMOUNT = 1
MAXIMUM_VALUE_AMOUNT = 10 ** 34
SAMPLES_COUNT_AMOUNT = 50
def Main():
    """Sweep calculateSaleReturn over uniform input grids, reporting gas stats.

    Prints running minimum / maximum / average gas usage for every valid
    (supply, balance, weight, amount) combination.
    """
    rangeSupply = InputGenerator.UniformDistribution(MINIMUM_VALUE_SUPPLY, MAXIMUM_VALUE_SUPPLY, SAMPLES_COUNT_SUPPLY)
    rangeBalance = InputGenerator.UniformDistribution(MINIMUM_VALUE_BALANCE, MAXIMUM_VALUE_BALANCE, SAMPLES_COUNT_BALANCE)
    rangeWeight = InputGenerator.UniformDistribution(MINIMUM_VALUE_WEIGHT, MAXIMUM_VALUE_WEIGHT, SAMPLES_COUNT_WEIGHT)
    rangeAmount = InputGenerator.UniformDistribution(MINIMUM_VALUE_AMOUNT, MAXIMUM_VALUE_AMOUNT, SAMPLES_COUNT_AMOUNT)
    testNum = 0
    numOfTests = len(rangeSupply) * len(rangeBalance) * len(rangeWeight) * len(rangeAmount)
    tester = Web3Wrapper.Contract('BancorFormula').tester()
    minGas = float('+inf')
    maxGas = float('-inf')
    totalGas = 0
    countGas = 0
    for supply in rangeSupply:
        for balance in rangeBalance:
            for weight in rangeWeight:
                for amount in rangeAmount:
                    testNum += 1
                    # can only sell up to the entire supply
                    if amount <= supply:
                        try:
                            gas = tester.calculateSaleReturn(supply, balance, weight, amount)
                            minGas = min(minGas, gas)
                            maxGas = max(maxGas, gas)
                            totalGas += gas
                            countGas += 1
                            print('Test {} out of {}: gas = {}, minimum = {}, maximum = {}, average = {}'.format(testNum, numOfTests, gas, minGas, maxGas, totalGas // countGas))
                        except Exception:
                            # the contract call may fail for extreme input
                            # combinations; skip those samples. (This was a
                            # bare `except:`, which also swallowed
                            # KeyboardInterrupt/SystemExit and made the long
                            # sweep impossible to interrupt.)
                            pass


Main()
| 35.163636
| 177
| 0.641675
|
acfe4222bf1b8173f9a647a218772116ee1da311
| 8,002
|
py
|
Python
|
GUIFinal.py
|
ElefthManolis/NASA-Space-Apps-Challenge-2021
|
a2c5109f34b4b53a6295c1afbaa20b68c2830e16
|
[
"Apache-2.0"
] | 1
|
2021-10-09T17:56:16.000Z
|
2021-10-09T17:56:16.000Z
|
GUIFinal.py
|
ElefthManolis/NASA-Space-Apps-Challenge-2021
|
a2c5109f34b4b53a6295c1afbaa20b68c2830e16
|
[
"Apache-2.0"
] | null | null | null |
GUIFinal.py
|
ElefthManolis/NASA-Space-Apps-Challenge-2021
|
a2c5109f34b4b53a6295c1afbaa20b68c2830e16
|
[
"Apache-2.0"
] | null | null | null |
from tkinter import *
from tkinter import filedialog
from PIL import ImageTk, Image
#import matplotlib.pyplot as plt
from matplotlib.figure import Figure
import matplotlib
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import os
# GUI callback functions are defined below; input validation itself
# happens later in checkInputs().
# Function for opening the
# file explorer window
def browseFiles():
    """Open a file picker for the 3D model and show the choice in the UI.

    Defaults the filter to '.stl' files and stores the selected path in
    the module-level ``label_file_explorer`` label, which checkInputs()
    later parses to recover the file name.
    """
    filename = filedialog.askopenfilename(initialdir = os.getcwd(),
                                          title = "Select a File",
                                          filetypes = (("stl files",
                                                        "*.stl*"),
                                                       ("all files",
                                                        "*.*"),
                                                       ("Text files",
                                                        "*.txt*")))
    # Change label contents
    label_file_explorer.configure(text="File Opened: " + filename)
def popup_window_1():
    """Show the 'fun fact' popup about the Lucy mission and angle limits."""
    popup = Toplevel()
    popup.title('Angle between Earth and the asteroid')
    heading = Label(popup, text='Fun Fact!', font='Helvetica')
    heading.pack(pady=5)
    body = Label(popup,
                 text= 'Lucy is the first space mission that will study the trojan asteroids,\n o group of asteroids that leads and follows Jupiter in it’s orbit.\n The mission will launch in October 2021.\n If you want to explore the light curves of the trojans,\n make sure to enter the 3D model of a trojan asteroid,\n and to not exceed the point of 11o in this field,\n as it is not possible to observe them in greater angles,\n due to the geometry of the Earth’s and Jupiter’s orbits.')
    body.pack(fill='x', padx=5, pady=5)
    dismiss = Button(popup, text="Close", command=popup.destroy)
    dismiss.pack(pady=50)
def popup_window_2():
    """Show the informational popup explaining the albedo input."""
    popup = Toplevel()
    popup.title('But what is Albedo?')
    heading = Label(popup, text='Fun Fact!', font='Helvetica')
    heading.pack(pady=5)
    body = Label(popup,
                 text= 'Albedo is the measure of the diffuse reflection of\n solar radiation out of the total solar radiation.\n It is measured on a scale from 0, corresponding to a black body\n that absorbs all incident radiation, to 1,\n corresponding to a body that reflects all incident radiation.')
    body.pack(fill='x', padx=5, pady=5)
    dismiss = Button(popup, text="Close", command=popup.destroy)
    dismiss.pack(pady=50)
# Create the root window
window = Tk()
# Set window title
window.title('ILLUMINAT3D')
# Set window size
window.geometry("1280x720")
#window.eval('tk::PlaceWindow . center')
#Set window background color
window.config(background = "white")
# Illustrative images for the three input fields. The PhotoImage objects
# are kept in module-level names (img1..img3, logo2, info) so tkinter's
# images are not garbage-collected while displayed.
canvas1 = Canvas(window, width = 273, height = 100)
img1 = ImageTk.PhotoImage(Image.open("Initial Rotation.png"))
canvas1.create_image(0, 0, anchor=NW, image=img1)
canvas2 = Canvas(window, width = 273, height = 100)
img2 = ImageTk.PhotoImage(Image.open("Rotation Axis.png"))
canvas2.create_image(0, 0, anchor=NW, image=img2)
canvas3 = Canvas(window, width = 273, height = 100)
img3 = ImageTk.PhotoImage(Image.open("Source-Earth angle.png"))
canvas3.create_image(0, 0, anchor=NW, image=img3)
#Create logo label
logo2 = ImageTk.PhotoImage(Image.open("logo1.png"))
logo_img = Label(window, image = logo2)
# Create a File Explorer label; browseFiles() rewrites its text with the
# chosen path, and checkInputs() parses the file name back out of it
label_file_explorer = Label(window,
                            text = "In this field you can enter the 3D model that you want to study. You can pick one from the NASA’s library, that can be found in this link: https://echo.jpl.nasa.gov/asteroids/shapes/shapes.html .\n You can, also, choose to enter your own 3D model of an asteroid, a basic geometric shape, or everything that you can possibly imagine, but make sure that you upload an “.stl” file.",
                            fg = "blue")
info = PhotoImage(file = r"info.png")
# info buttons open the two explanatory popups
info_button_1 = Button(window,
                       image = info,
                       command = popup_window_1)
info_button_2 = Button(window,
                       image = info,
                       command = popup_window_2)
button_explore = Button(window,
                        text = "Browse Files",
                        command = browseFiles)
# uses the builtin exit() to terminate the application
button_exit = Button(window,
                     text = "Exit",
                     command = exit,
                     fg = "red")
def checkInputs():
    """Validate the form fields; on success run the simulation and plot it.

    Reads the module-level StringVars and the file-explorer label, runs
    the Illuminated model, and embeds the resulting light curve in the
    main window. Returns False on invalid input (the button callback
    ignores the return value, so bad input silently does nothing).
    """
    rotation_axis = rotAxis.get().split(", ")
    initial_rotation = initRot.get().split(", ")
    try:
        float(omega.get())
        initial_rotation = [float(value) for value in initial_rotation]
        rotation_axis = [float(value) for value in rotation_axis]
        int(frames.get())
        float(albedo.get())
    except ValueError:
        return False
    albedo_value = float(albedo.get())
    if not 0 <= albedo_value <= 1:
        return False
    from Illuminated_Class_Git import Illuminated
    model = Illuminated(label_file_explorer.cget("text").split("/")[-1],
                        initial_rotation, rotation_axis,
                        int(frames.get()), albedo_value, float(omega.get()))
    x_axis, y_axis = model.execution()
    matplotlib.use("TkAgg")
    figure = Figure(figsize=(4, 4), dpi=100)
    plot = figure.add_subplot(1, 1, 1)
    # reference marker
    plot.plot(0.5, 0.3, color="blue", marker="o", linestyle="")
    # the computed light curve
    plot.plot(x_axis, y_axis, color="red", marker=".", linestyle="")
    # embed the figure in the main window via a tk canvas widget
    canvas = FigureCanvasTkAgg(figure, window)
    canvas.get_tk_widget().grid(row=6, column=5)
# runs validation + simulation when clicked (see checkInputs above)
button_run = Button(window,
                    text = "Run Program",
                    command = checkInputs)
# labels describing each of the five input fields
label_input1 = Label(window,
                     text = "Source-Earth Angle (e.g. 90):",
                     width = 60, height = 3,
                     fg = "blue")
label_input2 = Label(window,
                     text = "Initial Rotation (e.g. 0, 90, 90, 20):",
                     width = 60, height = 3,
                     fg = "blue")
label_input3 = Label(window,
                     text = "Rotation Axis (e.g. 90, 90, 0):",
                     width = 60, height = 3,
                     fg = "blue")
label_input4 = Label(window,
                     text = "Frames:",
                     width = 60, height = 3,
                     fg = "blue")
label_input5 = Label(window,
                     text = "Albedo (from 0 to 1):",
                     width = 60, height = 3,
                     fg = "blue")
# backing variables for the entry widgets; read by checkInputs()
omega = StringVar()
initRot = StringVar()
rotAxis = StringVar()
frames = StringVar()
albedo = StringVar()
firstVariableEntry = Entry(window, textvariable = omega)
secondVariableEntry = Entry(window, textvariable = initRot)
thirdVariableEntry = Entry(window, textvariable = rotAxis)
fourthVariableEntry = Entry(window, textvariable = frames)
albedoEntry = Entry(window, textvariable = albedo)
# grid layout: labels in column 0, entries in column 1, images in column 2
logo_img.grid(column=1, row=0)
label_file_explorer.grid(row = 2, column=0, columnspan=4)
button_explore.grid(column = 1, row = 3)
info_button_1.grid(column=3, row=4)
info_button_2.grid(column=3, row=5)
label_input1.grid(column = 0, row = 4, sticky = W, pady = 2)
label_input2.grid(column = 0, row = 8, sticky = W, pady = 2)
label_input3.grid(column = 0, row = 6, sticky = W, pady = 2)
canvas1.grid(column = 2, row = 8, sticky = W, pady = 2)
canvas2.grid(column = 2, row = 6, sticky = W, pady = 2)
canvas3.grid(column = 2, row = 4, sticky = W, pady = 2)
label_input4.grid(column = 0, row = 7, sticky = W, pady = 2)
label_input5.grid(column = 0, row = 5, sticky = W, pady = 2)
firstVariableEntry.grid(row = 4, column = 1, pady = 2)
secondVariableEntry.grid(row = 8, column = 1, pady = 2)
thirdVariableEntry.grid(row = 6, column = 1, pady = 2)
fourthVariableEntry.grid(row = 7, column = 1, pady = 2)
albedoEntry.grid(row = 5, column = 1, pady = 2)
button_run.grid(column = 1,row = 9)
button_exit.grid(column = 1,row = 10)
# enter the tk event loop; blocks until the window is closed
window.mainloop()
| 34.791304
| 492
| 0.613347
|
acfe428ba1b267c754471d0574002d2e8eb6b78b
| 8,574
|
py
|
Python
|
generator/lib/commands.py
|
tedteng/ops-toolbelt
|
6b84e8425ccb8a7d2be19f813b34cbd58c565a65
|
[
"Apache-2.0"
] | null | null | null |
generator/lib/commands.py
|
tedteng/ops-toolbelt
|
6b84e8425ccb8a7d2be19f813b34cbd58c565a65
|
[
"Apache-2.0"
] | null | null | null |
generator/lib/commands.py
|
tedteng/ops-toolbelt
|
6b84e8425ccb8a7d2be19f813b34cbd58c565a65
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Copyright (c) 2019 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from lib import components, dockerfile
class Command:
    """Base class pairing a dockerfile instruction with its components."""

    def __init__(self, components, dockerfile_instruction):
        self.components = components
        self.dockerfile_instruction = dockerfile_instruction

    def get_lines(self):
        """Yield the dockerfile lines for this command (subclass hook)."""
        pass

    def get_tools(self):
        """A fresh list of this command's components."""
        return list(self.components)

    def get_tool_names(self):
        return [tool.name for tool in self.components]

    def get_tool_infos(self):
        return [tool.get_info() for tool in self.components]

    def join(self, other):
        """Fold the other command's components into this one."""
        return self.components.extend(other.components)

    def version_check(self):
        pass
class AptGet(Command):
    """Install apt packages, then clean the package lists."""

    def __init__(self, components):
        Command.__init__(self, components, dockerfile.RUN)

    def get_lines(self):
        packages = ' '.join(self.get_tool_names())
        yield "apt-get --yes update && apt-get --yes install {}".format(packages)
        yield "rm -rf /var/lib/apt/lists"
class Curl(Command):
    """Download tools with curl, optionally running a follow-up command."""

    def __init__(self, components):
        Command.__init__(self, components, dockerfile.RUN)

    def get_lines(self):
        for tool in self.components:
            destination = tool.get_to()
            if destination is not None:
                line = "curl -sLf {} -o {}".format(tool.get_from(), destination)
            else:
                # no explicit target: drop into /bin and mark executable
                line = "curl -sLf {} -o /bin/{} && chmod 755 /bin/{}".format(
                    tool.get_from(), tool.get_name(), tool.get_name())
            if tool.get_command() is not None:
                line += "; {}".format(tool.get_command().rstrip())
            yield line
class Execute(Command):
    """Run arbitrary shell commands."""

    def __init__(self, components):
        Command.__init__(self, components, dockerfile.RUN)

    def get_lines(self):
        for component in self.components:
            yield component.get_command().rstrip()
class Copy(Command):
    """COPY source paths into the image."""

    def __init__(self, components):
        Command.__init__(self, components, dockerfile.COPY)

    def get_lines(self):
        return ("{} {}".format(c.get_from(), c.get_to())
                for c in self.components)
class Pip(Command):
    """Install python packages with pip."""

    def __init__(self, components):
        Command.__init__(self, components, dockerfile.RUN)

    def get_lines(self):
        packages = ' '.join(self.get_tool_names())
        yield "pip install {}".format(packages)
class Export(Command):
    """Emit an ENV line listing the exported variables."""

    def __init__(self, components):
        Command.__init__(self, components, dockerfile.ENV)

    def get_lines(self):
        yield ' '.join(self.get_tool_names())
class AddAptGetRepo(Command):
    """Register extra apt repositories (with keys), then remove the helpers."""

    def __init__(self, components):
        Command.__init__(self, components, dockerfile.RUN)

    def get_lines(self):
        lines = [
            'apt-get --yes update && apt-get --yes install lsb-release gnupg apt-transport-https',
        ]
        for component in self.components:
            release_prefix = component.get_release_prefix()
            repo_name = component.get_name()
            repo_url = component.get_repo_url()
            key_url = component.get_key_url()
            # resolve the distro codename at build time, add the source list
            # entry and import the signing key
            lines.append('REPO="{}$(lsb_release -cs)"'.format(release_prefix))
            lines.append('echo "deb {} $REPO main" | tee /etc/apt/sources.list.d/{}.list'.format(repo_url, repo_name))
            lines.append('curl -sL {} | apt-key add -'.format(key_url))
        # drop the helper packages again and clean the lists
        lines.append("apt-get --yes --purge remove lsb-release gnupg apt-transport-https")
        lines.append("rm -rf /var/lib/apt/lists")
        return iter(lines)
class Git(Command):
    """Clone git repositories, optionally checking out a pinned version."""

    def __init__(self, components):
        Command.__init__(self, components, dockerfile.RUN)

    def get_lines(self):
        lines = []
        for component in self.components:
            lines.append("git -c http.sslVerify=false clone {} {}".format(
                component.get_from(), component.get_to()))
            if component.version is not None:
                lines.append('git -C {} checkout {}'.format(
                    component.get_to(), component.get_version()))
        return iter(lines)
class InfoGenerator:
    """Derives help metadata and the welcome message from parsed commands."""

    def __init__(self, commands):
        self.commands = commands

    def generate_help_command_info(self):
        """JSON mapping of install method -> tool metadata for the help tool."""
        apt_get_commands = []
        pip_commands = []
        downloaded_commands = []
        for command in self.commands:
            if isinstance(command, AptGet):
                apt_get_commands.extend(InfoGenerator._get_package_name_and_bins(command))
            if isinstance(command, Pip):
                pip_commands.extend(InfoGenerator._get_package_name_and_bins(command))
            if isinstance(command, (Curl, Git)):
                # downloaded tools only appear if they declare an info string
                command_tools = command.get_tools()
                for tool in command_tools:
                    if tool.get_info() is not None:
                        downloaded_commands.append((tool.get_name(), tool.get_version(), tool.get_info()))
        command_config= {
            "apt": apt_get_commands,
            "pip": pip_commands,
            "downloaded": downloaded_commands,
        }
        return json.dumps(command_config)

    def generate_welcome_message(self):
        """Shell-escaped multi-line welcome text listing the available tools."""
        basic_tools = []
        custom_shell_commands = []
        exported_environments = []
        for command in self.commands:
            if isinstance(command, (AptGet, Copy, Curl, Pip, Git)):
                basic_tools.extend([info for info in command.get_tool_infos() if info is not None])
            elif isinstance(command, (Execute)):
                custom_shell_commands.extend([info for info in command.get_tool_infos() if info is not None])
            elif isinstance(command, Export):
                exported_environments.extend([info for info in command.get_tool_infos() if info is not None])
        # the literal "\n\" sequences are emitted verbatim so the message
        # remains a valid multi-line value in the generated dockerfile
        return """
{}\\n\\
\\n\\
{}\\n\\
The following variables have been exported:\\n\\
{}""".format(' '.join(basic_tools), '\\n\\\n'.join(custom_shell_commands), ' '.join(exported_environments))

    @staticmethod
    def _get_package_name_and_bins(command):
        """(name, provided binaries) pairs for each tool of *command*."""
        command_tools = command.get_tools()
        to_return = []
        for tool in command_tools:
            name = tool.get_name()
            binaries = tool.get_provided_apps()
            to_return.append((name, binaries))
        return to_return
class CommandFactory:
    """Builds Command instances from single-entry config mappings."""

    @staticmethod
    def create(config):
        """Instantiate the Command registered for the config's single key.

        Exits the process with status 1 for unknown command names.
        """
        name = next(iter(config.keys()))
        pair = registry.get(name)
        if pair is None:
            print("{} is not a supported dockerfile config command".format(name))
            exit(1)
        command_class = pair[0]
        component_classes = list(pair[1])
        parser = components.ComponentConfigParser(*component_classes)
        parsed_components = parser.parse_components(config[name])
        return command_class(parsed_components)
class CommandRegistry:
    """Maps a command name to a (command_class, [component_config_classes]) pair."""

    def __init__(self):
        self._registry = {}

    def register_command(self, name, command_class, *argv):
        """Associate *name* with its command class and component-config classes."""
        self._registry[name] = (command_class, list(argv))

    def get(self, name):
        """Return the registered (class, component classes) pair, or None."""
        return self._registry.get(name)
# Global command registry: maps each dockerfile-config command name to its
# implementing class plus the component-config classes used to parse that
# command's section of the configuration file.
registry = CommandRegistry()
registry.register_command("apt-get", AptGet, components.StringComponentConfig, components.DictComponentConfig)
registry.register_command("curl", Curl, components.ToolConfig)
registry.register_command("bash", Execute, components.BashCommandConfig)
registry.register_command("copy", Copy, components.ToolConfig)
registry.register_command("pip", Pip, components.StringComponentConfig, components.DictComponentConfig)
registry.register_command("env", Export, components.StringComponentConfig)
registry.register_command("add-apt-repo", AddAptGetRepo, components.AptRepoConfig)
registry.register_command("git", Git, components.ToolConfig)
| 37.770925
| 201
| 0.652787
|
acfe42c154871b7fef7d97bb5e19390274a37dac
| 3,368
|
py
|
Python
|
examples/spn_custom_leaf.py
|
deeprob-org/deeprob-kit
|
c46050eb8047dcfa0cc2420887624184c042e32e
|
[
"MIT"
] | 38
|
2021-09-27T11:39:23.000Z
|
2022-02-09T15:33:44.000Z
|
examples/spn_custom_leaf.py
|
deeprob-org/deeprob-kit
|
c46050eb8047dcfa0cc2420887624184c042e32e
|
[
"MIT"
] | 14
|
2021-09-27T15:04:46.000Z
|
2021-12-08T21:08:01.000Z
|
examples/spn_custom_leaf.py
|
deeprob-org/deeprob-kit
|
c46050eb8047dcfa0cc2420887624184c042e32e
|
[
"MIT"
] | 3
|
2021-09-30T08:05:06.000Z
|
2022-01-02T04:44:19.000Z
|
import numpy as np
import scipy.stats as ss
import deeprob.spn.structure as spn
import deeprob.spn.algorithms as spnalg
import deeprob.spn.utils as spnutils
from deeprob.spn.learning import learn_spn
class Cauchy(spn.Leaf):
    """SPN leaf node modelling a univariate Cauchy distribution.

    Wraps ``scipy.stats.cauchy`` with location ``loc`` and scale ``scale``.
    NaN entries in the input are treated as marginalized-out values.
    """
    LEAF_TYPE = spn.LeafType.CONTINUOUS

    def __init__(self, scope: int, loc: float = 0.0, scale: float = 1.0):
        """Initialize the leaf over the given variable scope."""
        super().__init__(scope)
        self.loc = loc
        self.scale = scale

    def fit(self, data: np.ndarray, domain: tuple, **kwargs):
        """Estimate loc and scale from data by maximum likelihood."""
        self.loc, self.scale = ss.cauchy.fit(data)

    def em_init(self, random_state: np.random.RandomState):
        # BUG FIX: raising the NotImplemented *constant* raises a TypeError;
        # NotImplementedError is the proper exception class.
        raise NotImplementedError("EM parameters initialization not yet implemented for Cauchy distributions")

    def em_step(self, stats: np.ndarray, data: np.ndarray, step_size: float):
        # BUG FIX: NotImplementedError instead of NotImplemented (see em_init).
        raise NotImplementedError("EM step not yet implemented for Cauchy distributions")

    def likelihood(self, x: np.ndarray) -> np.ndarray:
        """Per-sample likelihood; NaN (marginalized) entries contribute 1."""
        ls = np.ones([len(x), 1], dtype=np.float32)
        mask = np.isnan(x)
        ls[~mask] = ss.cauchy.pdf(x[~mask], loc=self.loc, scale=self.scale)
        return ls

    def log_likelihood(self, x: np.ndarray) -> np.ndarray:
        """Per-sample log-likelihood; NaN (marginalized) entries contribute 0."""
        # BUG FIX: marginalized entries must have log-likelihood log(1) = 0,
        # but the buffer was initialised with np.ones (i.e. 1.0); use zeros.
        lls = np.zeros([len(x), 1], dtype=np.float32)
        mask = np.isnan(x)
        lls[~mask] = ss.cauchy.logpdf(x[~mask], loc=self.loc, scale=self.scale)
        return lls

    def mpe(self, x: np.ndarray) -> np.ndarray:
        """Fill NaN entries with the distribution's mode (the location)."""
        x = np.copy(x)
        mask = np.isnan(x)
        x[mask] = self.loc
        return x

    def sample(self, x: np.ndarray) -> np.ndarray:
        """Fill NaN entries with random draws from the fitted Cauchy."""
        x = np.copy(x)
        mask = np.isnan(x)
        x[mask] = ss.cauchy.rvs(loc=self.loc, scale=self.scale, size=np.count_nonzero(mask))
        return x

    def moment(self, k: int = 1) -> float:
        """Return the k-th moment (undefined for Cauchy, i.e. NaN for k >= 1)."""
        # BUG FIX: loc/scale were not forwarded, so this always described the
        # standard Cauchy instead of the fitted one.
        return ss.cauchy.moment(k, loc=self.loc, scale=self.scale)

    def params_count(self) -> int:
        """Two trainable parameters: loc and scale."""
        return 2

    def params_dict(self) -> dict:
        """Return the parameters for JSON serialization."""
        return {'loc': self.loc, 'scale': self.scale}
if __name__ == '__main__':
    # End-to-end example: learn an SPN with the custom Cauchy leaf, evaluate
    # it, serialize it to JSON and reload it with the same custom leaf class.
    # Sample some random data
    random_state = np.random.RandomState(42)
    n_samples, n_features = 1000, 4
    data = random_state.randn(n_samples, n_features)
    # Learn a SPN from data using Cauchy distributions at leaves
    distributions = [Cauchy] * n_features
    domains = [(-9.0, 9.0)] * n_features
    root = learn_spn(
        data, distributions, domains,
        learn_leaf='mle',  # The MLE learn leaf method will use the fit() method of leaf's class
        random_state=random_state  # Set the random state manually
    )
    # Compute the average likelihood
    ls = spnalg.likelihood(root, data)
    print("Average Likelihood: {:.4f}".format(np.mean(ls)))
    # Print some statistics about the model's structure and parameters
    print("SPN structure and parameters statistics:")
    print(spnutils.compute_statistics(root))
    # Save the model to a JSON file
    spn_filename = 'spn-custom-cauchy.json'
    print("Saving the SPN structure and parameters to {} ...".format(spn_filename))
    spn.save_spn_json(root, spn_filename)
    del root
    # Reload the model from file
    # Note that we need to specify the custom leaf
    print("Re-loading the SPN structure and parameters from {} ...".format(spn_filename))
    root = spn.load_spn_json('spn-custom-cauchy.json', leaves=[Cauchy])
    # The reloaded model should reproduce the same average likelihood
    ls = spnalg.likelihood(root, data)
    print("Average Likelihood: {:.4f}".format(np.mean(ls)))
| 35.083333
| 105
| 0.65291
|
acfe437efd31384d38fe8badf91721c153fa3cdb
| 9,075
|
py
|
Python
|
tests/test_bytehand.py
|
AOrazaev/bytehand
|
f1bb7591e4192238cf04137a3c4a26512c188078
|
[
"MIT"
] | null | null | null |
tests/test_bytehand.py
|
AOrazaev/bytehand
|
f1bb7591e4192238cf04137a3c4a26512c188078
|
[
"MIT"
] | null | null | null |
tests/test_bytehand.py
|
AOrazaev/bytehand
|
f1bb7591e4192238cf04137a3c4a26512c188078
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import bytehand
import unittest
import urllib
import requests
import urlparse
import json
class TestCaseWithPatchedRequests(unittest.TestCase):
    """Base TestCase that monkey-patches ``requests.request`` for tests.

    The patched function records the URL, HTTP method and POST body of every
    call and returns a canned response selected by the URL path suffix
    (balance / signatures / signature / default).  Note: this module is
    written for Python 2 (``urlparse`` module, ``urllib.urlencode``).
    """
    # Default canned response (e.g. for send_multi calls).
    ok_response = requests.Response()
    ok_response.status_code = 200
    ok_response._content = (
        '[{"status": "0", "description": "4242424242"}]'
    )
    # Returned for URLs whose path ends in "balance".
    balance_response = requests.Response()
    balance_response.status_code = 200
    balance_response._content = (
        '{"status": "0", "description": "100500.00"}'
    )
    # Returned for URLs whose path ends in "signatures".
    signature_response = requests.Response()
    signature_response.status_code = 200
    signature_response._content = (
        '[{"id": 42, "description": "Test signature",'
        ' "text": "Peter", "created_at": "2042-12-31",'
        ' "state": "ACCEPTED"}]'
    )
    # Returned for URLs whose path ends in "signature".
    new_signature_response = requests.Response()
    new_signature_response.status_code = 200
    new_signature_response._content = (
        '{"status": "0", "description": "42"}'
    )
    def setUp(self):
        # Keep the real implementation so tearDown can restore it.
        self.requests_request = requests.request
        self.last_url = None
        self.request_method = None
        self.post_data = None
        self.request_urls = []
        def patched_request(method, url, **kwargs):
            # Record the call, then dispatch a canned response by path suffix.
            self.last_url = url
            self.request_urls.append(url)
            self.post_data = kwargs.get('data')
            self.request_method = method
            if urlparse.urlparse(url).path.endswith('balance'):
                return self.balance_response
            elif urlparse.urlparse(url).path.endswith('signatures'):
                return self.signature_response
            elif urlparse.urlparse(url).path.endswith('signature'):
                return self.new_signature_response
            return self.ok_response
        requests.request = patched_request
    def tearDown(self):
        # Restore the real requests.request and drop per-test state.
        requests.request = self.requests_request
        del self.requests_request
        del self.last_url
        del self.post_data
class TestBytehandSendSms(TestCaseWithPatchedRequests):
    """Tests for the module-level bytehand.send_sms helper."""
    def test_send_sms(self):
        # A valid call hits send_multi with id/key in the query string and
        # a JSON-encoded message list as the POST body.
        assert self.last_url == None
        bytehand.send_sms(7771234567, 'Patric', 'Hello, Kate!',
                          1342, 'Password')
        expected_url = urlparse.urljoin(bytehand.API_URL,
                                        'send_multi?id=1342&key=Password')
        self.assertEqual(self.last_url, expected_url)
        self.assertEqual(
            json.dumps([{'to': '7771234567', 'from': 'Patric',
                         'text': 'Hello, Kate!'}]),
            self.post_data
        )
    def test_send_sms_raises(self):
        # Invalid arguments raise TypeError and no HTTP request is issued.
        assert self.last_url == None
        send = lambda: bytehand.send_sms('bad_phone_number',
                                         'Hello, Kate!',
                                         'Patric', 1242, 'Pa$$w0rd')
        self.assertRaises(TypeError, send)
        self.assertEqual(self.last_url, None)
        send = lambda: bytehand.send_sms(7771234567, '', 'Patric',
                                         1242, 'Pa$$w0rd')
        self.assertRaises(TypeError, send)
        self.assertEqual(self.last_url, None)
        send = lambda: bytehand.send_sms(7771234567, 'Hello, Kate',
                                         '', 1342, 'Pa$$w0rd')
        self.assertRaises(TypeError, send)
        self.assertEqual(self.last_url, None)
        send = lambda: bytehand.send_sms(7771234567, 'Hello, Kate',
                                         'Patric', 'id1342', 'Pa$$w0rd')
        self.assertRaises(TypeError, send)
        self.assertEqual(self.last_url, None)
class TestBytehandConnection(TestCaseWithPatchedRequests):
    """Tests for the bytehand.Connection API wrapper (send, balance,
    signatures management), asserting the exact URLs and bodies produced."""
    def test_connection(self):
        # send() delegates to the send_multi endpoint with a one-element list.
        conn = bytehand.Connection(userid=1342, key='MYKEY4321')
        msg = {"to": "77712345678",
               "from": "Mom",
               "text": "Don't be late!"}
        status = conn.send(to=msg['to'], signature=msg['from'],
                           text=msg['text'])
        self.assertEqual(
            self.last_url,
            urlparse.urljoin(bytehand.API_URL,
                             'send_multi?id=1342&key=MYKEY4321')
        )
        expected_data = json.dumps([msg])
        self.assertEqual(self.post_data, expected_data)
    def test_send_multi(self):
        # send_multi() posts the message list verbatim as JSON.
        conn = bytehand.Connection(userid=1342, key='MYKEY4321')
        msgs = [{"to": "777712345678", "from": "Mom",
                 "text": "Don't be late!"},
                {"to": "749512345678", "from": "Dad",
                 "text": "Don't listen your mother."}]
        status = conn.send_multi(msgs)
        self.assertEqual(
            self.last_url,
            urlparse.urljoin(bytehand.API_URL,
                             'send_multi?id=1342&key=MYKEY4321')
        )
        expected_data = json.dumps(msgs)
        self.assertEqual(self.post_data, expected_data)
    def test_details(self):
        # details() queries the details endpoint with the message id.
        conn = bytehand.Connection(userid=1342, key='MYKEY4321')
        details = conn.details('12345')
        parsed_url = urlparse.urlparse(self.last_url)
        self.assertEqual(
            parsed_url._replace(query='').geturl(),
            urlparse.urljoin(bytehand.API_URL, 'details')
        )
        self.assertEqual(
            dict(kv.split('=') for kv in parsed_url.query.split('&')),
            dict(id='1342', key='MYKEY4321', message='12345')
        )
    def test_balance(self):
        # balance() returns the canned description field from the response.
        conn = bytehand.Connection(userid=1342, key='MYKEY4321')
        balance = conn.balance()
        self.assertEqual(balance, '100500.00')
        parsed_url = urlparse.urlparse(self.last_url)
        self.assertEqual(
            parsed_url._replace(query='').geturl(),
            urlparse.urljoin(bytehand.API_URL, 'balance')
        )
        self.assertEqual(
            dict(kv.split('=') for kv in parsed_url.query.split('&')),
            dict(id='1342', key='MYKEY4321')
        )
    def test_signatures(self):
        # signatures() validates the state argument and upper-cases it in
        # the query string; omitting state omits the parameter.
        conn = bytehand.Connection(userid=1342, key='MYKEY4321')
        self.assertRaises(
            ValueError,
            lambda: conn.signatures(state='unknown_state')
        )
        def check_signature_url(state=None):
            parsed_url = urlparse.urlparse(self.last_url)
            self.assertEqual(
                parsed_url._replace(query='').geturl(),
                urlparse.urljoin(bytehand.API_URL, 'signatures')
            )
            expect = dict(id='1342', key='MYKEY4321')
            expect.update({'state': state} if state is not None else {})
            self.assertEqual(
                dict(kv.split('=') for kv in parsed_url.query.split('&')),
                expect
            )
        conn.signatures(state='new')
        check_signature_url(state='NEW')
        conn.signatures(state='accepted')
        check_signature_url(state='ACCEPTED')
        conn.signatures(state='REJECTED')
        check_signature_url(state='REJECTED')
        conn.signatures()
        check_signature_url()
    def test_signature(self):
        # signature(text) looks up a single signature by its text and raises
        # LookupError when no signature matches.
        conn = bytehand.Connection(userid=1342, key='MYKEY4321')
        result = conn.signature('Peter')
        parsed_url = urlparse.urlparse(self.last_url)
        self.assertEqual(
            parsed_url._replace(query='').geturl(),
            urlparse.urljoin(bytehand.API_URL, 'signatures')
        )
        self.assertEqual(
            dict(kv.split('=') for kv in parsed_url.query.split('&')),
            dict(id='1342', key='MYKEY4321')
        )
        self.assertEqual(result, self.signature_response.json()[0])
        self.assertRaises(
            LookupError,
            lambda: conn.signature('NoSuchSignature')
        )
    def test_new_signature(self):
        # new_signature() POSTs the text in the query string and the
        # description in the body (unicode description supported).
        conn = bytehand.Connection(userid=1342, key='MYKEY4321')
        test_sign = 'test'
        test_description = 'юникод: некоторая тестовая подпись'
        conn.new_signature(test_sign, test_description)
        self.assertEqual(self.request_method, 'post')
        parsed_url = urlparse.urlparse(self.last_url)
        self.assertEqual(
            parsed_url._replace(query='').geturl(),
            urlparse.urljoin(bytehand.API_URL, 'signature')
        )
        self.assertEqual(
            parsed_url.query,
            urllib.urlencode(dict(id='1342', key='MYKEY4321', text=test_sign))
        )
        self.assertEqual(self.post_data, {'description': test_description})
    def test_delete_signature(self):
        # delete_signature() issues a DELETE with the resolved signature id.
        conn = bytehand.Connection(userid=1342, key='MYKEY4321')
        conn.delete_signature('Peter')
        self.assertEqual(self.request_method, 'delete')
        parsed_url = urlparse.urlparse(self.last_url)
        self.assertEqual(
            parsed_url._replace(query='').geturl(),
            urlparse.urljoin(bytehand.API_URL, 'signature')
        )
        self.assertEqual(
            dict(kv.split('=') for kv in parsed_url.query.split('&')),
            dict(id='1342', key='MYKEY4321', signature='42')
        )
# Run the bytehand test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 34.770115
| 78
| 0.582369
|
acfe44009f7765aa273c463bb3565012d37a363e
| 474
|
py
|
Python
|
django/cantusdb_project/articles/views.py
|
DDMAL/CantusDB
|
63c7b8df3c703008bd331097c7fb5e72c8890bb0
|
[
"MIT"
] | 2
|
2020-10-16T09:50:54.000Z
|
2021-07-29T08:38:34.000Z
|
django/cantusdb_project/articles/views.py
|
DDMAL/CantusDB
|
63c7b8df3c703008bd331097c7fb5e72c8890bb0
|
[
"MIT"
] | 65
|
2020-05-11T19:22:30.000Z
|
2022-03-30T22:30:02.000Z
|
django/cantusdb_project/articles/views.py
|
DDMAL/CantusDB
|
63c7b8df3c703008bd331097c7fb5e72c8890bb0
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.views.generic import DetailView, ListView
from articles.models import Article
class ArticleDetailView(DetailView):
model = Article
context_object_name = "article"
template_name = "article_detail.html"
class ArticleListView(ListView):
model = Article
queryset = Article.objects.order_by("-date_created")
paginate_by = 100
context_object_name = "articles"
template_name = "article_list.html"
| 26.333333
| 56
| 0.761603
|
acfe449a60ab82ad1da091019ce6e0c885459091
| 5,490
|
py
|
Python
|
ac2/plugins/metadata/lastfm.py
|
schnabel/audiocontrol2
|
baee6def6f6d25b8da1bcdacb6b6b6714aafce33
|
[
"MIT"
] | 36
|
2020-01-10T23:59:03.000Z
|
2022-03-28T07:04:16.000Z
|
ac2/plugins/metadata/lastfm.py
|
schnabel/audiocontrol2
|
baee6def6f6d25b8da1bcdacb6b6b6714aafce33
|
[
"MIT"
] | 22
|
2019-11-04T05:46:52.000Z
|
2022-03-22T08:18:59.000Z
|
ac2/plugins/metadata/lastfm.py
|
schnabel/audiocontrol2
|
baee6def6f6d25b8da1bcdacb6b6b6714aafce33
|
[
"MIT"
] | 16
|
2020-06-12T16:34:51.000Z
|
2022-01-23T17:24:11.000Z
|
'''
Copyright (c) 2019 Modul 9/HiFiBerry
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import time
import logging
import datetime
from threading import Thread
from usagecollector.client import report_usage
from ac2.plugins.metadata import MetadataDisplay
import pylast
class ScrobbleSender(Thread):
    """Background thread that scrobbles a single track.

    Scrobbling runs in its own thread so a slow or failing network call
    never blocks metadata processing; failures are logged and swallowed.
    """

    def __init__(self, lastfm, metadata):
        """
        :param lastfm: pylast network object used to scrobble
        :param metadata: song metadata providing .artist and .title
        """
        super().__init__()
        self.lastfm = lastfm
        self.metadata = metadata

    def run(self):
        try:
            logging.info("scrobbling " + str(self.metadata))
            # Last.fm expects the scrobble time as a unix timestamp
            unix_timestamp = int(time.mktime(
                datetime.datetime.now().timetuple()))
            self.lastfm.scrobble(
                artist=self.metadata.artist,
                title=self.metadata.title,
                timestamp=unix_timestamp)
        except Exception as e:
            # Best-effort: log and give up; the thread simply finishes.
            logging.error("Could not scrobble %s/%s: %s",
                          self.metadata.artist,
                          self.metadata.title,
                          e)
            # BUG FIX: removed stray "self.network = None" left here by a
            # copy/paste from LastFMScrobbler; this class has no 'network'
            # attribute and nothing ever read it.
class LastFMScrobbler(MetadataDisplay):
    """Metadata plugin that scrobbles played songs to Last.fm or Libre.fm.

    A song is scrobbled only after it has been listened to for more than
    30 seconds, and the scrobble itself happens asynchronously via
    ScrobbleSender.
    """

    def __init__(self, api_key, api_secret,
                 username, password,
                 password_hash=None,
                 network="lastfm"):
        """Store credentials; the pylast network object is created lazily.

        :param password_hash: pre-computed pylast md5 hash; when None it is
            derived from *password*
        :param network: "lastfm" or "librefm" (case-insensitive)
        """
        super().__init__()
        if password_hash is None:
            password_hash = pylast.md5(password)
        self.username = username
        self.password_hash = password_hash
        self.networkname = network.lower()
        self.api_key = api_key
        self.api_secret = api_secret
        self.current_metadata = None
        self.starttime = 0
        self.network = None

    def get_network(self):
        """Return the cached pylast network object, creating it on first use.

        :raises RuntimeError: if the configured network name is unknown
        """
        if self.network is not None:
            return self.network

        if self.networkname == "lastfm":
            self.network = pylast.LastFMNetwork(
                api_key=self.api_key,
                api_secret=self.api_secret,
                username=self.username,
                password_hash=self.password_hash)
        # BUG FIX: was "self.netnetworkname", which raised AttributeError and
        # made it impossible to select Libre.fm.
        elif self.networkname == "librefm":
            self.network = pylast.LibreFMNetwork(
                api_key=self.api_key,
                api_secret=self.api_secret,
                username=self.username,
                password_hash=self.password_hash)
        else:
            raise RuntimeError("Network {} unknown".format(self.networkname))

        if self.network is not None:
            self.network.enable_caching()

        return self.network

    def love(self, love):
        """Love (love=True) or unlove the currently playing track.

        :return: True on success, False when the remote call failed
        """
        try:
            track = self.get_network().get_track(self.current_metadata.artist,
                                                 self.current_metadata.title)
            if love:
                logging.info("sending love to Last.FM")
                track.love()
                report_usage("audiocontrol_lastfm_love", 1)
            else:
                logging.info("sending unlove to Last.FM")
                track.unlove()
                report_usage("audiocontrol_lastfm_love", 1)
        except Exception as e:
            logging.warning("got exception %s while love/unlove", e)
            return False

        return True

    def notify(self, metadata):
        """
        Scrobble metadata of last song, store meta data of the current song
        """
        if metadata is not None and metadata.sameSong(self.current_metadata):
            self.current_metadata = metadata
            logging.debug("updated metadata for current song, not scrobbling now")
            return

        # Check if the last song was played at least 30 seconds, otherwise
        # don't scrobble it'
        now = time.time()
        listening_time = (now - self.starttime)
        lastsong_md = None
        if listening_time > 30:
            lastsong_md = self.current_metadata
        else:
            # BUG FIX: previously logged lastsong_md, which is always None on
            # this branch; log the song that is actually being skipped.
            logging.debug("not yet logging %s, not listened for at least 30s",
                          self.current_metadata)

        self.starttime = now
        logging.info("new song: %s", metadata)
        self.current_metadata = metadata

        if (lastsong_md is not None) and not(lastsong_md.is_unknown()):
            sender = ScrobbleSender(self.get_network(), lastsong_md)
            sender.start()
            report_usage("audiocontrol_lastfm_scrobble", 1)
        else:
            logging.info("no track data, not scrobbling %s", lastsong_md)

    def notify_volume(self, volume):
        # Volume changes are irrelevant for scrobbling.
        pass

    def __str__(self):
        return "lastfmscrobbler@{}".format(self.networkname)
| 34.3125
| 82
| 0.618033
|
acfe458f9326760072682aa3b97b9c696fd72aab
| 5,777
|
py
|
Python
|
neuralpredictors/layers/activations.py
|
kellirestivo/neuralpredictors
|
57205a90d2e3daa5f8746c6ef6170be9e35cb5f5
|
[
"MIT"
] | null | null | null |
neuralpredictors/layers/activations.py
|
kellirestivo/neuralpredictors
|
57205a90d2e3daa5f8746c6ef6170be9e35cb5f5
|
[
"MIT"
] | null | null | null |
neuralpredictors/layers/activations.py
|
kellirestivo/neuralpredictors
|
57205a90d2e3daa5f8746c6ef6170be9e35cb5f5
|
[
"MIT"
] | null | null | null |
import logging
from torch import nn as nn
from torch.nn import functional as F
import torch
import numpy as np
logger = logging.getLogger(__name__)
def elu1(x):
    """ELU shifted up by one, elu1(x) = elu(x) + 1, so the output is positive.

    Note: the ELU is applied in place, mutating ``x``.
    """
    shifted = F.elu(x, inplace=True)
    return shifted + 1.0
class Elu1(nn.Module):
    """
    Elu activation function shifted by 1 to ensure that the
    output stays positive. That is:
    Elu1(x) = Elu(x) + 1
    """

    def forward(self, x):
        # same computation as the module-level elu1 helper, inlined here
        return F.elu(x, inplace=True) + 1.0
def log1exp(x):
    """Elementwise log(1 + exp(x)) (the softplus function)."""
    return torch.log(torch.exp(x) + 1.0)
class Log1Exp(nn.Module):
    """Module computing log(1 + exp(x)) elementwise."""

    def forward(self, x):
        # same computation as the module-level log1exp helper, inlined here
        return torch.log(1.0 + torch.exp(x))
def adaptive_elu(x, xshift, yshift):
    """ELU shifted by ``xshift`` horizontally and ``yshift`` vertically.

    Note: the ELU is applied in place on the intermediate ``x - xshift``.
    """
    return yshift + F.elu(x - xshift, inplace=True)
class AdaptiveELU(nn.Module):
    """
    ELU shifted by user specified values. This helps to ensure the output to stay positive.
    """

    def __init__(self, xshift, yshift, **kwargs):
        """Store the horizontal (xshift) and vertical (yshift) offsets."""
        super(AdaptiveELU, self).__init__(**kwargs)
        self.xshift, self.yshift = xshift, yshift

    def forward(self, x):
        return adaptive_elu(x, self.xshift, self.yshift)
class PiecewiseLinearExpNonlinearity(nn.Module):
    """Trainable elementwise output nonlinearity.

    The response is the product of a shifted ELU, g(x) = elu(x - 1) + 1, and
    a positive piecewise-linear function h(x) built from tent basis functions
    over [vmin, vmax] with per-neuron log-weights ``self.a``.

    NOTE(review): smoothness_regularizer() and visualize() call .cuda()
    directly, so they require a CUDA device; visualize() also references
    ``plt``, which is not imported in this module's visible imports —
    presumably matplotlib is expected to be in scope. Verify before use.
    """
    def __init__(
        self,
        number_of_neurons,
        bias=False,
        initial_value=0.01,
        vmin=-3,
        vmax=6,
        num_bins=50,
        smooth_reg_weight=0,
        smoothnes_reg_order=2,
    ):
        super().__init__()
        self.bias = bias
        self.initial = initial_value
        self.vmin = vmin
        self.vmax = vmax
        self.neurons = number_of_neurons
        self.smooth_reg_weight = smooth_reg_weight
        self.smoothnes_reg_order = smoothnes_reg_order
        # force an even number of bins
        self.num_bins = 2 * int(num_bins / 2)
        if self.bias:
            # per-neuron additive bias applied before the nonlinearity
            self.b = torch.nn.Parameter(torch.empty((number_of_neurons,), dtype=torch.float32).fill_(self.initial))
        # per-bin, per-neuron log-weights of the tent basis functions
        self.a = torch.nn.Parameter(torch.empty((self.num_bins, self.neurons), dtype=torch.float32).fill_(0))
        bins = np.linspace(self.vmin, self.vmax, self.num_bins + 1, endpoint=True).reshape(1, -1)
        bins_mtx = np.tile(bins, [1, self.neurons, 1])
        bins_mtx = np.transpose(bins_mtx, (0, 2, 1)).astype(np.float32)
        # shape: 1, num_bins + 1, num_neurons (these are the bin *edges*)
        self.bins = torch.nn.Parameter(torch.from_numpy(bins_mtx), requires_grad=False)
        self.zero = torch.nn.Parameter(torch.zeros((1,), dtype=torch.float32), requires_grad=False)
    def tent(self, x, a, b):
        """Triangular basis function: 0 at x <= a, 1 at x == b, 0 at x >= 2b - a."""
        return torch.min(torch.max(self.zero, x - a), torch.max(self.zero, 2 * b - a - x)) / (b - a)
    def linstep(self, x, a, b):
        """Linear ramp from 0 (x <= a) to 1 (x >= b); used for the last bin."""
        return torch.min(b - a, torch.max(self.zero, x - a)) / (b - a)
    def smoothness_regularizer(self, verbose=False):
        """Penalize large finite differences (of the configured order) of the
        per-bin weights ``self.a`` across neighbouring bins."""
        penalty = 0
        # finite-difference kernel; repeated convolution gives higher orders
        kernel = torch.tensor(np.reshape([-1.0, 1.0], (1, 1, 2)), dtype=torch.float32).cuda()
        w = torch.reshape(self.a, (-1, 1, self.num_bins))  # shape: neurons, 1, bins
        for k in range(self.smoothnes_reg_order):
            w = F.conv1d(w, kernel)
        penalty += torch.sum(torch.mean(w**2, 1))
        penalty = torch.sum(self.smooth_reg_weight * penalty)
        if verbose:
            logger.info(f"PieceWiseLinearExpNonLin, Smoothness penalty: {penalty}")
        return penalty
    def forward(self, x):
        if self.bias:
            # a bias is added
            x = x + self.b
        # g: shifted ELU factor, always positive
        g = torch.nn.functional.elu(x - 1) + 1
        xx = torch.reshape(x, (-1, 1, self.neurons))
        # a tent function is applied on the data in multiple bins.
        tents = self.tent(xx, self.bins[:, :-2, :], self.bins[:, 1:-1, :])
        # the last bin uses a ramp so the function extrapolates past vmax
        linstep = self.linstep(xx, self.bins[:, -2:-1, :], self.bins[:, -1:, :])
        t = torch.cat((tents, linstep), dim=1)
        # bins shape: 1, num_bins, num_neurons
        # t shape: batch, bins, neurons
        h = torch.sum(torch.exp(self.a) * t, dim=1)
        return g * h
    def visualize(self, vmin=None, vmax=None, iters=1000, show=True, return_fig=False, neurons=range(10)):
        """Plot the learned nonlinearity for the selected neurons over
        [vmin, vmax] (defaults: configured range widened by 1 on each side)."""
        if vmin is None:
            vmin = self.vmin - 1
        if vmax is None:
            vmax = self.vmax + 1
        inpts = torch.from_numpy(np.tile(np.linspace(vmin, vmax, iters).astype(np.float32), [self.neurons, 1]).T).cuda()
        outs = self.forward(inpts)
        f = plt.figure()
        ax = f.add_subplot(1, 1, 1)
        ax.plot(inpts.cpu().detach().numpy()[:, neurons], outs.cpu().detach().numpy()[:, neurons])
        ax.set_xlabel("Response before alteration")
        ax.set_ylabel("Response after alteration")
        plt.grid(which="both")
        if show:
            f.show()
        if return_fig:
            return f
class MultiplePiecewiseLinearExpNonlinearity(nn.ModuleDict):
    """One PiecewiseLinearExpNonlinearity per data-set key."""

    def __init__(
        self,
        n_neurons_dict,
        bias=False,
        initial_value=0.01,
        vmin=-3,
        vmax=6,
        num_bins=50,
        smooth_reg_weight=0,
        smoothnes_reg_order=2,
    ):
        # super init to get the _module attribute
        super().__init__()
        for data_key, n_neurons in n_neurons_dict.items():
            self.add_module(
                data_key,
                PiecewiseLinearExpNonlinearity(
                    number_of_neurons=n_neurons,
                    bias=bias,
                    initial_value=initial_value,
                    vmin=vmin,
                    vmax=vmax,
                    num_bins=num_bins,
                    smooth_reg_weight=smooth_reg_weight,
                    smoothnes_reg_order=smoothnes_reg_order,
                ),
            )

    def forward(self, *args, data_key=None, **kwargs):
        """Apply the nonlinearity registered under *data_key*; when there is
        exactly one entry the key may be omitted."""
        if data_key is None and len(self) == 1:
            (data_key,) = self.keys()
        return self[data_key](*args, **kwargs)

    def regularizer(self, data_key):
        """Smoothness penalty of the nonlinearity registered for *data_key*."""
        return self[data_key].smoothness_regularizer()
| 30.405263
| 120
| 0.584213
|
acfe45af2b1035c4416c589e0b2054e4dcd35e6a
| 2,613
|
py
|
Python
|
CIM15/CDPSM/Asset/IEC61968/AssetModels/OverheadConductorInfo.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 58
|
2015-04-22T10:41:03.000Z
|
2022-03-29T16:04:34.000Z
|
CIM15/CDPSM/Asset/IEC61968/AssetModels/OverheadConductorInfo.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 12
|
2015-08-26T03:57:23.000Z
|
2020-12-11T20:14:42.000Z
|
CIM15/CDPSM/Asset/IEC61968/AssetModels/OverheadConductorInfo.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 35
|
2015-01-10T12:21:03.000Z
|
2020-09-09T08:18:16.000Z
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.CDPSM.Asset.IEC61968.AssetModels.ConductorInfo import ConductorInfo
class OverheadConductorInfo(ConductorInfo):
    """Overhead conductor data.
    """

    def __init__(self, neutralInsulationThickness=0.0, phaseConductorSpacing=0.0, phaseConductorCount=0, *args, **kw_args):
        """Initialises a new 'OverheadConductorInfo' instance.

        @param neutralInsulationThickness: (if applicable) Insulation thickness of the neutral conductor.
        @param phaseConductorSpacing: Distance between conductor strands in a symmetrical bundle.
        @param phaseConductorCount: Number of conductor strands in the symmetrical bundle (1-12).
        """
        #: (if applicable) Insulation thickness of the neutral conductor.
        self.neutralInsulationThickness = neutralInsulationThickness

        #: Distance between conductor strands in a symmetrical bundle.
        self.phaseConductorSpacing = phaseConductorSpacing

        #: Number of conductor strands in the symmetrical bundle (1-12).
        self.phaseConductorCount = phaseConductorCount

        # Remaining positional/keyword arguments are handled by ConductorInfo.
        super(OverheadConductorInfo, self).__init__(*args, **kw_args)

    # Introspection tables used by PyCIM's generic (de)serialization code;
    # keep these in sync with the attributes set in __init__.
    _attrs = ["neutralInsulationThickness", "phaseConductorSpacing", "phaseConductorCount"]
    _attr_types = {"neutralInsulationThickness": float, "phaseConductorSpacing": float, "phaseConductorCount": int}
    _defaults = {"neutralInsulationThickness": 0.0, "phaseConductorSpacing": 0.0, "phaseConductorCount": 0}
    _enums = {}
    _refs = []
    _many_refs = []
| 50.25
| 123
| 0.753923
|
acfe45db2616724a48ec21c0ac385f6ed6303ab0
| 8,771
|
py
|
Python
|
vision/nn/mobilenetv3.py
|
chwj81/pytorch-ssd
|
1c666d498216b6a569016d010fc6f98b4d14a559
|
[
"MIT"
] | null | null | null |
vision/nn/mobilenetv3.py
|
chwj81/pytorch-ssd
|
1c666d498216b6a569016d010fc6f98b4d14a559
|
[
"MIT"
] | null | null | null |
vision/nn/mobilenetv3.py
|
chwj81/pytorch-ssd
|
1c666d498216b6a569016d010fc6f98b4d14a559
|
[
"MIT"
] | null | null | null |
'''MobileNetV3 in PyTorch.
See the paper "Inverted Residuals and Linear Bottlenecks:
Mobile Networks for Classification, Detection and Segmentation" for more details.
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import init
#class hswish(nn.Module):
# def forward(self, x):
# #out = x * F.relu6(x + 3, inplace=True) / 6
# out = F.hardswish(x, inplace=True)
#
# return out
#class hsigmoid(nn.Module):
# def forward(self, x):
# #out = F.relu6(x + 3, inplace=True) / 6
# out = F.hardsigmoid(x, inplace=True)
#
# return out
class SeModule(nn.Module):
    """Squeeze-and-Excitation block: computes a per-channel gate from the
    globally pooled input and rescales the input channels with it."""

    def __init__(self, in_size, reduction=4):
        super(SeModule, self).__init__()
        squeezed = in_size // reduction
        # squeeze (global pool) -> bottleneck 1x1 convs -> hard-sigmoid gate
        self.se = nn.Sequential(
            nn.AdaptiveAvgPool2d(1),
            nn.Conv2d(in_size, squeezed, kernel_size=1, stride=1, padding=0, bias=False),
            nn.BatchNorm2d(squeezed),
            nn.ReLU(inplace=True),
            nn.Conv2d(squeezed, in_size, kernel_size=1, stride=1, padding=0, bias=False),
            nn.BatchNorm2d(in_size),
            nn.Hardsigmoid(inplace=True)
        )

    def forward(self, x):
        gate = self.se(x)
        return x * gate
class Block(nn.Module):
    '''expand + depthwise + pointwise'''

    def __init__(self, kernel_size, in_size, expand_size, out_size, nolinear, semodule, stride):
        super(Block, self).__init__()
        self.stride = stride
        self.se = semodule
        # 1x1 expansion
        self.conv1 = nn.Conv2d(in_size, expand_size, kernel_size=1, stride=1, padding=0, bias=False)
        self.bn1 = nn.BatchNorm2d(expand_size)
        self.nolinear1 = nolinear
        # depthwise convolution (groups == channels)
        self.conv2 = nn.Conv2d(expand_size, expand_size, kernel_size=kernel_size, stride=stride, padding=kernel_size//2, groups=expand_size, bias=False)
        self.bn2 = nn.BatchNorm2d(expand_size)
        self.nolinear2 = nolinear
        # 1x1 linear projection
        self.conv3 = nn.Conv2d(expand_size, out_size, kernel_size=1, stride=1, padding=0, bias=False)
        self.bn3 = nn.BatchNorm2d(out_size)
        # residual connection, with a 1x1 projection when channel counts differ
        self.shortcut = nn.Sequential()
        if stride == 1 and in_size != out_size:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_size, out_size, kernel_size=1, stride=1, padding=0, bias=False),
                nn.BatchNorm2d(out_size),
            )

    def forward(self, x):
        out = self.conv1(x)
        out = self.nolinear1(self.bn1(out))
        out = self.nolinear2(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        if self.se is not None:
            out = self.se(out)
        # residual only when spatial resolution is preserved
        if self.stride == 1:
            out = out + self.shortcut(x)
        return out
class MobileNetV3_Large(nn.Module):
    """MobileNetV3-Large classification network built from Block units.

    All layers are also collected in ``self.features`` (a Sequential) so
    downstream code can reuse the backbone.  The forward pass applies a
    fixed 7x7 average pool, which matches a 224x224 input (total stride 32)
    — other input sizes will not pool correctly; TODO confirm callers.
    """
    def __init__(self, num_classes=1000):
        super(MobileNetV3_Large, self).__init__()
        self.features = []
        # stem: 3x3 stride-2 conv
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=2, padding=1, bias=False)
        self.features.append(self.conv1)
        self.bn1 = nn.BatchNorm2d(16)
        self.features.append(self.bn1)
        self.hs1 = nn.Hardswish(inplace=True)
        self.features.append(self.hs1)
        # inverted-residual bottlenecks:
        # Block(kernel, in, expand, out, nonlinearity, SE-module, stride)
        self.bneck = nn.Sequential(
            Block(3, 16, 16, 16, nn.ReLU(inplace=True), None, 1),
            Block(3, 16, 64, 24, nn.ReLU(inplace=True), None, 2),
            Block(3, 24, 72, 24, nn.ReLU(inplace=True), None, 1),
            Block(5, 24, 72, 40, nn.ReLU(inplace=True), SeModule(40), 2),
            Block(5, 40, 120, 40, nn.ReLU(inplace=True), SeModule(40), 1),
            Block(5, 40, 120, 40, nn.ReLU(inplace=True), SeModule(40), 1),
            Block(3, 40, 240, 80, nn.Hardswish(inplace=True), None, 2),
            Block(3, 80, 200, 80, nn.Hardswish(inplace=True), None, 1),
            Block(3, 80, 184, 80, nn.Hardswish(inplace=True), None, 1),
            Block(3, 80, 184, 80, nn.Hardswish(inplace=True), None, 1),
            Block(3, 80, 480, 112, nn.Hardswish(inplace=True), SeModule(112), 1),
            Block(3, 112, 672, 112, nn.Hardswish(inplace=True), SeModule(112), 1),
            Block(5, 112, 672, 160, nn.Hardswish(inplace=True), SeModule(160), 1),
            Block(5, 160, 672, 160, nn.Hardswish(inplace=True), SeModule(160), 2),
            Block(5, 160, 960, 160, nn.Hardswish(inplace=True), SeModule(160), 1),
        )
        self.features.extend([block for block in self.bneck])
        # head: 1x1 conv to 960 channels, then two fully-connected layers
        self.conv2 = nn.Conv2d(160, 960, kernel_size=1, stride=1, padding=0, bias=False)
        self.features.append(self.conv2)
        self.bn2 = nn.BatchNorm2d(960)
        self.features.append(self.bn2)
        self.hs2 = nn.Hardswish(inplace=True)
        self.features.append(self.hs2)
        self.linear3 = nn.Linear(960, 1280)
        self.bn3 = nn.BatchNorm1d(1280)
        self.hs3 = nn.Hardswish(inplace=True)
        self.linear4 = nn.Linear(1280, num_classes)
        self.init_params()
        self.features = nn.Sequential(*self.features)
    def init_params(self):
        """Kaiming init for convs, unit/zero init for norms, small-normal for linears."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal_(m.weight, mode='fan_out')
                if m.bias is not None:
                    init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                init.constant_(m.weight, 1)
                init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal_(m.weight, std=0.001)
                if m.bias is not None:
                    init.constant_(m.bias, 0)
    def forward(self, x):
        out = self.hs1(self.bn1(self.conv1(x)))
        out = self.bneck(out)
        out = self.hs2(self.bn2(self.conv2(out)))
        # fixed 7x7 pooling window — assumes a 7x7 feature map (224x224 input)
        out = F.avg_pool2d(out, 7)
        out = out.view(out.size(0), -1)
        out = self.hs3(self.bn3(self.linear3(out)))
        out = self.linear4(out)
        return out
class MobileNetV3_Small(nn.Module):
    """MobileNetV3-Small image classifier.

    NOTE(review): ``forward`` uses a fixed 7x7 average pool, so the model
    assumes 224x224 RGB input — confirm before feeding other sizes.
    """

    def __init__(self, num_classes=1000):
        super(MobileNetV3_Small, self).__init__()
        # Collect stem/bottlenecks/head so they can also be exposed as a
        # single nn.Sequential under ``self.features`` at the end.
        self.features = []
        # Stem: 3x3 stride-2 conv, 3 -> 16 channels.
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=2, padding=1, bias=False)
        self.features.append(self.conv1)
        self.bn1 = nn.BatchNorm2d(16)
        self.features.append(self.bn1)
        self.hs1 = nn.Hardswish(inplace=True)
        self.features.append(self.hs1)
        # Bottleneck stack. Block signature (by position):
        # (kernel, in_ch, expand_ch, out_ch, nonlinearity, SE-module-or-None, stride).
        self.bneck = nn.Sequential(
            Block(3, 16, 16, 16, nn.ReLU(inplace=True), SeModule(16), 2),
            Block(3, 16, 72, 24, nn.ReLU(inplace=True), None, 2),
            Block(3, 24, 88, 24, nn.ReLU(inplace=True), None, 1),
            Block(5, 24, 96, 40, nn.Hardswish(inplace=True), SeModule(40), 2),
            Block(5, 40, 240, 40, nn.Hardswish(inplace=True), SeModule(40), 1),
            Block(5, 40, 240, 40, nn.Hardswish(inplace=True), SeModule(40), 1),
            Block(5, 40, 120, 48, nn.Hardswish(inplace=True), SeModule(48), 1),
            Block(5, 48, 144, 48, nn.Hardswish(inplace=True), SeModule(48), 1),
            Block(5, 48, 288, 96, nn.Hardswish(inplace=True), SeModule(96), 2),
            Block(5, 96, 576, 96, nn.Hardswish(inplace=True), SeModule(96), 1),
            Block(5, 96, 576, 96, nn.Hardswish(inplace=True), SeModule(96), 1),
        )
        self.features.extend([block for block in self.bneck])
        # Head: 1x1 conv to 576 channels, then the classifier MLP.
        self.conv2 = nn.Conv2d(96, 576, kernel_size=1, stride=1, padding=0, bias=False)
        self.features.append(self.conv2)
        self.bn2 = nn.BatchNorm2d(576)
        self.features.append(self.bn2)
        self.hs2 = nn.Hardswish(inplace=True)
        self.features.append(self.hs2)
        self.linear3 = nn.Linear(576, 1280)
        self.bn3 = nn.BatchNorm1d(1280)
        self.hs3 = nn.Hardswish(inplace=True)
        self.linear4 = nn.Linear(1280, num_classes)
        self.init_params()
        # Same module objects as the attributes above, so parameters are
        # shared, not duplicated.
        self.features = nn.Sequential(*self.features)

    def init_params(self):
        """Initialize weights: Kaiming-normal convs, constant batch-norm,
        small-std normal linear layers."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal_(m.weight, mode='fan_out')
                if m.bias is not None:
                    init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                init.constant_(m.weight, 1)
                init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal_(m.weight, std=0.001)
                if m.bias is not None:
                    init.constant_(m.bias, 0)

    def forward(self, x):
        """Stem -> bottlenecks -> head conv -> fixed 7x7 avg pool -> MLP."""
        out = self.hs1(self.bn1(self.conv1(x)))
        out = self.bneck(out)
        out = self.hs2(self.bn2(self.conv2(out)))
        # Fixed window: only valid when the incoming map is exactly 7x7.
        out = F.avg_pool2d(out, 7)
        out = out.view(out.size(0), -1)  # flatten to (batch, 576)
        out = self.hs3(self.bn3(self.linear3(out)))
        out = self.linear4(out)
        return out
def test():
    """Smoke test: push a batch of two 224x224 RGB tensors through
    MobileNetV3_Small and print the output size (expected [2, 1000])."""
    net = MobileNetV3_Small()
    x = torch.randn(2,3,224,224)
    y = net(x)
    print(y.size())
# test()
| 38.30131
| 152
| 0.587846
|
acfe4608023fec722341537aabb74d27eaf2f042
| 1,422
|
py
|
Python
|
tests/test_day10.py
|
FollowTheProcess/advent_of_code_2021
|
27427bf76732f5257d369a9455dbbb337dd7a0bc
|
[
"MIT"
] | null | null | null |
tests/test_day10.py
|
FollowTheProcess/advent_of_code_2021
|
27427bf76732f5257d369a9455dbbb337dd7a0bc
|
[
"MIT"
] | null | null | null |
tests/test_day10.py
|
FollowTheProcess/advent_of_code_2021
|
27427bf76732f5257d369a9455dbbb337dd7a0bc
|
[
"MIT"
] | null | null | null |
import pytest
from src.day10.day10 import (
calculate_syntax_score,
complete,
first_illegal_character,
get_middle,
)
# Example navigation-subsystem input used by both part-1 and part-2 tests.
RAW = """[({(<(())[]>[[{[]{<()<>>
[(()[<>])]({[<{<<[]>>(
{([(<{}[<>[]}>{[]{[(<()>
(((({<>}<{<{<>}{[]{[]{}
[[<[([]))<([[{}[[()]]]
[{[{({}]{}}([{[{{{}}([]
{<[[]]>}<{[{[{[]{()[[[]
[<(<(<(<{}))><([]([]()
<{([([[(<>()){}]>(<<{{
<{([{{}}[<[[[<>{}]]]>[]]"""
@pytest.mark.parametrize(
    "line, char",
    [
        # Corrupted lines paired with their first mismatched closing char.
        ("{([(<{}[<>[]}>{[]{[(<()>", "}"),
        ("[[<[([]))<([[{}[[()]]]", ")"),
        ("[{[{({}]{}}([{[{{{}}([]", "]"),
        ("[<(<(<(<{}))><([]([]()", ")"),
        ("<{([([[(<>()){}]>(<<{{", ">"),
    ],
)
def test_get_first_illegal_character(line: str, char: str):
    """Each corrupted line reports the first illegal closing character."""
    assert first_illegal_character(line) == char
def test_score_example_part1():
    """Part 1: total syntax-error score of the example input is 26397."""
    lines = RAW.strip().splitlines()
    assert sum(calculate_syntax_score(line) for line in lines) == 26397
@pytest.mark.parametrize(
    "line, completion",
    [
        # Incomplete lines paired with the characters that close them.
        ("[({(<(())[]>[[{[]{<()<>>", "}}]])})]"),
        ("[(()[<>])]({[<{<<[]>>(", ")}>]})"),
        ("(((({<>}<{<{<>}{[]{[]{}", "}}>}>))))"),
        ("{<[[]]>}<{[{[{[]{()[[[]", "]]}}]}]}>"),
        ("<{([{{}}[<[[[<>{}]]]>[]]", "])}>"),
    ],
)
def test_complete(line: str, completion: str):
    """complete() returns the closing sequence as a list of characters."""
    assert complete(line) == list(completion)
def test_score_example_part2():
    """Part 2: the middle completion score of the example input is 288957."""
    lines = RAW.strip().splitlines()
    assert get_middle(lines) == 288957
| 23.311475
| 71
| 0.351617
|
acfe479a76175ffcd99eb4d4a2f54daaf94ccca3
| 188
|
py
|
Python
|
src/bgmtinygrail/strategy/self_service.py
|
no1xsyzy/bgmtinygrail
|
4e762a58337f3021440a070967f1cb7a0213f8a6
|
[
"MIT"
] | 5
|
2020-05-17T02:41:01.000Z
|
2020-07-01T23:24:41.000Z
|
src/bgmtinygrail/strategy/self_service.py
|
no1xsyzy/bgmtinygrail
|
4e762a58337f3021440a070967f1cb7a0213f8a6
|
[
"MIT"
] | null | null | null |
src/bgmtinygrail/strategy/self_service.py
|
no1xsyzy/bgmtinygrail
|
4e762a58337f3021440a070967f1cb7a0213f8a6
|
[
"MIT"
] | 1
|
2021-02-09T04:41:15.000Z
|
2021-02-09T04:41:15.000Z
|
from ._base import *
class SelfServiceStrategy(ABCCharaStrategy):
    """Strategy that never transitions away and produces no output —
    presumably leaving the character to manual ("self-service") handling;
    confirm against ABCCharaStrategy's contract."""

    strategy = Strategy.SELF_SERVICE

    def transition(self):
        # Remain in this strategy indefinitely.
        return self

    def output(self):
        # Intentionally a no-op.
        pass
| 15.666667
| 44
| 0.675532
|
acfe47f1c49c06db42900fc43e7ef3a75f51715b
| 1,320
|
py
|
Python
|
api/v1.py
|
gyl-coder/friends
|
f03439ebe83b3411209a2595a7545411cb718aa6
|
[
"MIT"
] | null | null | null |
api/v1.py
|
gyl-coder/friends
|
f03439ebe83b3411209a2595a7545411cb718aa6
|
[
"MIT"
] | 3
|
2021-08-07T03:02:39.000Z
|
2021-08-07T05:19:41.000Z
|
api/v1.py
|
gyl-coder/friends
|
f03439ebe83b3411209a2595a7545411cb718aa6
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
from flask import Flask, jsonify, redirect, make_response
import requests
import json
from bs4 import BeautifulSoup
import re
app = Flask(__name__)
# author: https://github.com/Zfour
def github_json(owner, repo, branch):
    """Fetch the generated friends-link data.json from a GitHub repository
    and wrap it in a CORS-enabled JSON response.

    :param owner: GitHub account name
    :param repo: repository name
    :param branch: branch to read from
    :return: Flask response with {code, source_url, content}
    """
    source_url = (
        f'https://raw.githubusercontent.com/{owner}/{repo}/{branch}'
        '/generator/output/v1/data.json'
    )
    fetched = requests.get(source_url)
    # Empty body (e.g. 404 placeholder) falls back to an empty list.
    parsed = json.loads(fetched.content.decode()) if fetched.content else []
    resp = make_response(
        jsonify({'code': 0, 'source_url': source_url, 'content': parsed})
    )
    resp.status = '200'
    resp.headers['Access-Control-Allow-Origin'] = '*'
    resp.headers['Access-Control-Allow-Methods'] = 'PUT,GET,POST,DELETE'
    resp.headers['Content-Type'] = 'application/json; charset=utf-8'
    return resp
@app.route('/v1/<owner>', methods=['GET'])
def start_owner(owner):
    """GET /v1/<owner>: use default repo 'friends' on branch 'main'."""
    repo = 'friends'
    branch = 'main'
    return github_json(owner, repo, branch)
@app.route('/v1/<owner>/<repo>', methods=['GET'])
def start_owner_repo(owner, repo):
    """GET /v1/<owner>/<repo>: use default branch 'main'."""
    branch = 'main'
    return github_json(owner, repo, branch)
@app.route('/v1/<owner>/<repo>/<branch>', methods=['GET'])
def start_owner_repo_branch(owner, repo, branch):
    """GET /v1/<owner>/<repo>/<branch>: fully-specified source location."""
    return github_json(owner, repo, branch)
| 33.846154
| 125
| 0.654545
|
acfe4913ea5fed97bf509ba0fdcac619ee54f5f2
| 71
|
py
|
Python
|
hello.py
|
strattan/gene211
|
c1f23e65f88bac5c0201abef9238db562e764751
|
[
"MIT"
] | null | null | null |
hello.py
|
strattan/gene211
|
c1f23e65f88bac5c0201abef9238db562e764751
|
[
"MIT"
] | null | null | null |
hello.py
|
strattan/gene211
|
c1f23e65f88bac5c0201abef9238db562e764751
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Print a greeting for Gene211 and exit with status 0."""
import sys


def main():
    """Entry point: greet and report success via the process exit code."""
    print("Hello Gene211!")
    sys.exit(0)


# Guard the entry point so importing this module has no side effects.
if __name__ == "__main__":
    main()
| 10.142857
| 23
| 0.690141
|
acfe4a268fa19ada74505b5a24c17869d1a20f1b
| 876
|
py
|
Python
|
tests/v1/test_logs_grok_parser_rules.py
|
MichaelTROEHLER/datadog-api-client-python
|
12c46626622fb1277bb1e172753b342c671348bd
|
[
"Apache-2.0"
] | null | null | null |
tests/v1/test_logs_grok_parser_rules.py
|
MichaelTROEHLER/datadog-api-client-python
|
12c46626622fb1277bb1e172753b342c671348bd
|
[
"Apache-2.0"
] | null | null | null |
tests/v1/test_logs_grok_parser_rules.py
|
MichaelTROEHLER/datadog-api-client-python
|
12c46626622fb1277bb1e172753b342c671348bd
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import absolute_import
import sys
import unittest
import datadog_api_client.v1
from datadog_api_client.v1.model.logs_grok_parser_rules import LogsGrokParserRules
class TestLogsGrokParserRules(unittest.TestCase):
    """LogsGrokParserRules unit test stubs (auto-generated skeleton)."""

    def setUp(self):
        # No fixtures required yet.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testLogsGrokParserRules(self):
        """Test LogsGrokParserRules"""
        # FIXME: construct object with mandatory attributes with example values
        # model = LogsGrokParserRules()  # noqa: E501
        pass
if __name__ == '__main__':
    # Allow running this test module directly without a separate runner.
    unittest.main()
| 25.764706
| 108
| 0.73516
|
acfe4a5435caa37f2cfd42d915b2aed25e5ae438
| 464
|
py
|
Python
|
rest/api/responsehelpers/http_response.py
|
estuaryoss/estuary-agent
|
b8b4264a616be21c86458da75cf29d13a8fb263d
|
[
"Apache-2.0"
] | null | null | null |
rest/api/responsehelpers/http_response.py
|
estuaryoss/estuary-agent
|
b8b4264a616be21c86458da75cf29d13a8fb263d
|
[
"Apache-2.0"
] | null | null | null |
rest/api/responsehelpers/http_response.py
|
estuaryoss/estuary-agent
|
b8b4264a616be21c86458da75cf29d13a8fb263d
|
[
"Apache-2.0"
] | null | null | null |
import datetime
from flask import request
from about import properties
class HttpResponse:
@staticmethod
def response(code, message, description):
return {
"code": code,
"message": message,
"description": description,
"path": request.full_path,
"timestamp": str(datetime.datetime.now()),
"name": properties["name"],
"version": properties["version"]
}
| 22.095238
| 54
| 0.573276
|
acfe4a6de44c47679247f1b30d8908e2f273481e
| 19,803
|
py
|
Python
|
py3status/modules/xrandr.py
|
obestwalter/py3status
|
a79d50ae252626bacb14bfcc8f369e59ae500fd1
|
[
"BSD-3-Clause"
] | null | null | null |
py3status/modules/xrandr.py
|
obestwalter/py3status
|
a79d50ae252626bacb14bfcc8f369e59ae500fd1
|
[
"BSD-3-Clause"
] | null | null | null |
py3status/modules/xrandr.py
|
obestwalter/py3status
|
a79d50ae252626bacb14bfcc8f369e59ae500fd1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Control screen layout.
This modules allows you to handle your screens outputs directly from your bar!
- Detect and propose every possible screen combinations
- Switch between combinations using click events and mouse scroll
- Activate the screen or screen combination on a single click
- It will detect any newly connected or removed screen automatically
For convenience, this module also proposes some added features:
- Dynamic parameters for POSITION and WORKSPACES assignment (see below)
- Automatic fallback to a given screen or screen combination when no more
screen is available (handy for laptops)
- Automatically apply this screen combination on start: no need for xorg!
- Automatically move workspaces to screens when they are available
- Define your own subset of output combinations to use
Configuration parameters:
cache_timeout: how often to (re)detect the outputs (default 10)
command: a custom command to be run after display configuration changes
(default None)
fallback: when the current output layout is not available anymore,
fallback to this layout if available. This is very handy if you
have a laptop and switched to an external screen for presentation
and want to automatically fallback to your laptop screen when you
disconnect the external screen. (default True)
fixed_width: show output as fixed width (default True)
force_on_change: switch display layout to the leftmost combination mode
of the given list whenever it is available. The combination modes are
checked from left (high priority) to right (less priority) until
one matches.
Example:
We have a laptop with internal screen and we are often moving from
our desk where another screen is available. We want the layout to
follow our changes so that we do not have to switch manually.
So whenever we plug at our desk, we want the second monitor to be
used, and whenever we go away we want everything back on the laptop
screen automatically:
```
force_on_change = ["eDP1+DP1", "eDP1"]
```
NOTES:
Click controls will override `force_on_change` until the layout
changes in the background so you can still manually control your
layout changes on the bar.
Use the `force_on_start` to handle initial layout setup on module
startup along with this feature to benefit from fully dynamic and
automated changes of screen layouts.
(default [])
force_on_start: switch to the given combination mode if available
when the module starts (saves you from having to configure xorg)
(default None)
format: display format for xrandr
(default '{output}')
hide_if_single_combination: hide if only one combination is available
(default False)
icon_clone: icon used to display a 'clone' combination
(default '=')
icon_extend: icon used to display a 'extend' combination
(default '+')
on_udev_drm: dynamic variable to watch for `drm` udev subsystem events to
trigger specified action.
(default 'refresh_and_freeze')
output_combinations: string used to define your own subset of output
combinations to use, instead of generating every possible combination
automatically. Provide the values in the format that this module uses,
splitting the combinations using '|' character.
The combinations will be rotated in the exact order as you listed them.
When an output layout is not available any more, the configurations
are automatically filtered out.
Example:
Assuming the default values for `icon_clone` and `icon_extend`
are used, and assuming you have two screens 'eDP1' and 'DP1', the
following setup will reduce the number of output combinations
from four (every possible one) down to two.
```
output_combinations = "eDP1|eDP1+DP1"
```
(default None)
Dynamic configuration parameters:
<OUTPUT>_icon: use this icon instead of OUTPUT name as text
Example: DP1_icon = "🖵"
<OUTPUT>_pos: apply the given position to the OUTPUT
Example: DP1_pos = "-2560x0"
Example: DP1_pos = "above eDP1"
Example: DP1_pos = "below eDP1"
Example: DP1_pos = "left-of LVDS1"
Example: DP1_pos = "right-of eDP1"
<OUTPUT>_workspaces: comma separated list of workspaces to move to
the given OUTPUT when it is activated
Example: DP1_workspaces = "1,2,3"
<OUTPUT>_rotate: rotate the output as told
Example: DP1_rotate = "left"
<OUTPUT>_mode: define the mode (resolution) for the output
if not specified use --auto : prefered mode
Example: eDP1_mode = "2560x1440
<OUTPUT>_primary: apply the primary to the OUTPUT
Example: DP1_primary = True
Format placeholders:
{output} xrandr output
Color options:
color_bad: Displayed layout unavailable
color_degraded: Using a fallback layout
color_good: Displayed layout active
Notes:
Some days are just bad days. Running `xrandr --query` command can
cause unexplainable brief screen freezes due to an overall combination
of computer hardware, installed software, your choice of linux distribution,
and/or some other unknown factors such as recent system updates.
Configuring `cache_timeout` with a different number, eg `3600` (an hour)
or `-1` (runs once) can be used to remedy this issue. See issue #580.
Examples:
```
# start with a preferable setup
xrandr {
force_on_start = "eDP1+DP1"
DP1_pos = "left-of eDP1"
VGA_workspaces = "7"
}
```
@author ultrabug
SAMPLE OUTPUT
{'color': '#00FF00', 'full_text': 'LVDS1+DP1'}
mirror
{'full_text': 'LVDS1=DP1'}
single_1
{'full_text': u'LVDS1'}
single_2
{'full_text': 'DP1'}
"""
from collections import deque
from collections import OrderedDict
from itertools import combinations
from time import sleep
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 10
command = None
fallback = True
fixed_width = True
force_on_change = []
force_on_start = None
format = "{output}"
hide_if_single_combination = False
icon_clone = "="
icon_extend = "+"
on_udev_drm = "refresh_and_freeze"
output_combinations = None
class Meta:
deprecated = {
"rename": [
{
"param": "format_clone",
"new": "icon_clone",
"msg": "obsolete parameter use `icon_clone`",
},
{
"param": "format_extend",
"new": "icon_extend",
"msg": "obsolete parameter use `icon_extend`",
},
]
}
def post_config_hook(self):
"""
Initialization
"""
self._no_force_on_change = True
self.active_comb = None
self.active_layout = None
self.active_mode = "extend"
self.displayed = None
self.max_width = 0
def _get_layout(self):
"""
Get the outputs layout from xrandr and try to detect the
currently active layout as best as we can on start.
"""
connected = list()
active_layout = list()
disconnected = list()
layout = OrderedDict(
{"connected": OrderedDict(), "disconnected": OrderedDict()}
)
current = self.py3.command_output("xrandr")
for line in current.splitlines():
try:
s = line.split(" ")
infos = line[line.find("(") :]
if s[1] == "connected":
output, state, mode = s[0], s[1], None
for index, x in enumerate(s[2:], 2):
if "x" in x and "+" in x:
mode = x
active_layout.append(output)
infos = line[line.find(s[index + 1]) :]
break
elif "(" in x:
break
connected.append(output)
elif s[1] == "disconnected":
output, state, mode = s[0], s[1], None
disconnected.append(output)
else:
continue
except Exception as err:
self.py3.log('xrandr error="{}"'.format(err))
else:
layout[state][output] = {"infos": infos, "mode": mode, "state": state}
# initialize the active layout
if self.active_layout is None:
self.active_comb = tuple(active_layout)
self.active_layout = self._get_string_and_set_width(
tuple(active_layout), self.active_mode
)
return layout
def _set_available_combinations(self):
"""
Generate all connected outputs combinations and
set the max display width while iterating.
"""
available = set()
combinations_map = {}
whitelist = None
if self.output_combinations:
whitelist = self.output_combinations.split("|")
self.max_width = 0
for output in range(len(self.layout["connected"])):
for comb in combinations(self.layout["connected"], output + 1):
for mode in ["clone", "extend"]:
string = self._get_string_and_set_width(comb, mode)
if whitelist and string not in whitelist:
continue
if len(comb) == 1:
combinations_map[string] = (comb, None)
else:
combinations_map[string] = (comb, mode)
available.add(string)
# Preserve the order in which user defined the output combinations
if whitelist:
available = reversed([comb for comb in whitelist if comb in available])
self.available_combinations = deque(available)
self.combinations_map = combinations_map
def _get_string_and_set_width(self, combination, mode):
"""
Construct the string to be displayed and record the max width.
"""
show = getattr(self, "icon_{}".format(mode)).join(
tuple(getattr(self, "{}_icon".format(x), x) for x in combination)
)
self.max_width = max([self.max_width, len(show)])
return show
def _choose_what_to_display(self, force_refresh=False):
"""
Choose what combination to display on the bar.
By default we try to display the active layout on the first run, else
we display the last selected combination.
"""
for _ in range(len(self.available_combinations)):
if (
self.displayed is None
and self.available_combinations[0] == self.active_layout
):
self.displayed = self.available_combinations[0]
break
else:
if self.displayed == self.available_combinations[0]:
break
else:
self.available_combinations.rotate(1)
else:
if force_refresh:
self.displayed = self.available_combinations[0]
else:
self.py3.log('xrandr error="displayed combination is not available"')
def _center(self, s):
"""
Center the given string on the detected max width.
"""
fmt = "{:^%d}" % self.max_width
return fmt.format(s)
def _apply(self, force=False):
"""
Call xrandr and apply the selected (displayed) combination mode.
"""
if self.displayed == self.active_layout and not force:
# no change, do nothing
return
combination, mode = self.combinations_map.get(self.displayed, (None, None))
if combination is None and mode is None:
# displayed combination cannot be activated, ignore
return
cmd = "xrandr"
outputs = list(self.layout["connected"].keys())
outputs += list(self.layout["disconnected"].keys())
previous_output = None
primary_added = False
for output in outputs:
cmd += " --output {}".format(output)
#
if output in combination:
pos = getattr(self, "{}_pos".format(output), "0x0")
primary = getattr(self, "{}_primary".format(output), None)
resolution = getattr(self, "{}_mode".format(output), None)
resolution = "--mode {}".format(resolution) if resolution else "--auto"
rotation = getattr(self, "{}_rotate".format(output), "normal")
if rotation not in ["inverted", "left", "normal", "right"]:
self.py3.log("configured rotation {} is not valid".format(rotation))
rotation = "normal"
#
if primary is True and not primary_added:
primary_added = True
cmd += " --primary"
if mode == "clone" and previous_output is not None:
cmd += " {} --same-as {}".format(resolution, previous_output)
else:
if (
"above" in pos
or "below" in pos
or "left-of" in pos
or "right-of" in pos
):
cmd += " {} --{} --rotate {}".format(resolution, pos, rotation)
else:
cmd += " {} --pos {} --rotate {}".format(
resolution, pos, rotation
)
previous_output = output
else:
cmd += " --off"
#
code = self.py3.command_run(cmd)
if code == 0:
self.active_comb = combination
self.active_layout = self.displayed
self.active_mode = mode
self.py3.log('command "{}" exit code {}'.format(cmd, code))
if self.command:
self.py3.command_run(self.command)
# move workspaces to outputs as configured
self._apply_workspaces(combination, mode)
def _apply_workspaces(self, combination, mode):
"""
Allows user to force move a comma separated list of workspaces to the
given output when it's activated.
Example:
- DP1_workspaces = "1,2,3"
"""
if len(combination) > 1 and mode == "extend":
sleep(3)
for output in combination:
workspaces = getattr(self, "{}_workspaces".format(output), "").split(
","
)
for workspace in workspaces:
if not workspace:
continue
# switch to workspace
cmd = '{} workspace "{}"'.format(self.py3.get_wm_msg(), workspace)
self.py3.command_run(cmd)
# move it to output
cmd = '{} move workspace to output "{}"'.format(
self.py3.get_wm_msg(), output
)
self.py3.command_run(cmd)
# log this
self.py3.log(
"moved workspace {} to output {}".format(workspace, output)
)
def _fallback_to_available_output(self):
"""
Fallback to the first available output when the active layout
was composed of only one output.
This allows us to avoid cases where you get stuck with a black sreen
on your laptop by switching back to the integrated screen
automatically !
"""
if len(self.active_comb) == 1:
self._choose_what_to_display(force_refresh=True)
self._apply()
self.py3.update()
def _force_force_on_start(self):
"""
Force the user configured mode on start.
"""
if self.force_on_start in self.available_combinations:
self.displayed = self.force_on_start
self._choose_what_to_display(force_refresh=True)
self._apply(force=True)
self.py3.update()
self.force_on_start = None
def _switch_selection(self, direction):
self.available_combinations.rotate(direction)
self.displayed = self.available_combinations[0]
def _force_on_change(self):
"""
Handle force_on_change feature.
"""
for layout in self.force_on_change:
if layout in self.available_combinations:
if self.active_layout != layout:
self.displayed = layout
self._apply(force=True)
self.py3.update()
break
else:
break
def on_click(self, event):
"""
Click events
- left click & scroll up/down: switch between modes
- right click: apply selected mode
- middle click: force refresh of available modes
"""
self._no_force_on_change = True
button = event["button"]
if button == 4:
self._switch_selection(-1)
if button in [1, 5]:
self._switch_selection(1)
if button == 2:
self._choose_what_to_display(force_refresh=True)
if button == 3:
self._apply()
def xrandr(self):
"""
This is the main py3status method, it will orchestrate what's being
displayed on the bar.
"""
self.layout = self._get_layout()
self._set_available_combinations()
self._choose_what_to_display()
if len(self.available_combinations) < 2 and self.hide_if_single_combination:
full_text = self.py3.safe_format(self.format, {"output": ""})
else:
if self.fixed_width is True:
output = self._center(self.displayed)
else:
output = self.displayed
full_text = self.py3.safe_format(self.format, {"output": output})
response = {
"cached_until": self.py3.time_in(self.cache_timeout),
"full_text": full_text,
}
# coloration
if self.displayed == self.active_layout:
response["color"] = self.py3.COLOR_GOOD
elif self.displayed not in self.available_combinations:
response["color"] = self.py3.COLOR_BAD
# force default layout setup at module startup
if self.force_on_start is not None:
sleep(1)
self._force_force_on_start()
# follow on change
if not self._no_force_on_change and self.force_on_change:
self._force_on_change()
# this was a click event triggered update
if self._no_force_on_change:
self._no_force_on_change = False
# fallback detection
if self.active_layout not in self.available_combinations:
response["color"] = self.py3.COLOR_DEGRADED
if self.fallback is True:
self._fallback_to_available_output()
return response
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| 37.364151
| 88
| 0.577943
|
acfe4aa85bdc1d4dd34358a016e7eb02308828ec
| 85
|
py
|
Python
|
microservices/FarmaWebService/delivery/dev_scripter/settings_global.py
|
lsbloo/FarmaApp
|
c386eec03938d88e3aaf7da25d86010ac8b98096
|
[
"MIT"
] | null | null | null |
microservices/FarmaWebService/delivery/dev_scripter/settings_global.py
|
lsbloo/FarmaApp
|
c386eec03938d88e3aaf7da25d86010ac8b98096
|
[
"MIT"
] | null | null | null |
microservices/FarmaWebService/delivery/dev_scripter/settings_global.py
|
lsbloo/FarmaApp
|
c386eec03938d88e3aaf7da25d86010ac8b98096
|
[
"MIT"
] | null | null | null |
import os
URL_ENDPOINT_SENDER = os.environ.get('URL_ENDPOINT_POST', 'DONT SETTER')
| 17
| 72
| 0.776471
|
acfe4b38c9a53edb613fd034bfc35b355b9bff63
| 1,222
|
py
|
Python
|
kevin-bacon-status.py
|
jonesinator/kevin-bacon-status
|
58e721284d1e58eadc2191dcd7c1c1b9019ff1ab
|
[
"Unlicense"
] | null | null | null |
kevin-bacon-status.py
|
jonesinator/kevin-bacon-status
|
58e721284d1e58eadc2191dcd7c1c1b9019ff1ab
|
[
"Unlicense"
] | null | null | null |
kevin-bacon-status.py
|
jonesinator/kevin-bacon-status
|
58e721284d1e58eadc2191dcd7c1c1b9019ff1ab
|
[
"Unlicense"
] | null | null | null |
import Tkinter
import urllib2
from PIL import Image, ImageTk
def update():
    """Poll deadoraliveinfo.com for Kevin Bacon's status, switch the image
    and caption to match, then re-schedule itself in 5 seconds.

    Python 2 code (urllib2/Tkinter). Reads the module-level widgets
    ``panel``, ``button``, ``root`` and the three pre-loaded images.
    """
    html = urllib2.urlopen('http://www.deadoraliveinfo.com/dead.Nsf/bnames/Bacon+Kevin').read()
    # The fetched page is sniffed for these marker substrings.
    if 'alive-record' in html:
        panel.config(image = aliveImage)
        button.config(text='Kevin Bacon is Alive.')
    elif 'dead-record' in html:
        panel.config(image = deadImage)
        button.config(text='Kevin Bacon is Dead.')
    else:
        # Neither marker found: report unknown rather than guessing.
        panel.config(image = unknownImage)
        button.config(text='The status of Kevin Bacon is unknown.')
    root.after(5000, update)  # poll again in 5000 ms
# --- GUI setup (module level; update() relies on these globals) ---
root = Tkinter.Tk()
root.title('Kevin Bacon Status')
# Pre-load the three status images once at startup.
aliveImage = ImageTk.PhotoImage(Image.open('kevin-bacon-alive.jpg'))
deadImage = ImageTk.PhotoImage(Image.open('kevin-bacon-dead.jpg'))
unknownImage = ImageTk.PhotoImage(Image.open('kevin-bacon-unknown.jpg'))
# Size the window to the largest image, anchored at the top-left corner.
widths = [aliveImage.width(), deadImage.width(), unknownImage.width()]
heights = [aliveImage.height(), deadImage.height(), unknownImage.height()]
root.geometry("%dx%d+%d+%d" % (max(widths), max(heights), 0, 0))
panel = Tkinter.Label(root, image=aliveImage)
panel.pack(side='top', fill='both', expand='yes')
button = Tkinter.Button(panel)
button.pack(side='bottom')
update()  # first poll; re-schedules itself via root.after
root.mainloop()
| 38.1875
| 95
| 0.704583
|
acfe4e8d6237dd2d2cfd15f8151c52e45007c4dd
| 488
|
py
|
Python
|
projects/speech_translation/data_prep/spm_encode.py
|
tran-khoa/fairseq
|
558366b3c6970a5dd85ad1909581d43e41fdce9f
|
[
"MIT"
] | null | null | null |
projects/speech_translation/data_prep/spm_encode.py
|
tran-khoa/fairseq
|
558366b3c6970a5dd85ad1909581d43e41fdce9f
|
[
"MIT"
] | null | null | null |
projects/speech_translation/data_prep/spm_encode.py
|
tran-khoa/fairseq
|
558366b3c6970a5dd85ad1909581d43e41fdce9f
|
[
"MIT"
] | null | null | null |
# CLI script: apply a trained SentencePiece model to a raw text file and
# print the encoded pieces (space-joined) for each input line to stdout.
import argparse

import sentencepiece as spm
from tqdm import tqdm

parser = argparse.ArgumentParser()
parser.add_argument('model')  # path to the trained SentencePiece model file
parser.add_argument('raw')    # path to the raw text corpus to encode
args = parser.parse_args()

sp = spm.SentencePieceProcessor(model_file=args.model)

with open(args.raw, 'rt') as in_file:
    for in_line in tqdm(in_file):
        # Map the corpus-level <UNK> marker straight to the lowercase
        # <unk> token instead of subword-encoding it literally.
        if in_line.strip() == '<UNK>':
            print("<unk>")
            continue
        out_line = sp.Encode(in_line, out_type='str')
        print(' '.join(out_line))
| 27.111111
| 54
| 0.670082
|
acfe4ee8198af3e3744d126cbc47fbba2d83d263
| 3,881
|
py
|
Python
|
google/ads/google_ads/v1/proto/errors/distinct_error_pb2.py
|
jwygoda/google-ads-python
|
863892b533240cb45269d9c2cceec47e2c5a8b68
|
[
"Apache-2.0"
] | null | null | null |
google/ads/google_ads/v1/proto/errors/distinct_error_pb2.py
|
jwygoda/google-ads-python
|
863892b533240cb45269d9c2cceec47e2c5a8b68
|
[
"Apache-2.0"
] | null | null | null |
google/ads/google_ads/v1/proto/errors/distinct_error_pb2.py
|
jwygoda/google-ads-python
|
863892b533240cb45269d9c2cceec47e2c5a8b68
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v1/proto/errors/distinct_error.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v1/proto/errors/distinct_error.proto',
package='google.ads.googleads.v1.errors',
syntax='proto3',
serialized_options=_b('\n\"com.google.ads.googleads.v1.errorsB\022DistinctErrorProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v1/errors;errors\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V1.Errors\312\002\036Google\\Ads\\GoogleAds\\V1\\Errors\352\002\"Google::Ads::GoogleAds::V1::Errors'),
serialized_pb=_b('\n9google/ads/googleads_v1/proto/errors/distinct_error.proto\x12\x1egoogle.ads.googleads.v1.errors\x1a\x1cgoogle/api/annotations.proto\"m\n\x11\x44istinctErrorEnum\"X\n\rDistinctError\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07UNKNOWN\x10\x01\x12\x15\n\x11\x44UPLICATE_ELEMENT\x10\x02\x12\x12\n\x0e\x44UPLICATE_TYPE\x10\x03\x42\xed\x01\n\"com.google.ads.googleads.v1.errorsB\x12\x44istinctErrorProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v1/errors;errors\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V1.Errors\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V1\\Errors\xea\x02\"Google::Ads::GoogleAds::V1::Errorsb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_DISTINCTERRORENUM_DISTINCTERROR = _descriptor.EnumDescriptor(
name='DistinctError',
full_name='google.ads.googleads.v1.errors.DistinctErrorEnum.DistinctError',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DUPLICATE_ELEMENT', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DUPLICATE_TYPE', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=144,
serialized_end=232,
)
_sym_db.RegisterEnumDescriptor(_DISTINCTERRORENUM_DISTINCTERROR)
_DISTINCTERRORENUM = _descriptor.Descriptor(
name='DistinctErrorEnum',
full_name='google.ads.googleads.v1.errors.DistinctErrorEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_DISTINCTERRORENUM_DISTINCTERROR,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=123,
serialized_end=232,
)
_DISTINCTERRORENUM_DISTINCTERROR.containing_type = _DISTINCTERRORENUM
DESCRIPTOR.message_types_by_name['DistinctErrorEnum'] = _DISTINCTERRORENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DistinctErrorEnum = _reflection.GeneratedProtocolMessageType('DistinctErrorEnum', (_message.Message,), dict(
DESCRIPTOR = _DISTINCTERRORENUM,
__module__ = 'google.ads.googleads_v1.proto.errors.distinct_error_pb2'
,
__doc__ = """Container for enum describing possible distinct errors.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v1.errors.DistinctErrorEnum)
))
_sym_db.RegisterMessage(DistinctErrorEnum)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 38.425743
| 657
| 0.785107
|
acfe5035130817b9ffdee77b72eec0ca8075489d
| 4,893
|
py
|
Python
|
lib/candy_editor/qt/controls/EditorCommon/ToolBar/ToolBarService.py
|
lihaochen910/Candy
|
d12cb964768459c22f30c22531d3e1734901e814
|
[
"MIT"
] | 1
|
2021-11-06T14:38:37.000Z
|
2021-11-06T14:38:37.000Z
|
lib/candy_editor/qt/controls/EditorCommon/ToolBar/ToolBarService.py
|
lihaochen910/Candy
|
d12cb964768459c22f30c22531d3e1734901e814
|
[
"MIT"
] | 5
|
2021-11-06T04:23:06.000Z
|
2022-03-12T01:03:25.000Z
|
lib/candy_editor/qt/controls/EditorCommon/ToolBar/ToolBarService.py
|
lihaochen910/Candy
|
d12cb964768459c22f30c22531d3e1734901e814
|
[
"MIT"
] | 1
|
2021-11-07T05:19:51.000Z
|
2021-11-07T05:19:51.000Z
|
from enum import IntEnum
from abc import abstractmethod
from PyQt5.QtCore import Qt, pyqtSignal, QPoint, qWarning, QDataStream, QObject
from PyQt5.QtGui import QPainter
from PyQt5.QtWidgets import QWidget, QSizePolicy, QBoxLayout, QStyleOption, QStyle, QSpacerItem, QApplication
from ..DragDrop import CDragDropData
from .ToolBarAreaItem import CToolBarAreaItem, CToolBarItem, CSpacerItem, CSpacerType
class QItemDescType ( IntEnum ):
    """Discriminates the kind of toolbar item a QItemDesc describes."""
    Command = 0
    CVar = 1
    Separator = 2
class QItemDesc:
    """Abstract base for a serializable description of one toolbar item."""

    @abstractmethod
    def toVariant ( self ):
        # Serialize this item to a plain value suitable for persistence.
        pass

    @abstractmethod
    def getType ( self ):
        # Return the QItemDescType of this item.
        pass
class QSeparatorDesc ( QItemDesc ):
    """Item description for a toolbar separator."""

    def toVariant ( self ):
        return "separator"

    def getType ( self ):
        return QItemDescType.Separator
# TODO: QCommandDesc
class QCommandDesc ( QItemDesc ):
    """Item description for a command-backed toolbar button (stub).

    NOTE(review): pyqtSignal only works on QObject subclasses; QItemDesc is
    a plain class, so this signal can never be connected or emitted —
    confirm whether QItemDesc was meant to derive from QObject.
    """
    commandChangedSignal = pyqtSignal ()

    def __init__ ( self, commandOrVariantMap, version ):
        self.name = ''
        self.command = ''
        self.iconPath = ''
        # NOTE(review): this instance attribute shadows the isCustom()
        # method below — on constructed instances, inst.isCustom is the
        # bool True, and the method is unreachable. Same for isDeprecated.
        self.isCustom = True
        self.isDeprecated = True

    def toVariant ( self ):
        # NOTE(review): returns "separator" — looks copy-pasted from
        # QSeparatorDesc; presumably should serialize the command fields.
        return "separator"

    def toQCommandAction ( self ):
        pass

    def getType ( self ):
        return QItemDescType.Command

    def setName ( self, name ):
        pass

    def setIcon ( self, path ):
        pass

    def getName ( self ):
        return self.name

    def getCommand ( self ):
        return self.command

    def getIcon ( self ):
        return self.iconPath

    def isCustom ( self ):
        # Shadowed by the __init__ attribute of the same name (see above).
        return self.isCustom

    def isDeprecated ( self ):
        pass

    def initFromCommand ( self, command ):
        pass
class QCVarDesc ( QItemDesc ):
    """Item description for a console-variable (CVar) toolbar control (stub).

    NOTE(review): pyqtSignal on a non-QObject class — cannot actually be
    connected or emitted (see QCommandDesc).
    """
    cvarChangedSignal = pyqtSignal ()

    def __init__ ( self, variantMap = None, version = None ):
        self.name = ''
        self.iconPath = ''
        self.value = {}
        self.isBitFlag_ = True

    def toVariant ( self ):
        # NOTE(review): returns "separator" — looks copy-pasted from
        # QSeparatorDesc; presumably should serialize the CVar fields.
        return "separator"

    def getType ( self ):
        return QItemDescType.CVar

    def setCVar ( self, path ):
        pass

    def setCVarValue ( self, cvarValue ):
        pass

    def setIcon ( self, path ):
        pass

    def getName ( self ):
        return self.name

    def getValue ( self ):
        return self.value

    def getIcon ( self ):
        return self.iconPath

    def isBitFlag ( self ):
        return self.isBitFlag_
class QToolBarDesc:
    """Serializable description of one editor toolbar (stub).

    NOTE(review): pyqtSignal on a non-QObject class — the signal used by
    onCommandChanged() below cannot actually be emitted.
    """
    toolBarChangedSignal = pyqtSignal ( object )

    # NOTE(review): declared @staticmethod yet takes `self` — callers would
    # have to pass the instance explicitly; probably meant as a regular
    # method or should drop `self`.
    @staticmethod
    def getNameFromFileInfo ( self, fileInfo ):
        pass

    def __init__ ( self ):
        self.name = ''
        self.path = ''
        self.items = []               # QItemDesc entries, in display order
        self.separatorIndices = []
        self.updated = False

    def initialize ( self, commandList, version ):
        pass

    def toVariant ( self ):
        pass

    def indexOfItem ( self, item ):
        pass

    def indexOfCommand ( self, command ):
        pass

    def getItemDescAt ( self, idx ):
        return self.items[ idx ]

    def createItem ( self, item, version ):
        pass

    def moveItem ( self, currIdx, idx ):
        pass

    # NOTE(review): dead code — overridden by the second insertItem
    # definition at the bottom of the class.
    def insertItem ( self, itemVariant, idx ):
        pass

    def insertCommand ( self, command, idx ):
        pass

    def insertCVar ( self, cvarName, idx ):
        pass

    def insertSeparator ( self, idx ):
        pass

    def removeItem ( self, itemOrIdx ):
        pass

    def getName ( self ):
        return self.name

    def setName ( self, name ):
        self.name = name

    def getPath ( self ):
        return self.path

    def setPath ( self, path ):
        self.path = path

    def getObjectName ( self ):
        return self.name + "ToolBar"

    def getItems ( self ):
        return self.items

    def requiresUpdate ( self ):
        pass

    def markAsUpdated ( self ):
        self.updated = True

    def onCommandChanged ( self ):
        self.toolBarChangedSignal.emit ( self )

    # NOTE(review): duplicate definition — this overrides
    # insertItem(itemVariant, idx) declared earlier; keep only one.
    def insertItem ( self, item, idx ):
        pass
class CVarActionMapper ( QObject ):
    """Maps console-variable (CVar) changes onto toolbar actions (stub)."""

    def addCVar ( self, cVarDesc ):
        pass

    def onCVarChanged ( self, cVar ):
        pass

    def onCVarActionDestroyed ( self, cVar, object ):
        # NOTE(review): parameter name `object` shadows the builtin.
        pass
class CToolBarService: # CUserData
    """Loads, saves and instantiates editor toolbars (stub).

    NOTE(review): four methods are defined twice with different signatures
    (createToolBarDesc, getToolBarNames, loadToolBars, getToolBarDesc);
    in Python the later definition silently replaces the earlier one, so
    the first of each pair below is dead code — resolve the duplicates.
    """

    def __init__ ( self ):
        pass

    # Dead code: overridden by createToolBarDesc(szEditorName, szToolBarName).
    def createToolBarDesc ( self, editor, szName ):
        pass

    def saveToolBar ( self, toolBarDesc ):
        pass

    def removeToolBar ( self, toolBarDesc ):
        pass

    # Dead code: overridden by getToolBarNames(szRelativePath).
    def getToolBarNames ( self, editor ):
        pass

    # Dead code: overridden by getToolBarDesc(szRelativePath).
    def getToolBarDesc ( self, editor, name ):
        pass

    def toVariant ( self, command ):
        pass

    def createToolBar ( self, toolBarDesc, toolBar, editor ):
        pass

    # Dead code: overridden by loadToolBars(szRelativePath, editor).
    def loadToolBars ( self, editor ):
        pass

    def migrateToolBars ( self, szSourceDirectory, szDestinationDirectory ):
        pass

    def createToolBarDesc ( self, szEditorName, szToolBarName ):
        pass

    def getToolBarNames ( self, szRelativePath ):
        pass

    def loadToolBars ( self, szRelativePath, editor = None ):
        pass

    def getToolBarDesc ( self, szRelativePath ):
        pass

    def getToolBarDirectories ( self, szRelativePath ):
        pass

    def findToolBarsInDirAndExecute ( self, dirPath, callback ):
        pass

    def getToolBarNamesFromDir ( self, dirPath, outResult ):
        pass

    def loadToolBarsFromDir ( self, dirPath, outToolBarDescriptors ):
        pass

    def loadToolBar ( self, absolutePath ):
        pass

    def createEditorToolBars ( self, toolBarDescriptors, editor = None ):
        pass

    def createEditorToolBar ( self, toolBarDesc, editor = None ):
        pass
| 17.66426
| 109
| 0.697731
|
acfe50b9260ad9e3b23be21e68fe8025f5a4ae2c
| 2,480
|
py
|
Python
|
backend/metagrid/users/tests/test_views.py
|
tomvothecoder/metagrid
|
a38f3f78fbaf78f909c74a263c03a3f6d376b75c
|
[
"MIT"
] | 5
|
2020-06-08T23:20:33.000Z
|
2021-11-29T15:25:27.000Z
|
backend/metagrid/users/tests/test_views.py
|
aims-group/metagrid
|
32bd76638d80c3da7529a73ab8509eb53264c8dc
|
[
"MIT"
] | 359
|
2020-04-24T19:45:56.000Z
|
2022-03-31T18:04:12.000Z
|
backend/metagrid/users/tests/test_views.py
|
tomvothecoder/metagrid
|
a38f3f78fbaf78f909c74a263c03a3f6d376b75c
|
[
"MIT"
] | 1
|
2021-03-01T17:27:43.000Z
|
2021-03-01T17:27:43.000Z
|
import factory
import pytest
from django.contrib.auth.hashers import check_password
from django.urls import reverse
from faker import Faker
from rest_framework import status
from rest_framework.test import APITestCase
from metagrid.users.models import User
from metagrid.users.tests.factories import UserFactory, raw_password
# Shared Faker instance for generating random test data.
fake = Faker()
# Grant database access to every test in this module.
pytestmark = pytest.mark.django_db
class TestUserListTestCase(APITestCase):
    """
    Tests /users list operations (user creation via POST).
    """

    def setUp(self):
        # Endpoint under test plus a plain-dict payload built from the
        # user factory (factory.build does not hit the database).
        self.url = reverse("user-list")
        self.user_data = factory.build(dict, FACTORY_CLASS=UserFactory)

    def test_post_request_with_no_data_fails(self):
        # An empty payload must be rejected with 400.
        response = self.client.post(self.url, {})
        assert response.status_code == status.HTTP_400_BAD_REQUEST

    def test_post_request_with_valid_data_succeeds(self):
        response = self.client.post(self.url, self.user_data)
        assert response.status_code == status.HTTP_201_CREATED

        # The created user is persisted with the submitted email and a
        # hashed (not plain-text) password.
        user = User.objects.get(pk=response.data.get("id"))
        assert user.email == self.user_data.get("email")
        assert check_password(self.user_data.get("password"), user.password)
class TestUserDetailTestCase(APITestCase):
    """
    Tests /users detail operations (GET/PUT on a single user).

    setUp performs a real login through the rest_login endpoint and
    attaches the returned JWT bearer token to the test client.
    """

    def setUp(self):
        self.user = UserFactory()

        # Login user to fetch access token
        rest_login_url = reverse("rest_login")
        payload = {
            "email": self.user.email,
            # raw_password is the known plain-text counterpart of the
            # factory-hashed password.
            "password": raw_password,
        }
        response = self.client.post(
            rest_login_url,
            payload,
            format="json",
        )
        assert response.status_code == status.HTTP_200_OK

        # Add access token to authorization header
        access_token = response.data["access_token"]
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {access_token}")

        # URL for user's detail
        self.url = reverse("user-detail", kwargs={"pk": self.user.pk})

    def test_get_request_returns_a_given_user(self):
        response = self.client.get(self.url)
        assert response.status_code == status.HTTP_200_OK

    def test_put_request_updates_a_user(self):
        new_first_name = fake.first_name()
        payload = {"first_name": new_first_name}
        response = self.client.put(self.url, payload)
        assert response.status_code == status.HTTP_200_OK

        # Verify the change reached the database, not just the response.
        user = User.objects.get(pk=self.user.id)
        assert user.first_name == new_first_name
| 31.392405
| 76
| 0.681452
|
acfe5265c6300f29a773d87a9e6b8059f07e9a50
| 10,745
|
py
|
Python
|
grapl_analyzerlib/nodes/comparators.py
|
grapl-security/grapl_analyzerlib
|
8591dfe549df71ac319283223b5cff3edac31d51
|
[
"MIT"
] | 3
|
2019-06-08T05:52:09.000Z
|
2020-01-31T03:19:26.000Z
|
grapl_analyzerlib/nodes/comparators.py
|
graplsec/grapl_analyzerlib
|
8591dfe549df71ac319283223b5cff3edac31d51
|
[
"MIT"
] | 449
|
2020-09-11T07:07:18.000Z
|
2021-08-03T06:09:36.000Z
|
grapl_analyzerlib/nodes/comparators.py
|
graplsec/grapl_analyzerlib
|
8591dfe549df71ac319283223b5cff3edac31d51
|
[
"MIT"
] | 2
|
2020-01-31T03:19:57.000Z
|
2020-04-17T20:10:05.000Z
|
import re
from typing import List, Union, TypeVar, Generic, Optional, Sequence, Any, cast, Tuple
from grapl_analyzerlib.nodes.types import PropertyT
# Property values are either strings or ints.
T = TypeVar("T", bound=Union[str, int])
# Filter matrix over a single property; presumably the outer list is
# AND-ed and each inner list OR-ed by the query builder — confirm at the
# call sites of _str_cmps/_int_cmps.
PropertyFilter = List[List["Cmp[T]"]]
# Accepted user-facing comparison inputs: a scalar, a negation, or a list.
StrCmp = Union[str, "Not[str]", List[str], List[Union[str, "Not[str]"]]]
IntCmp = Union[int, "Not[int]", List[int], List[Union[int, "Not[int]"]]]
def escape_dgraph_regexp(input: str) -> str:
    """Escape *input* for embedding inside a Dgraph /.../ regexp literal.

    Applies re.escape first, then additionally backslash-escapes the two
    characters that would terminate a Dgraph regexp: `"` and `/`.
    """
    extra = {'"': r"\"", "/": r"\/"}
    return "".join(extra.get(char, char) for char in re.escape(input))
def escape_dgraph_str(input: str, query=False) -> str:
    """Escape *input* for storage in (or, with query=True, querying of) Dgraph.

    `$` and newline get a `//` sentinel prefix; backslash and double quote
    are escaped more aggressively when the string is destined for a query.
    """
    pieces = []
    for char in input:
        if char == "$":
            pieces.append("//$")
        elif char == "\n":
            pieces.append(r"//\\n" if query else r"//\n")
        elif char == "\\":
            pieces.append(r"\\\\" if query else r"\\")
        elif char == '"':
            pieces.append(r"\"" if query else r'"')
        else:
            pieces.append(char)
    return "".join(pieces)
def unescape_dgraph_str(input: str) -> str:
    """Reverse escape_dgraph_str's non-query escaping; non-strings pass through."""
    if not isinstance(input, str):
        return input
    # Order matters: sentinel sequences first, bare backslash pairs last.
    replacements = (
        ("//$", "$"),
        (r"//\n", "\n"),
        (r"\"", '"'),
        (r"\\", "\\"),
    )
    output = input
    for old, new in replacements:
        output = output.replace(old, new)
    return output
class Or(object):
    """Marker bundling several property types to be OR-ed together.

    NOTE(review): no code in this chunk consumes Or — confirm the
    consumer's semantics before relying on it.
    """
    def __init__(self, *values: PropertyT):
        self.values = values  # the alternatives, in the order given
class Not(Generic[T]):
    """Negation marker: wraps a value so comparators render `NOT ...`."""
    def __init__(self, value: T) -> None:
        self.value = value  # the wrapped, un-negated value
class Cmp(Generic[T]):
    """Base class for all comparison filters over a predicate."""
    def to_filter(self) -> str:
        # Subclasses override this to render a Dgraph filter expression;
        # the base implementation deliberately returns None.
        pass
class Eq(Cmp[T]):
    """Exact-match (`eq`) comparison, with special handling for dgraph.type.

    String values are query-escaped on construction; a ``Not`` wrapper
    negates the rendered filter.
    """

    def __init__(self, predicate: str, value: Union[T, Not[T]]) -> None:
        self.predicate = predicate
        if isinstance(value, str):
            self.value = escape_dgraph_str(
                value, query=True
            )  # type: Union[str, Not[str]]
        elif isinstance(value, Not) and isinstance(value.value, str):
            self.value = Not(escape_dgraph_str(value.value, query=True))
        else:
            self.value = value

    def to_filter(self) -> str:
        """Render this comparison as a Dgraph filter expression.

        Raises:
            TypeError: if the stored value is neither str, int, nor a
                ``Not`` wrapping one of those.
        """
        if isinstance(self.value, str):
            if self.predicate == "dgraph.type":
                # Type comparisons use Dgraph's dedicated type() function.
                return f"type({self.value})"
            return 'eq({}, "{}")'.format(self.predicate, self.value,)
        if isinstance(self.value, int):
            return "eq({}, {})".format(self.predicate, self.value)
        if isinstance(self.value, Not) and isinstance(self.value.value, str):
            if self.predicate == "dgraph.type":
                # BUG FIX: previously interpolated the Not wrapper itself,
                # yielding e.g. "NOT type(<...Not object at 0x...>)".
                return f"NOT type({self.value.value})"
            return 'NOT eq({}, "{}")'.format(self.predicate, self.value.value)
        if isinstance(self.value, Not) and isinstance(self.value.value, int):
            return "NOT eq({}, {})".format(self.predicate, self.value.value)
        raise TypeError
class EndsWith(Cmp[str]):
    """Suffix comparison rendered as an anchored, case-insensitive regexp."""

    def __init__(self, predicate: str, value: Union[str, Not[str]]) -> None:
        self.predicate = predicate
        if isinstance(value, str):
            self.value = escape_dgraph_str(value)  # type: Union[str, Not[str]]
        else:
            # BUG FIX: the old code set value.value = Not(...), double
            # wrapping the negation, so to_filter() later handed a Not
            # instance to re.escape() and raised TypeError.
            self.value = Not(escape_dgraph_str(value.value))

    def to_filter(self) -> str:
        if isinstance(self.value, Not):
            value = self.value.value
            escaped_value = re.escape(value)
            return "NOT regexp({}, /{}$/i)".format(self.predicate, escaped_value)
        else:
            escaped_value = re.escape(self.value)
            return "regexp({}, /{}$/i)".format(self.predicate, escaped_value)
class StartsWith(Cmp[str]):
    """Prefix comparison rendered as an anchored, case-insensitive regexp."""

    def __init__(self, predicate: str, value: Union[str, Not[str]]) -> None:
        self.predicate = predicate
        if isinstance(value, str):
            self.value = escape_dgraph_str(value)  # type: Union[str, Not[str]]
        else:
            # BUG FIX: the old code set value.value = Not(...), double
            # wrapping the negation, so to_filter() later handed a Not
            # instance to re.escape() and raised TypeError.
            self.value = Not(escape_dgraph_str(value.value))

    def to_filter(self) -> str:
        if isinstance(self.value, Not):
            value = self.value.value
            escaped_value = re.escape(value)
            return "NOT regexp({}, /^{}.*/i)".format(self.predicate, escaped_value)
        else:
            escaped_value = re.escape(self.value)
            return "regexp({}, /^{}.*/i)".format(self.predicate, escaped_value)
class Rex(Cmp[str]):
    """Raw regular-expression comparison; the value is used as the pattern."""

    def __init__(self, predicate: str, value: Union[str, Not[str]]) -> None:
        self.predicate = predicate
        if isinstance(value, str):
            self.value = value.replace("$", "//$").replace("\n", "//\n")
        else:
            # BUG FIX: the escaped result was previously discarded —
            # str.replace returns a new string, it does not mutate in place.
            value.value = value.value.replace("$", "//$").replace("\n", "//\n")
            self.value = value

    def to_filter(self) -> str:
        if isinstance(self.value, Not):
            value = self.value.value
            return f"NOT regexp({self.predicate}, /{value}/)"
        else:
            return f"regexp({self.predicate}, /{self.value}/)"
class Gt(Cmp[int]):
    """Greater-than (`gt`) comparison on an integer predicate."""

    def __init__(self, predicate: str, value: Union[int, Not[int]]) -> None:
        self.predicate = predicate
        self.value = value

    def to_filter(self) -> str:
        if isinstance(self.value, Not):
            # BUG FIX: interpolate the wrapped int, not the Not wrapper
            # (which rendered as its default object repr).
            return f"NOT gt({self.predicate}, {self.value.value})"
        else:
            return f"gt({self.predicate}, {self.value})"
class Lt(Cmp[int]):
    """Less-than (`lt`) comparison on an integer predicate."""

    def __init__(self, predicate: str, value: Union[int, Not[int]]) -> None:
        self.predicate = predicate
        self.value = value

    def to_filter(self) -> str:
        if isinstance(self.value, Not):
            # BUG FIX: interpolate the wrapped int, not the Not wrapper
            # (which rendered as its default object repr).
            return f"NOT lt({self.predicate}, {self.value.value})"
        else:
            return f"lt({self.predicate}, {self.value})"
class Has(Cmp[Any]):
    """Existence check: matches nodes where the predicate is set at all."""

    def __init__(self, predicate: str) -> None:
        self.predicate = predicate

    def to_filter(self) -> str:
        return "has({})".format(self.predicate)
class Contains(Cmp[str]):
    """Substring comparison rendered as an unanchored regexp."""

    def __init__(self, predicate: str, value: Union[str, Not[str]]) -> None:
        self.predicate = predicate
        if isinstance(value, str):
            self.value = escape_dgraph_regexp(value)
        else:
            # BUG FIX: the old code set value.value = Not(...), double
            # wrapping the negation, so to_filter() interpolated a Not
            # instance's repr into the regexp pattern.
            self.value = Not(escape_dgraph_regexp(value.value))

    def to_filter(self) -> str:
        if isinstance(self.value, Not):
            return f"NOT regexp({self.predicate}, /{self.value.value}/)"
        else:
            return f"regexp({self.predicate}, /{self.value}/)"
class Regexp(Cmp[str]):
    """User-supplied regexp comparison; only `/` is escaped for embedding."""

    def __init__(self, predicate: str, value: Union[str, Not[str]]) -> None:
        self.predicate = predicate
        self.value = value

    def to_filter(self) -> str:
        negated = isinstance(self.value, Not)
        raw = self.value.value if negated else self.value
        # Escape the pattern delimiter so the /.../ literal stays intact.
        pattern = raw.replace("/", "\\/")
        clause = f"regexp({self.predicate}, /{pattern}/)"
        return "NOT " + clause if negated else clause
class Distance(Cmp[str]):
    """Fuzzy `match` comparison with a maximum edit distance."""

    def __init__(
        self, predicate: str, value: Union[str, Not[str]], distance: int
    ) -> None:
        self.predicate = predicate
        self.value = value
        self.distance = distance

    def to_filter(self) -> str:
        negated = isinstance(self.value, Not)
        raw = self.value.value if negated else self.value
        clause = 'match({}, "{}", {})'.format(self.predicate, raw, self.distance)
        return "NOT " + clause if negated else clause
def _str_cmps(
    predicate: str,
    eq: Optional[StrCmp] = None,
    contains: Optional[StrCmp] = None,
    ends_with: Optional[StrCmp] = None,
    starts_with: Optional[StrCmp] = None,
    regexp: Optional[StrCmp] = None,
    distance: Optional[Tuple[StrCmp, int]] = None,
) -> List[List[Cmp[str]]]:
    """Build the filter matrix for a string predicate.

    Each provided comparison kind contributes one inner list: a scalar (or
    Not-wrapped) value becomes a singleton, a list becomes one entry per
    element. With no comparisons at all, the result degenerates to a bare
    has() existence check.
    """
    cmps = []  # type: List[Sequence[Cmp[str]]]

    if isinstance(eq, str) or isinstance(eq, Not):
        cmps.append([Eq(predicate, eq)])
    elif isinstance(eq, list):
        _eq = [Eq(predicate, e) for e in eq]
        cmps.append(_eq)

    if isinstance(contains, str) or isinstance(contains, Not):
        cmps.append([Contains(predicate, contains)])
    elif isinstance(contains, list):
        _contains = [Contains(predicate, e) for e in contains]
        cmps.append(_contains)

    if isinstance(ends_with, str) or isinstance(ends_with, Not):
        cmps.append([EndsWith(predicate, ends_with)])
    elif isinstance(ends_with, list):
        _ends_with = [EndsWith(predicate, e) for e in ends_with]
        cmps.append(_ends_with)

    if isinstance(starts_with, str) or isinstance(starts_with, Not):
        cmps.append([StartsWith(predicate, starts_with)])
    elif isinstance(starts_with, list):
        _starts_with = [StartsWith(predicate, e) for e in starts_with]
        cmps.append(_starts_with)

    if isinstance(regexp, str) or isinstance(regexp, Not):
        cmps.append([Rex(predicate, regexp)])
    elif isinstance(regexp, list):
        _regexp = [Rex(predicate, e) for e in regexp]
        cmps.append(_regexp)

    if distance:
        if isinstance(distance[0], str) or isinstance(distance[0], Not):
            cmps.append([Distance(predicate, distance[0], distance[1])])
        # NOTE(review): `distance` is annotated as a single (value, n)
        # tuple, yet this branch treats it as a list of such tuples — both
        # cannot hold at once; confirm which shape callers actually pass.
        elif isinstance(distance, list):
            _distance = [Distance(predicate, e[0], e[1]) for e in distance]
            cmps.append(_distance)

    # No comparison supplied at all: fall back to an existence check.
    if not cmps:
        cmps.append([Has(predicate)])

    return cast(List[List[Cmp[str]]], cmps)
def _int_cmps(
    predicate: str,
    eq: Optional[IntCmp] = None,
    gt: Optional[IntCmp] = None,
    lt: Optional[IntCmp] = None,
) -> List[List[Cmp[int]]]:
    """Build the filter matrix for an integer predicate.

    Each provided comparison contributes one inner list; when none is
    given the result degenerates to a bare has() existence check.
    """
    cmps = []  # type: List[Sequence[Cmp[int]]]

    def _collect(value, make):
        # A scalar (or Not-wrapped) value becomes a singleton group; a
        # list becomes one group with an entry per element.
        if isinstance(value, (int, Not)):
            cmps.append([make(predicate, value)])
        elif isinstance(value, list):
            cmps.append([make(predicate, v) for v in value])

    _collect(eq, Eq)
    _collect(gt, Gt)
    _collect(lt, Lt)

    if eq is None and gt is None and lt is None:
        cmps.append([Has(predicate)])

    return cast(List[List[Cmp[int]]], cmps)
| 32.170659
| 86
| 0.570126
|
acfe526dc0a2cc38d09050c3c01664cc0ef4c5c8
| 824
|
py
|
Python
|
tests/test_distribution.py
|
hakkeroid/configstacker
|
69bddeeae98f327f8cd179bdb5e2097437d97822
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_distribution.py
|
hakkeroid/configstacker
|
69bddeeae98f327f8cd179bdb5e2097437d97822
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_distribution.py
|
hakkeroid/configstacker
|
69bddeeae98f327f8cd179bdb5e2097437d97822
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import pkg_resources
import pytest
# Maps a setuptools "extra" name to the configstacker source class that
# depends on it.
SOURCE_FILE_MAP = {
    'yaml': 'YAMLFile',
}

# Installed distribution metadata for configstacker; provides .extras and
# .requires() used by the fixture below.
DIST = pkg_resources.get_distribution('configstacker')
@pytest.fixture(params=DIST.extras)
def dependencies(request):
    """Parametrized over every declared extra; yields (extra, source, reqs)."""
    extra = request.param
    source = SOURCE_FILE_MAP[extra]
    # [1:] drops the first requirement — presumably the base (non-extra)
    # requirement, leaving only the extra-specific ones; TODO confirm.
    requirements = DIST.requires(extras=[extra])[1:]
    yield extra, source, requirements
def test_dependencies(dependencies):
    """If an extra's requirement is missing, instantiating the matching
    source class must raise an ImportError mentioning the optional
    dependency.

    NOTE(review): when every requirement is installed this test asserts
    nothing at all — confirm that is intended.
    """
    extra, source, requirements = dependencies

    for requirement in requirements:
        installed = pkg_resources.working_set.find(requirement)
        if not installed:
            with pytest.raises(ImportError) as exc_info:
                from configstacker import sources
                getattr(sources, source)('some source')

            assert 'optional dependency' in str(exc_info.value)
| 24.969697
| 63
| 0.68932
|
acfe52939c03edd7302ea495bddccadff42e65ab
| 2,532
|
py
|
Python
|
sbg_cwl_upgrader/converter/sbg_draft2_to_cwl_1_0.py
|
sbg/sevenbridges-cwl-draft2-upgrader
|
5ac4d000674e4e2611a7c1816b523cfaeed2cfe7
|
[
"Apache-2.0"
] | 6
|
2019-09-03T13:15:33.000Z
|
2021-12-22T07:45:26.000Z
|
sbg_cwl_upgrader/converter/sbg_draft2_to_cwl_1_0.py
|
sbg/sevenbridges-cwl-draft2-upgrader
|
5ac4d000674e4e2611a7c1816b523cfaeed2cfe7
|
[
"Apache-2.0"
] | 1
|
2020-02-06T11:39:39.000Z
|
2020-02-06T11:39:39.000Z
|
sbg_cwl_upgrader/converter/sbg_draft2_to_cwl_1_0.py
|
sbg/sevenbridges-cwl-draft2-upgrader
|
5ac4d000674e4e2611a7c1816b523cfaeed2cfe7
|
[
"Apache-2.0"
] | null | null | null |
import argparse
import sys
from sbg_cwl_upgrader.sbg_utils import (add_sbg_auth_to_args,
configure_logging,
add_logging_to_args)
from sbg_cwl_upgrader.converter.cwl_converter import CWLConverterFacade
from sbg_cwl_upgrader.cwl_utils import DEFAULT_CWL_VERSION
def create_arg_parser():
    """Build the CLI argument parser for the draft2 -> CWL v1 converter."""
    parser = argparse.ArgumentParser(
        description=' This tool converts CWL draft2 applications '
                    '(workflows, command line tools) to CWL v1.0 or v1.1')

    # (flags, add_argument keyword options) — registered in this order so
    # the generated --help output is unchanged.
    option_specs = [
        (('-i', '--input'),
         dict(required=True,
              help='can be either draft2 file (YAML, JSON, CWL)'
                   ' path or application ID.')),
        (('-o', '--output'),
         dict(required=True,
              help='can be either cwl v1 file (YAML, JSON, CWL)'
                   ' path or application ID.')),
        (('-c', '--cwl-version'),
         dict(default=DEFAULT_CWL_VERSION,
              choices=['v1.0', 'v1.1'],
              help='set cwl version for output')),
        (('-r', '--revision'),
         dict(type=int,
              help='platform application revision. default: latest')),
        (('-v', '--validate'),
         dict(action='store_true',
              help='validate JS in the converted CWL v1 app.')),
        (('-u', '--update'),
         dict(dest='update', action='store_true',
              help='update/install if output is a platform app.')),
        (('-d', '--decompose'),
         dict(action='store_true',
              help='decompose the converted CWL v1 workflow.')),
    ]
    for flags, options in option_specs:
        parser.add_argument(*flags, **options)

    add_logging_to_args(parser)
    add_sbg_auth_to_args(parser)
    return parser
def main(args=sys.argv[1:]):
    """
    Entrypoint and CLI for sbg_cwl_upgrader tool.

    Note: the default binds sys.argv[1:] once at import time, which is
    fine for a console script; pass an explicit list when calling
    programmatically.
    """
    # Parse CLI args into a plain dict for keyword lookup below.
    args = vars(create_arg_parser().parse_args(args))
    configure_logging(args)
    # The facade performs the whole conversion as a side effect of
    # construction; main() returns None, so sys.exit(main()) exits 0.
    CWLConverterFacade(token=args['token'],
                       profile=args['profile'],
                       platform=args['platform'],
                       endpoint=args['endpoint'],
                       app_revision=args['revision'],
                       input_=args['input'],
                       output=args['output'],
                       cwl_version=args['cwl_version'],
                       validate=args['validate'],
                       update=args['update'],
                       decompose=args['decompose'])


if __name__ == '__main__':
    sys.exit(main())
| 40.190476
| 78
| 0.557662
|
acfe52fa9c2bdd80b5ba5525b3275b84fce1ad14
| 17,848
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/organization_security_policy.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/compute/organization_security_policy.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/compute/organization_security_policy.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['OrganizationSecurityPolicyArgs', 'OrganizationSecurityPolicy']
@pulumi.input_type
class OrganizationSecurityPolicyArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen); manual edits
    # will be overwritten on regeneration.
    def __init__(__self__, *,
                 display_name: pulumi.Input[str],
                 parent: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a OrganizationSecurityPolicy resource.
        :param pulumi.Input[str] display_name: A textual name of the security policy.
        :param pulumi.Input[str] parent: The parent of this OrganizationSecurityPolicy in the Cloud Resource Hierarchy.
               Format: organizations/{organization_id} or folders/{folder_id}
        :param pulumi.Input[str] description: A textual description for the organization security policy.
        :param pulumi.Input[str] type: The type indicates the intended use of the security policy.
               For organization security policies, the only supported type
               is "FIREWALL".
               Default value is `FIREWALL`.
               Possible values are `FIREWALL`.
        """
        pulumi.set(__self__, "display_name", display_name)
        pulumi.set(__self__, "parent", parent)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Input[str]:
        """
        A textual name of the security policy.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter
    def parent(self) -> pulumi.Input[str]:
        """
        The parent of this OrganizationSecurityPolicy in the Cloud Resource Hierarchy.
        Format: organizations/{organization_id} or folders/{folder_id}
        """
        return pulumi.get(self, "parent")

    @parent.setter
    def parent(self, value: pulumi.Input[str]):
        pulumi.set(self, "parent", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        A textual description for the organization security policy.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The type indicates the intended use of the security policy.
        For organization security policies, the only supported type
        is "FIREWALL".
        Default value is `FIREWALL`.
        Possible values are `FIREWALL`.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class _OrganizationSecurityPolicyState:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen); used for
    # looking up / filtering existing resources, so every field is optional.
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 fingerprint: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None,
                 policy_id: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering OrganizationSecurityPolicy resources.
        :param pulumi.Input[str] description: A textual description for the organization security policy.
        :param pulumi.Input[str] display_name: A textual name of the security policy.
        :param pulumi.Input[str] fingerprint: Fingerprint of this resource. This field is used internally during updates of this resource.
        :param pulumi.Input[str] parent: The parent of this OrganizationSecurityPolicy in the Cloud Resource Hierarchy.
               Format: organizations/{organization_id} or folders/{folder_id}
        :param pulumi.Input[str] policy_id: The unique identifier for the resource. This identifier is defined by the server.
        :param pulumi.Input[str] type: The type indicates the intended use of the security policy.
               For organization security policies, the only supported type
               is "FIREWALL".
               Default value is `FIREWALL`.
               Possible values are `FIREWALL`.
        """
        if description is not None:
            pulumi.set(__self__, "description", description)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if fingerprint is not None:
            pulumi.set(__self__, "fingerprint", fingerprint)
        if parent is not None:
            pulumi.set(__self__, "parent", parent)
        if policy_id is not None:
            pulumi.set(__self__, "policy_id", policy_id)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        A textual description for the organization security policy.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        A textual name of the security policy.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter
    def fingerprint(self) -> Optional[pulumi.Input[str]]:
        """
        Fingerprint of this resource. This field is used internally during updates of this resource.
        """
        return pulumi.get(self, "fingerprint")

    @fingerprint.setter
    def fingerprint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "fingerprint", value)

    @property
    @pulumi.getter
    def parent(self) -> Optional[pulumi.Input[str]]:
        """
        The parent of this OrganizationSecurityPolicy in the Cloud Resource Hierarchy.
        Format: organizations/{organization_id} or folders/{folder_id}
        """
        return pulumi.get(self, "parent")

    @parent.setter
    def parent(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "parent", value)

    @property
    @pulumi.getter(name="policyId")
    def policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        The unique identifier for the resource. This identifier is defined by the server.
        """
        return pulumi.get(self, "policy_id")

    @policy_id.setter
    def policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "policy_id", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The type indicates the intended use of the security policy.
        For organization security policies, the only supported type
        is "FIREWALL".
        Default value is `FIREWALL`.
        Possible values are `FIREWALL`.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
class OrganizationSecurityPolicy(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Typing-only overload (keyword-argument form); the real dispatch
        happens in the undecorated __init__ below.

        Organization security policies are used to control incoming/outgoing traffic.
        To get more information about OrganizationSecurityPolicy, see:

        * [API documentation](https://cloud.google.com/compute/docs/reference/rest/beta/organizationSecurityPolicies)
        * How-to Guides
            * [Creating a firewall policy](https://cloud.google.com/vpc/docs/using-firewall-policies#create-policy)

        ## Example Usage
        ## Import

        OrganizationSecurityPolicy can be imported using any of these accepted formats

        ```sh
         $ pulumi import gcp:compute/organizationSecurityPolicy:OrganizationSecurityPolicy default locations/global/securityPolicies/{{policy_id}}
        ```

        ```sh
         $ pulumi import gcp:compute/organizationSecurityPolicy:OrganizationSecurityPolicy default {{policy_id}}
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: A textual description for the organization security policy.
        :param pulumi.Input[str] display_name: A textual name of the security policy.
        :param pulumi.Input[str] parent: The parent of this OrganizationSecurityPolicy in the Cloud Resource Hierarchy.
               Format: organizations/{organization_id} or folders/{folder_id}
        :param pulumi.Input[str] type: The type indicates the intended use of the security policy.
               For organization security policies, the only supported type
               is "FIREWALL".
               Default value is `FIREWALL`.
               Possible values are `FIREWALL`.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: OrganizationSecurityPolicyArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Typing-only overload (args-object form); the real dispatch happens
        in the undecorated __init__ below.

        Organization security policies are used to control incoming/outgoing traffic.
        To get more information about OrganizationSecurityPolicy, see:

        * [API documentation](https://cloud.google.com/compute/docs/reference/rest/beta/organizationSecurityPolicies)
        * How-to Guides
            * [Creating a firewall policy](https://cloud.google.com/vpc/docs/using-firewall-policies#create-policy)

        ## Example Usage
        ## Import

        OrganizationSecurityPolicy can be imported using any of these accepted formats

        ```sh
         $ pulumi import gcp:compute/organizationSecurityPolicy:OrganizationSecurityPolicy default locations/global/securityPolicies/{{policy_id}}
        ```

        ```sh
         $ pulumi import gcp:compute/organizationSecurityPolicy:OrganizationSecurityPolicy default {{policy_id}}
        ```

        :param str resource_name: The name of the resource.
        :param OrganizationSecurityPolicyArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two @overload signatures above:
        # unpack an OrganizationSecurityPolicyArgs object if one was
        # passed, otherwise forward the keyword arguments unchanged.
        resource_args, opts = _utilities.get_resource_args_opts(OrganizationSecurityPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       parent: Optional[pulumi.Input[str]] = None,
                       type: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared constructor body behind both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (not looking up an existing one):
            # validate required properties and build the input bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = OrganizationSecurityPolicyArgs.__new__(OrganizationSecurityPolicyArgs)

            __props__.__dict__["description"] = description
            if display_name is None and not opts.urn:
                raise TypeError("Missing required property 'display_name'")
            __props__.__dict__["display_name"] = display_name
            if parent is None and not opts.urn:
                raise TypeError("Missing required property 'parent'")
            __props__.__dict__["parent"] = parent
            __props__.__dict__["type"] = type
            # Output-only properties start unset; the provider fills them in.
            __props__.__dict__["fingerprint"] = None
            __props__.__dict__["policy_id"] = None
        super(OrganizationSecurityPolicy, __self__).__init__(
            'gcp:compute/organizationSecurityPolicy:OrganizationSecurityPolicy',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            description: Optional[pulumi.Input[str]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            fingerprint: Optional[pulumi.Input[str]] = None,
            parent: Optional[pulumi.Input[str]] = None,
            policy_id: Optional[pulumi.Input[str]] = None,
            type: Optional[pulumi.Input[str]] = None) -> 'OrganizationSecurityPolicy':
        """
        Get an existing OrganizationSecurityPolicy resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: A textual description for the organization security policy.
        :param pulumi.Input[str] display_name: A textual name of the security policy.
        :param pulumi.Input[str] fingerprint: Fingerprint of this resource. This field is used internally during updates of this resource.
        :param pulumi.Input[str] parent: The parent of this OrganizationSecurityPolicy in the Cloud Resource Hierarchy.
               Format: organizations/{organization_id} or folders/{folder_id}
        :param pulumi.Input[str] policy_id: The unique identifier for the resource. This identifier is defined by the server.
        :param pulumi.Input[str] type: The type indicates the intended use of the security policy.
               For organization security policies, the only supported type
               is "FIREWALL".
               Default value is `FIREWALL`.
               Possible values are `FIREWALL`.
        """
        # Binding the provider-assigned id makes the engine read existing
        # state instead of creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Bypass the state class's __init__; fields are assigned directly.
        __props__ = _OrganizationSecurityPolicyState.__new__(_OrganizationSecurityPolicyState)
        __props__.__dict__["description"] = description
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["fingerprint"] = fingerprint
        __props__.__dict__["parent"] = parent
        __props__.__dict__["policy_id"] = policy_id
        __props__.__dict__["type"] = type
        return OrganizationSecurityPolicy(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        A textual description for the organization security policy.
        """
        # Read-only proxy onto the provider-managed output value.
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        A textual name of the security policy.
        """
        # Read-only proxy onto the provider-managed output value.
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter
    def fingerprint(self) -> pulumi.Output[str]:
        """
        Fingerprint of this resource. This field is used internally during updates of this resource.
        """
        # Output-only; assigned by the provider, never set by the user.
        return pulumi.get(self, "fingerprint")
    @property
    @pulumi.getter
    def parent(self) -> pulumi.Output[str]:
        """
        The parent of this OrganizationSecurityPolicy in the Cloud Resource Hierarchy.
        Format: organizations/{organization_id} or folders/{folder_id}
        """
        # Read-only proxy onto the provider-managed output value.
        return pulumi.get(self, "parent")
    @property
    @pulumi.getter(name="policyId")
    def policy_id(self) -> pulumi.Output[str]:
        """
        The unique identifier for the resource. This identifier is defined by the server.
        """
        # Output-only; assigned by the provider, never set by the user.
        return pulumi.get(self, "policy_id")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[Optional[str]]:
        """
        The type indicates the intended use of the security policy.
        For organization security policies, the only supported type
        is "FIREWALL".
        Default value is `FIREWALL`.
        Possible values are `FIREWALL`.
        """
        # Read-only proxy onto the provider-managed output value.
        return pulumi.get(self, "type")
| 42.293839
| 146
| 0.648812
|
acfe5341cfb369ac454e5038976504e054b4df20
| 2,370
|
py
|
Python
|
software/param2tex.py
|
JDLopes/iob-tex
|
45b5dc775e67631ed120740b297c74b78fb5436a
|
[
"MIT"
] | 2
|
2020-12-08T17:20:21.000Z
|
2022-01-01T22:17:24.000Z
|
software/param2tex.py
|
JDLopes/iob-tex
|
45b5dc775e67631ed120740b297c74b78fb5436a
|
[
"MIT"
] | 3
|
2021-09-12T08:55:58.000Z
|
2021-11-30T19:28:20.000Z
|
software/param2tex.py
|
JDLopes/iob-tex
|
45b5dc775e67631ed120740b297c74b78fb5436a
|
[
"MIT"
] | 10
|
2020-11-13T00:18:41.000Z
|
2021-07-28T20:23:28.000Z
|
#!/usr/bin/python2
#
# Build Latex tables of verilog module interface signals and registers
#
import sys
import os.path
import re
from vhparser import header_parse
def param_parse(program, vhfile=None):
    """Extract Verilog `parameter` declarations and format them as LaTeX rows.

    Args:
        program: iterable of Verilog source lines.
        vhfile: optional path to a header file whose macro definitions are
            used to evaluate symbolic parameter values. BUG FIX: the original
            tested ``'vhfile' in locals()``, which is always True for a
            parameter; the header file is now genuinely optional.

    Returns:
        A list of ``[name, value, latex_description]`` rows. Parameters whose
        description starts with "NODOC" are skipped.
    """
    defines = {}
    if vhfile is not None:
        # Build the macro dictionary used to evaluate symbolic values.
        defines = header_parse(vhfile)
    program_out = []
    for line in program:
        flds_out = ['', '', '']
        subline = re.sub('//', ' ', line)          # drop the comment marker
        subline = re.sub('=', '', subline, 1)      # drop the first '='
        flds = subline.split()
        if not flds:
            continue  # empty line
        if flds[0] != 'parameter':
            continue  # not a parameter declaration
        param_desc = str(re.sub('_', '\_', " ".join(flds[3:])))
        if param_desc.startswith("NODOC"):
            continue  # deliberately undocumented parameter
        flds_out[0] = re.sub('_', '\_', flds[1])  # parameter name
        flds[2] = re.sub(',', '', str(flds[2]))   # strip trailing comma
        if flds[2].isdigit():
            flds_out[1] = re.sub('_', '\_', flds[2])  # literal numeric value
        else:
            # Symbolic value: substitute known macros, then evaluate.
            for key in defines:
                if key in str(flds[2]):
                    flds[2] = eval(re.sub(str(key), str(defines[key]), flds[2]))
            flds_out[1] = re.sub('_', '\_', str(flds[2]))  # evaluated value
        # Wrap the description in the table's parbox markup.
        flds_out[2] = "\\noindent\parbox[c]{\hsize}{\\rule{0pt}{15pt} " + str(param_desc) + " \\vspace{2mm}}"
        program_out.append(flds_out)
    return program_out
def main():
    """Parse CLI arguments, extract parameters, and write the LaTeX table body.

    Usage: ./param2tex.py infile outfile [header_file]

    BUG FIX: when no header file was supplied, ``vhfile`` was never assigned
    but was still passed to ``param_parse``, raising NameError. It now
    defaults to None. File handles are also closed deterministically.
    """
    # parse command line
    if len(sys.argv) != 3 and len(sys.argv) != 4:
        print("Usage: ./param2tex.py infile outfile [header_file]")
        exit()
    infile = sys.argv[1]
    outfile = sys.argv[2]
    vhfile = sys.argv[3] if len(sys.argv) == 4 else None
    # parse input file
    with open(infile, 'r') as fin:
        program = fin.readlines()
    program = param_parse(program, vhfile)
    # write output file: one table row per parameter, shading every other row
    with open(outfile, 'w') as fout:
        for i in range(len(program)):
            if (i % 2) != 0:
                fout.write("\\rowcolor{iob-blue}\n")
            line = program[i]
            line_out = str(line[0])
            for l in range(1, len(line)):
                line_out = line_out + (' & %s' % line[l])
            fout.write(line_out + ' \\\ \hline\n')
# Script entry point: only runs when executed directly, not when imported.
if __name__ == "__main__" : main ()
| 29.259259
| 132
| 0.546414
|
acfe54c8918d83d89de571337df7c76166511c47
| 2,216
|
py
|
Python
|
setup.py
|
Daniel-Han-Yang/catcher_modules
|
2eff08d2c19719539f761a7cae0a48b69a3231db
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
Daniel-Han-Yang/catcher_modules
|
2eff08d2c19719539f761a7cae0a48b69a3231db
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
Daniel-Han-Yang/catcher_modules
|
2eff08d2c19719539f761a7cae0a48b69a3231db
|
[
"Apache-2.0"
] | null | null | null |
from setuptools import setup, find_packages
import catcher_modules
def get_requirements() -> list:
    """Read ``requirements.txt`` and return its lines (newlines included)."""
    with open('requirements.txt', 'r') as req_file:
        return list(req_file)
def extras() -> dict:
    """Return the optional dependency groups for each supported backend.

    Two aggregate keys are appended: ``all`` (the union of every group) and
    ``travis`` (``all`` without couchbase, which cannot be installed on CI).
    """
    modules = {
        'kafka': ["pykafka==2.8.0"],
        'couchbase': ["couchbase==3.2.4"],
        'postgres': ["sqlalchemy==1.4.29", "psycopg2==2.9.3"],
        'mssql': ["pyodbc==4.0.32", "sqlalchemy==1.4.29"],
        'mysql': ["cython==0.29.26", "pymysql==1.0.2", "sqlalchemy==1.4.29"],
        'oracle': ["sqlalchemy==1.4.29", "cx_oracle==8.3.0"],
        'sqlite': ["sqlalchemy==1.4.29"],
        'redis': ["redis==4.1.0"],
        'mongodb': ["pymongo==3.12.3", "sqlalchemy==1.4.29"],
        'docker': ["docker==5.0.3"],
        'elastic': ["elasticsearch==7.16.2"],
        's3': ["boto3==1.20.26"],
        'rabbit': ["pika==1.2.0"],
        'email': ["imbox==0.9.8"],
        'marketo': ["marketorestpython==0.5.14"],
        'airflow': ["cryptography==36.0.1", "sqlalchemy==1.4.29"],
        'selenium': ["selenium==4.1.0"],
        'salesforce': ["simple-salesforce==1.11.4"]
    }
    # Deduplicated union of every backend's dependencies.
    everything = set()
    for group in modules.values():
        everything.update(group)
    modules['all'] = list(everything)
    # don't try to install couchbase in travis
    modules['travis'] = [dep for dep in modules['all'] if not dep.startswith('couchbase')]
    return modules
# Package definition: metadata comes from the catcher_modules package itself,
# base requirements from requirements.txt, and per-backend optional
# dependencies from extras() (install with e.g. `pip install .[postgres]`).
setup(name=catcher_modules.APPNAME,
      version=catcher_modules.APPVSN,
      description='Additional modules for catcher.',
      author=catcher_modules.APPAUTHOR,
      author_email='valerii.tikhonov@gmail.com',
      url='https://github.com/comtihon/catcher_modules',
      packages=find_packages(),
      install_requires=get_requirements(),
      include_package_data=True,
      package_data={'catcher_modules': ['resources/*']},
      classifiers=[
          'Intended Audience :: Developers',
          'License :: OSI Approved :: Apache Software License',
          'Programming Language :: Python :: 3.7',
          'Programming Language :: Python :: 3.8',
          'Programming Language :: Python :: 3.9',
          'Topic :: Software Development :: Testing'
      ],
      extras_require=extras(),
      tests_require=['mock', 'pytest', 'requests']
      )
| 37.559322
| 90
| 0.581227
|
acfe54edfb5bcb4e68db87ab6b06405abbe72a2b
| 586
|
py
|
Python
|
personalcomputer.py
|
ThanasisTop/OOP-with-Python
|
9dd5d69bb2e4f4e9d1a2b99efac4a0277222a453
|
[
"MIT"
] | null | null | null |
personalcomputer.py
|
ThanasisTop/OOP-with-Python
|
9dd5d69bb2e4f4e9d1a2b99efac4a0277222a453
|
[
"MIT"
] | null | null | null |
personalcomputer.py
|
ThanasisTop/OOP-with-Python
|
9dd5d69bb2e4f4e9d1a2b99efac4a0277222a453
|
[
"MIT"
] | null | null | null |
from pctower import *
from pcscreen import *
class PersonalComputer(PCTower, PCScreen):
    """A PC composed of a tower and a screen, with its own hard-drive capacity."""
    def __init__(self, memorysize, frequency , screensize, drivercapacity):
        PCTower.__init__(self, memorysize, frequency) # keep the inheritance from PCTower (initialize that base explicitly)
        PCScreen.__init__(self, screensize) # keep the inheritance from PCScreen
        self.DriverCapacity = drivercapacity # create one additional field of our own (in GB)
    def message(self):
        """Print the combined description of the tower, screen and hard drive."""
        PCTower.message(self)
        PCScreen.message(self)
        print('and a hard drive disk with',self.DriverCapacity, 'GB capacity')
| 48.833333
| 96
| 0.74744
|
acfe55009a97933acc153a8d70f166edd65b9ca0
| 11,776
|
py
|
Python
|
model_sequence.py
|
gdgrant/Associative-Transfer
|
a9c72e5fd01c9ecb81387f80d7b8302a920ab13e
|
[
"Apache-2.0"
] | 1
|
2019-01-28T20:08:03.000Z
|
2019-01-28T20:08:03.000Z
|
model_sequence.py
|
gdgrant/Associative-Transfer
|
a9c72e5fd01c9ecb81387f80d7b8302a920ab13e
|
[
"Apache-2.0"
] | null | null | null |
model_sequence.py
|
gdgrant/Associative-Transfer
|
a9c72e5fd01c9ecb81387f80d7b8302a920ab13e
|
[
"Apache-2.0"
] | null | null | null |
### Authors: Nicolas Y. Masse, Gregory D. Grant
# Required packages
import tensorflow as tf
import numpy as np
import pickle
import os, sys, time
# Plotting suite
import matplotlib
#matplotlib.use('Agg')
import matplotlib.pyplot as plt
# Model modules
from parameters_sequence import *
import stimulus_sequence
import AdamOpt_sequence as AdamOpt
import time
# Match GPU IDs to nvidia-smi command
os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
# Ignore Tensorflow startup warnings
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
class Model:
    """TF-1.x graph for a recurrent agent trained by RL or SL.

    A cortex LSTM is paired with a fast-weight associative memory module;
    policy and value heads read out of the LSTM state. The whole graph is
    built at construction time.
    """
    def __init__(self, stimulus, reward_data, target_out, mask):
        # `stimulus`, `reward_data`, `target_out`, `mask` are the
        # placeholders created in main() (time-major tensors).
        print('Defining graph...')
        self.stimulus_data = stimulus
        self.reward_data = reward_data
        self.target_out = target_out
        self.time_mask = mask
        # Build variables, the unrolled forward pass, and the losses.
        self.declare_variables()
        self.run_model()
        self.optimize()
        print('Graph successfully defined.\n')
    def declare_variables(self):
        """Create all trainable variables under the 'cortex' scope."""
        self.var_dict = {}
        # LSTM gate weights/biases plus the memory read/write projections.
        lstm_var_prefixes = ['Wf', 'Wi', 'Wo', 'Wc', 'Uf', 'Ui', 'Uo', 'Uc', \
            'bf', 'bi', 'bo', 'bc', 'W_write', 'W_read']
        # Policy and value head parameters.
        RL_var_prefixes = ['W_pol', 'W_val', 'b_pol', 'b_val']
        with tf.variable_scope('cortex'):
            for p in lstm_var_prefixes + RL_var_prefixes:
                # Initial values come from the parameter dictionary.
                self.var_dict[p] = tf.get_variable(p, initializer=par[p+'_init'])
    def run_model(self):
        """Unroll the LSTM + fast-weight memory over all trials/time steps."""
        # Per-timestep records, kept only after the initial "dead" trials.
        #self.h = []
        self.h_write = []
        self.h_hat = []
        self.h_concat = []
        self.pol_out = []
        self.val_out = []
        self.action = []
        self.reward = []
        self.mask = []
        self.pol_out_raw = []
        self.target = []
        # Initial recurrent state: LSTM hidden/cell, memory read/write
        # vectors, fast-weight matrix A, and the previous action.
        h = tf.zeros([par['batch_size'], par['n_hidden']], dtype = tf.float32)
        h_read = tf.zeros([par['batch_size'], par['n_latent']], dtype = tf.float32)
        h_write = tf.zeros([par['batch_size'], par['n_latent']], dtype = tf.float32)
        c = tf.zeros([par['batch_size'], par['n_hidden']], dtype = tf.float32)
        A = tf.zeros([par['batch_size'], par['n_latent'], par['n_latent']], dtype = tf.float32)
        action = tf.zeros([par['batch_size'], par['n_pol']])
        for i in range(par['trials_per_seq']):
            # Reward and mask reset at the start of every trial.
            reward = tf.zeros([par['batch_size'], par['n_val']])
            mask = tf.ones([par['batch_size'], 1])
            for j in range(par['num_time_steps']):
                # Make two possible actions and values for the network to pursue
                # by way of the LSTM-based cortex module and the associative
                # network hippocampus module
                t = i*par['num_time_steps'] + j
                input = tf.concat([mask*self.stimulus_data[t], h_read], axis = 1)
                #input = tf.concat([mask*self.stimulus_data[t], h_read], axis = 1)
                h, c = self.cortex_lstm(input, h, c)
                #h += tf.random_normal(tf.shape(h), 0, 0.1)
                h = tf.layers.dropout(h, 0.25, noise_shape = [par['batch_size'], par['n_hidden']], training = True)
                # A timestep is "salient" when the previous reward was non-zero.
                salient = tf.cast(tf.not_equal(reward, tf.constant(0.)), tf.float32)
                h_concat = tf.concat([self.stimulus_data[t], h, reward, action], axis = 1)
                #h_concat = tf.concat([ h, reward, action], axis = 1)
                h_write = tf.tensordot(h_concat, self.var_dict['W_write'], axes = [[1], [0]])
                #h_write += tf.random_normal(tf.shape(h_write), 0, 0.1)
                h_write = tf.nn.relu(h_write)
                # h_hat reconstructs h_concat from the latent write vector
                # (used by the reconstruction loss in optimize()).
                h_hat = tf.tensordot(h_write, self.var_dict['W_read'], axes = [[1], [0]])
                h_write = tf.reshape(h_write,[par['batch_size'], par['n_latent'], 1])
                h_read, A = self.fast_weights(h_write, A, salient)
                #h_read *= 0
                pol_out = h @ self.var_dict['W_pol'] + self.var_dict['b_pol']
                val_out = h @ self.var_dict['W_val'] + self.var_dict['b_val']
                # Compute outputs for action and policy loss
                action_index = tf.multinomial(pol_out, 1)
                action = tf.one_hot(tf.squeeze(action_index), par['n_pol'])
                pol_out_sm = tf.nn.softmax(pol_out, -1) # Note softmax for entropy calculation
                # Check for trial continuation (ends if previous reward is non-zero)
                continue_trial = tf.cast(tf.equal(reward, 0.), tf.float32)
                mask *= continue_trial
                reward = tf.reduce_sum(action*self.reward_data[t,...], axis=-1, keep_dims=True) \
                    * mask * self.time_mask[t,:,tf.newaxis]
                # Record outputs
                if i >= par['dead_trials']: # discard the first ~5 trials
                    #self.h.append(h)
                    self.h_write.append(h_write)
                    self.h_hat.append(h_hat)
                    self.h_concat.append(h_concat)
                    self.pol_out.append(pol_out_sm)
                    self.pol_out_raw.append(pol_out)
                    self.val_out.append(val_out)
                    self.action.append(action)
                    self.reward.append(reward)
                    self.target.append(self.target_out[t, ...])
                    self.mask.append(mask * self.time_mask[t,:,tf.newaxis])
        # Stack per-timestep records into time-major tensors.
        #self.h = tf.stack(self.h, axis=0)
        self.h_write = tf.stack(self.h_write, axis=0)
        self.h_hat = tf.stack(self.h_hat, axis=0)
        self.h_concat = tf.stack(self.h_concat, axis=0)
        self.pol_out = tf.stack(self.pol_out, axis=0)
        self.pol_out_raw = tf.stack(self.pol_out_raw, axis=0)
        self.val_out = tf.stack(self.val_out, axis=0)
        self.action = tf.stack(self.action, axis=0)
        self.reward = tf.stack(self.reward, axis=0)
        self.target = tf.stack(self.target, axis=0)
        self.mask = tf.stack(self.mask, axis=0)
    def cortex_lstm(self, x, h, c):
        """ Compute LSTM state from inputs and vars...
        f : forgetting gate
        i : input gate
        c : cell state
        o : output gate
        ...and generate an action from that state. """
        # Iterate LSTM
        f = tf.sigmoid(x @ self.var_dict['Wf'] + h @ self.var_dict['Uf'] + self.var_dict['bf'])
        i = tf.sigmoid(x @ self.var_dict['Wi'] + h @ self.var_dict['Ui'] + self.var_dict['bi'])
        o = tf.sigmoid(x @ self.var_dict['Wo'] + h @ self.var_dict['Uo'] + self.var_dict['bo'])
        cn = tf.tanh(x @ self.var_dict['Wc'] + h @ self.var_dict['Uc'] + self.var_dict['bc'])
        c = f * c + i * cn
        h = o * tf.tanh(c)
        # Return action, hidden state, and cell state
        return h, c
    def fast_weights(self, h, A, salient):
        """Update the fast-weight matrix A (only on salient steps) and read
        from it for `inner_steps` iterations; returns (read vector, new A)."""
        #A_new = par['A_alpha']*A + par['A_beta']*(tf.reshape(salient,[par['batch_size'],1,1])*h)*tf.transpose(h, [0, 2, 1])*par['A_mask']
        A_new = par['A_alpha']*A + par['A_beta']*(tf.reshape(salient,[par['batch_size'],1,1])*h)*tf.transpose(h, [0, 2, 1])*par['A_mask']
        # NOTE(review): the read below uses the previous A, not A_new — TODO
        # confirm this ordering is intentional.
        for i in range(par['inner_steps']):
            h = tf.reduce_sum(A * h, axis = -1, keep_dims = True)
            # layer normalization
            if par['batch_norm_inner']:
                u, v = tf.nn.moments(h, axes = [1], keep_dims = True)
                h = tf.nn.relu((h-u)/tf.sqrt(1e-9+v))
            else:
                h = tf.nn.relu(h)
        return tf.squeeze(h), A_new
    def optimize(self):
        """ Calculate losses and apply corrections to model """
        # Set up optimizer and required constants
        epsilon = 1e-7
        cortex_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='cortex')
        cortex_optimizer = AdamOpt.AdamOpt(cortex_vars, learning_rate=par['learning_rate'])
        # Spiking activity loss (penalty on high activation values in the hidden layer)
        h_write = tf.reduce_mean(self.h_write,axis = 2)
        self.spike_loss = par['spike_cost']*tf.reduce_mean(self.mask*h_write)
        # L1 penalty restricted to the memory read/write projections.
        self.weight_loss = par['weight_cost']*tf.reduce_mean([tf.reduce_sum(tf.abs(var)) \
            for var in tf.trainable_variables() if ('W_write' in var.op.name or 'W_read' in var.op.name)])
        # Autoencoding loss: the latent write vector must reconstruct h_concat.
        self.reconstruction_loss = par['rec_cost']*tf.reduce_mean(self.mask*tf.square(self.h_concat-self.h_hat))
        # Collect loss terms and compute gradients
        if par['learning_method'] == 'RL':
            # Get the value outputs of the network, and pad the last time step
            val_out = tf.concat([self.val_out, tf.zeros([1,par['batch_size'],par['n_val']])], axis=0)
            # Determine terminal state of the network
            terminal_state = tf.cast(tf.logical_not(tf.equal(self.reward, tf.constant(0.))), tf.float32)
            # Compute predicted value and the advantage for plugging into the policy loss
            pred_val = self.reward + par['discount_rate']*val_out[1:,:,:]*(1-terminal_state)
            advantage = pred_val - val_out[:-1,:,:]
            # Stop gradients back through action, advantage, and mask
            action_static = tf.stop_gradient(self.action)
            advantage_static = tf.stop_gradient(advantage)
            mask_static = tf.stop_gradient(self.mask)
            pred_val_static = tf.stop_gradient(pred_val)
            # Policy loss
            self.pol_loss = -tf.reduce_mean(mask_static*advantage_static*action_static*tf.log(epsilon+self.pol_out))
            # Value loss
            self.val_loss = 0.5*par['val_cost']*tf.reduce_mean(mask_static*tf.square(val_out[:-1,:,:]-pred_val_static))
            # Entropy loss
            self.ent_loss = -par['entropy_cost']*tf.reduce_mean(tf.reduce_sum(mask_static*self.pol_out*tf.log(epsilon+self.pol_out), axis=2))
            # Collect RL losses
            RL_loss = self.pol_loss + self.val_loss - self.ent_loss
            total_loss = RL_loss + self.spike_loss + self.reconstruction_loss + self.weight_loss
        elif par['learning_method'] == 'SL':
            # NOTE(review): in SL mode self.val_loss is never assigned, so the
            # 1e-15*self.val_loss term below would raise AttributeError — TODO
            # confirm SL mode is actually exercised.
            self.task_loss = tf.reduce_mean(tf.squeeze(self.mask)*tf.nn.softmax_cross_entropy_with_logits_v2(logits = self.pol_out_raw, \
                labels = self.target, dim = -1))
            total_loss = self.task_loss + self.spike_loss + self.reconstruction_loss + 1e-15*self.val_loss + self.weight_loss
        if par['train']:
            self.train_cortex = cortex_optimizer.compute_gradients(total_loss)
        else:
            self.train_cortex = tf.no_op()
def main(gpu_id=None):
    """Build the model graph and train it over every task and batch.

    Args:
        gpu_id: optional CUDA device id (string); None runs on the CPU.
    """
    if gpu_id is not None:
        os.environ['CUDA_VISIBLE_DEVICES'] = gpu_id
    print_important_params()
    tf.reset_default_graph()
    # Placeholders: [time*trials, batch, feature], time-major.
    x = tf.placeholder(tf.float32, [par['num_time_steps']*par['trials_per_seq'], par['batch_size'], par['n_input']], 'stim')
    r = tf.placeholder(tf.float32, [par['num_time_steps']*par['trials_per_seq'], par['batch_size'], par['n_pol']], 'reward')
    y = tf.placeholder(tf.float32, [par['num_time_steps']*par['trials_per_seq'], par['batch_size'], par['n_pol']], 'target')
    m = tf.placeholder(tf.float32, [par['num_time_steps']*par['trials_per_seq'], par['batch_size']], 'mask')
    stim = stimulus_sequence.Stimulus()
    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.8) if gpu_id == '0' else tf.GPUOptions()
    with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess:
        device = '/cpu:0' if gpu_id is None else '/gpu:0'
        with tf.device(device):
            model = Model(x, r, y, m)
        sess.run(tf.global_variables_initializer())
        for t in range(par['n_tasks']):
            for i in range(par['n_batches']):
                name, trial_info = stim.generate_trial(t)
                feed_dict = {x: trial_info['neural_input'], r: trial_info['reward_data'],
                             y: trial_info['desired_output'], m: trial_info['train_mask']}
                # BUG FIX: the original fetched `model.h`, which is never set
                # (self.h is commented out in run_model), and the log line
                # printed `running_avg_reward`, which is never defined — both
                # crashed on the very first logging iteration. We fetch only
                # existing tensors and report the batch-mean reward instead.
                _, reward, pol_loss, action, h_write, rec_loss = \
                    sess.run([model.train_cortex, model.reward, model.pol_loss, \
                    model.action, model.h_write, model.reconstruction_loss], feed_dict=feed_dict)
                if i%20 == 0:
                    print('Iter {:>4} | Reward: {:6.3f} | Pol. Loss: {:6.3f} | Mean h_w: {:6.6f} | Rec. loos: {:6.5f} |'.format(\
                        i, np.mean(reward), pol_loss, np.mean(h_write), np.mean(rec_loss)))
                if par['save_weights'] and i%500 == 0:
                    t0 = time.time()
                    print('Saving weights...')
                    weights, = sess.run([model.var_dict])
                    saved_data = {'weights':weights, 'par': par}
                    # Close the pickle file deterministically.
                    with open('./savedir/{}_model_weights.pkl'.format(par['save_fn']), 'wb') as fw:
                        pickle.dump(saved_data, fw)
                    print('Weights saved.\n')
                    print('Time ', time.time() - t0)
    print('Model complete.\n')
def print_important_params():
    """Pretty-print the configuration values that matter for this run."""
    notes = ''
    keys = ['learning_method', 'n_hidden', 'n_latent', \
        'A_alpha', 'A_beta', 'inner_steps', 'batch_norm_inner', 'learning_rate', \
        'task_list', 'trials_per_seq', 'fix_break_penalty', 'wrong_choice_penalty', \
        'correct_choice_reward', 'discount_rate', 'num_motion_dirs', 'spike_cost', \
        'rec_cost', 'weight_cost', 'entropy_cost', 'val_cost', 'batch_size', 'n_batches']
    print('-' * 60)
    # One aligned "key : value" line per parameter.
    for key in keys:
        print('{:<24} : {}'.format(key, par[key]))
    print('{:<24} : {}'.format('notes', notes))
    print('-' * 60 + '\n')
# Entry point: an optional CLI argument selects the GPU id; Ctrl-C exits
# cleanly instead of dumping a traceback.
if __name__ == '__main__':
    try:
        if len(sys.argv) > 1:
            main(sys.argv[1])
        else:
            main()
    except KeyboardInterrupt:
        quit('Quit by KeyboardInterrupt.')
| 37.148265
| 132
| 0.673573
|
acfe5502003a0194a78c4117143b721c5e9c1d96
| 2,155
|
py
|
Python
|
tests/models/symbol/new_lock_key_test.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 5
|
2016-08-23T17:52:22.000Z
|
2019-05-16T08:45:30.000Z
|
tests/models/symbol/new_lock_key_test.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 2
|
2016-11-10T05:30:21.000Z
|
2019-04-05T15:03:37.000Z
|
tests/models/symbol/new_lock_key_test.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 7
|
2016-08-25T16:11:44.000Z
|
2021-02-22T05:31:25.000Z
|
#!/usr/bin/env python
# coding: utf-8
"""
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import unittest
from netapp.santricity.models.symbol.new_lock_key import NewLockKey
class NewLockKeyTest(unittest.TestCase):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    # Try instantiating the model
    def test_new_lock_key(self):
        # Smoke test: the generated model must be constructible with no
        # arguments and yield a real object.
        new_lock_key_obj = NewLockKey()
        self.assertNotEqual(new_lock_key_obj, None)
| 56.710526
| 845
| 0.770766
|
acfe553371b34bf0c741f28db46b496ceefbe344
| 2,546
|
py
|
Python
|
bin/bed2subtractScoresFromBed.py
|
odingsy/NGStoolkit
|
68d73810351550b9ba75f9184f26bc8e55708fcc
|
[
"MIT"
] | 2
|
2018-05-05T06:24:51.000Z
|
2021-07-04T22:24:13.000Z
|
bin/bed2subtractScoresFromBed.py
|
odingsy/NGStoolkit
|
68d73810351550b9ba75f9184f26bc8e55708fcc
|
[
"MIT"
] | null | null | null |
bin/bed2subtractScoresFromBed.py
|
odingsy/NGStoolkit
|
68d73810351550b9ba75f9184f26bc8e55708fcc
|
[
"MIT"
] | 2
|
2020-12-27T22:02:29.000Z
|
2021-05-28T20:28:26.000Z
|
#!/usr/bin/env python
# Subtract per-line BED scores: output = b - a, written to -o.
# NOTE: Python 2 script (uses itertools.izip).
import os
import sys
import argparse
import tempfile
from itertools import izip
from bed import bedline
parser = argparse.ArgumentParser(description='subtraction of the scores between two files: b - a')
parser.add_argument('-a', required= False, help='a of b - a')
parser.add_argument('-b', required= True, help='b of b - a')
parser.add_argument('-o', required= True, help='output')
args = parser.parse_args()
# Without an -a file there is nothing to subtract: copy b to the output.
if not args.a:
    os.system("cp " + args.b + " " + args.o)
    sys.exit()
afile = open(args.a, 'r')
bfile = open(args.b, 'r')
out = open(args.o, 'w')
# Walk both (presumably position-sorted — TODO confirm) BED files in
# lockstep. Lines are matched by the chrom_start_end_strand key; when the
# keys disagree, the file that is "behind" is advanced until they align.
# Unmatched b-lines are written through unchanged.
for lineB, lineA in izip(bfile, afile):
    llA = lineA.strip().split('\t')
    stringA = '_'.join([llA[0], llA[1], llA[2], llA[5]])
    llB = lineB.strip().split('\t')
    stringB = '_'.join([llB[0], llB[1], llB[2], llB[5]])
    while stringB != stringA:
        BL_A = bedline(lineA)
        BL_B = bedline(lineB)
        llA = lineA.strip().split('\t')
        print(llA)
        stringA = '_'.join([llA[0], llA[1], llA[2], llA[5]])
        llB = lineB.strip().split('\t')
        stringB = '_'.join([llB[0], llB[1], llB[2], llB[5]])
        walkA = False
        walkB = False
        # Decide which file lags: compare chromosome, then start, then end,
        # then strand; walk the file with the smaller key.
        if BL_A.chromosome() > BL_B.chromosome():
            walkB = True
            walkA = False
        elif BL_A.chromosome() < BL_B.chromosome():
            walkB = False
            walkA = True
        else: # BL_A.chromosome() == BL_B.chromosome()
            if int(BL_A.start()) > int(BL_B.start()):
                walkB = True
                walkA = False
            elif int(BL_A.start()) < int(BL_B.start()):
                walkB = False
                walkA = True
            else: # int(BL_A.start()) == int(BL_B.start())
                if int(BL_A.end()) > int(BL_B.end()):
                    walkB = True
                    walkA = False
                elif int(BL_A.end()) < int(BL_B.end()):
                    walkB = False
                    walkA = True
                else:
                    if BL_A.strand() > BL_B.strand():
                        walkB = True
                        walkA = False
                    elif BL_A.strand() < BL_B.strand():
                        walkB = False
                        walkA = True
                    else:
                        # All four key fields match but the joined strings
                        # differ — inconsistent input.
                        if stringA != stringB:
                            raise ValueError('problem')
        if walkA and walkB:
            raise ValueError('tow files cannot be walked at the same time')
        if walkA:
            try:
                lineA = next(afile)
            except:
                # a exhausted: use a sentinel that sorts after everything.
                lineA = 'zzz\t9999999999\t9999999999\t.\t99999999999\t+'
        elif walkB:
            # Unmatched b-line: pass it through unchanged.
            out.write(lineB.strip() + '\n')
            try:
                lineB = next(bfile)
            except:
                # b exhausted: nothing more to emit.
                out.close()
                sys.exit()
    if stringA != stringB:
        raise ValueError('two strings are not equal: ' + stringA + ' ' + stringB)
    # Matched pair: emit b's fields with the score replaced by b - a.
    BL_A = bedline(lineA)
    BL_B = bedline(lineB)
    newScore = float(BL_B.score()) - float(BL_A.score())
    fields = BL_B.fields()
    fields[4] = str(newScore)
    out.write('\t'.join(fields) + '\n')
| 25.717172
| 98
| 0.61194
|
acfe55a5e197b407167c2e020a940cf528c898f8
| 3,575
|
py
|
Python
|
dbtmetabase/__init__.py
|
davideanastasia/dbt-metabase
|
34044d1b7c3ac2db507a43ec6ed653fa2c4312e9
|
[
"MIT"
] | null | null | null |
dbtmetabase/__init__.py
|
davideanastasia/dbt-metabase
|
34044d1b7c3ac2db507a43ec6ed653fa2c4312e9
|
[
"MIT"
] | null | null | null |
dbtmetabase/__init__.py
|
davideanastasia/dbt-metabase
|
34044d1b7c3ac2db507a43ec6ed653fa2c4312e9
|
[
"MIT"
] | null | null | null |
import logging
from .dbt import DbtReader
from .metabase import MetabaseClient
__version__ = '0.5.1'
def export(dbt_path: str,
           mb_host: str, mb_user: str, mb_password: str,
           database: str, schema: str,
           mb_https=True, sync=True, sync_timeout=30,
           includes=None, excludes=None):
    """Exports models from dbt to Metabase.

    Arguments:
        dbt_path {str} -- Path to dbt project.
        mb_host {str} -- Metabase hostname.
        mb_user {str} -- Metabase username.
        mb_password {str} -- Metabase password.
        database {str} -- Target database name.
        schema {str} -- Target schema name.

    Keyword Arguments:
        mb_https {bool} -- Use HTTPS to connect to Metabase instead of HTTP. (default: {True})
        sync {bool} -- Synchronize Metabase database before export. (default: {True})
        sync_timeout {int} -- Synchronization timeout in seconds. (default: {30})
        includes {list} -- Model names to limit processing to. (default: {None}, i.e. all)
        excludes {list} -- Model names to exclude. (default: {None}, i.e. none)
    """
    # BUG FIX: the defaults were mutable lists ([]), which are shared across
    # calls; use None sentinels and create fresh lists per call instead.
    includes = [] if includes is None else includes
    excludes = [] if excludes is None else excludes
    mbc = MetabaseClient(mb_host, mb_user, mb_password, mb_https)
    models = DbtReader(dbt_path).read_models(
        includes=includes,
        excludes=excludes
    )
    if sync:
        # Abort the export if Metabase never finishes syncing the schema.
        if not mbc.sync_and_wait(database, schema, models, sync_timeout):
            logging.critical("Sync timeout reached, models still not compatible")
            return
    mbc.export_models(database, schema, models)
def main(args: list = None):
    """CLI entry point: parse arguments and dispatch the chosen command."""
    import argparse
    logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', level=logging.INFO)
    def _build_parser():
        # All CLI flags are declared in one place.
        parser = argparse.ArgumentParser(
            description='Model synchronization from dbt to Metabase.'
        )
        parser.add_argument('command', choices=['export'], help="command to execute")
        parser.add_argument('--dbt_path', metavar='PATH', required=True, help="path to dbt project")
        parser.add_argument('--mb_host', metavar='HOST', required=True, help="Metabase hostname")
        parser.add_argument('--mb_user', metavar='USER', required=True, help="Metabase username")
        parser.add_argument('--mb_password', metavar='PASS', required=True, help="Metabase password")
        parser.add_argument('--mb_https', metavar='HTTPS', type=bool, default=True, help="use HTTPS to connect to Metabase instead of HTTP")
        parser.add_argument('--database', metavar='DB', required=True, help="target database name")
        parser.add_argument('--schema', metavar='SCHEMA', required=True, help="target schema name")
        parser.add_argument('--sync', metavar='ENABLE', type=bool, default=True, help="synchronize Metabase database before export")
        parser.add_argument('--sync_timeout', metavar='SECS', type=int, default=30, help="synchronization timeout (in secs)")
        parser.add_argument('--includes', metavar='MODELS', nargs='*', default=[], help="model names to limit processing to")
        parser.add_argument('--excludes', metavar='MODELS', nargs='*', default=[], help="model names to exclude")
        return parser
    opts = _build_parser().parse_args(args=args)
    if opts.command != 'export':
        return
    export(
        dbt_path=opts.dbt_path,
        mb_host=opts.mb_host,
        mb_user=opts.mb_user,
        mb_password=opts.mb_password,
        mb_https=opts.mb_https,
        database=opts.database,
        schema=opts.schema,
        sync=opts.sync,
        sync_timeout=opts.sync_timeout,
        includes=opts.includes,
        excludes=opts.excludes
    )
| 44.6875
| 136
| 0.657063
|
acfe56709286bffd5aeff2ef0ef47d0137fd7126
| 18,062
|
py
|
Python
|
kivy/tests/common.py
|
HishamAK/kivy
|
991ae22ec6af71e5188570daabd62ba128b226e5
|
[
"MIT"
] | 1
|
2022-01-24T14:20:05.000Z
|
2022-01-24T14:20:05.000Z
|
kivy/tests/common.py
|
Fxztam/kivy
|
90a4e75f9aebd6ee6c7d8635faac0c492c869e64
|
[
"MIT"
] | null | null | null |
kivy/tests/common.py
|
Fxztam/kivy
|
90a4e75f9aebd6ee6c7d8635faac0c492c869e64
|
[
"MIT"
] | 1
|
2022-01-19T09:10:47.000Z
|
2022-01-19T09:10:47.000Z
|
'''
This is a extended unittest module for Kivy, to make unittests based on
graphics with an OpenGL context.
The idea is to render a Widget tree, and after 1, 2 or more frames, a
screenshot will be made and be compared to the original one.
If no screenshot exists for the current test, the very first one will be used.
The screenshots live in the 'kivy/tests/results' folder and are in PNG format,
320x240 pixels.
'''
__all__ = (
'GraphicUnitTest', 'UnitTestTouch', 'UTMotionEvent', 'async_run',
'requires_graphics', 'ensure_web_server')
import unittest
import logging
import pytest
import sys
from functools import partial
import os
import threading
from kivy.graphics.cgl import cgl_get_backend_name
from kivy.input.motionevent import MotionEvent
log = logging.getLogger('unittest')
# Base class for GraphicUnitTest: plain `object` when the GL backend is
# mocked, so unittest does not collect the graphics tests at all.
_base = object
if 'mock' != cgl_get_backend_name():
    # check what the gl backend might be, we can't know for sure
    # what it'll be until actually initialized by the window.
    _base = unittest.TestCase
# Screenshot comparison is opt-in via the KIVY_UNITTEST_SCREENSHOTS env var.
make_screenshots = os.environ.get('KIVY_UNITTEST_SCREENSHOTS')
# Shared state for the lazily-started test web server (see ensure_web_server).
http_server = None
http_server_ready = threading.Event()
kivy_eventloop = os.environ.get('KIVY_EVENTLOOP', 'asyncio')
def requires_graphics(func):
    """Decorator: skip *func* when the GL backend is mocked (no real context)."""
    if cgl_get_backend_name() == 'mock':
        skip = pytest.mark.skip(
            reason='Skipping because gl backend is set to mock')
        return skip(func)
    return func
def ensure_web_server(root=None):
if http_server is not None:
return True
if not root:
root = os.path.join(os.path.dirname(__file__), "..", "..")
need_chdir = sys.version_info.major == 3 and sys.version_info.minor <= 6
curr_dir = os.getcwd()
def _start_web_server():
global http_server
from http.server import SimpleHTTPRequestHandler
from socketserver import TCPServer
try:
if need_chdir:
os.chdir(root)
handler = SimpleHTTPRequestHandler
else:
handler = partial(SimpleHTTPRequestHandler, directory=root)
http_server = TCPServer(
("", 8000), handler, bind_and_activate=False)
http_server.daemon_threads = True
http_server.allow_reuse_address = True
http_server.server_bind()
http_server.server_activate()
http_server_ready.set()
http_server.serve_forever()
except:
import traceback
traceback.print_exc()
finally:
http_server = None
http_server_ready.set()
if need_chdir:
os.chdir(curr_dir)
th = threading.Thread(target=_start_web_server)
th.daemon = True
th.start()
http_server_ready.wait()
if http_server is None:
raise Exception("Unable to start webserver")
class GraphicUnitTest(_base):
framecount = 0
def _force_refresh(self, *largs):
# this prevent in some case to be stuck if the screen doesn't refresh
# and we wait for a number of self.framecount that never goes down
from kivy.base import EventLoop
win = EventLoop.window
if win and win.canvas:
win.canvas.ask_update()
def render(self, root, framecount=1):
'''Call rendering process using the `root` widget.
The screenshot will be done in `framecount` frames.
'''
from kivy.base import runTouchApp
from kivy.clock import Clock
self.framecount = framecount
try:
Clock.schedule_interval(self._force_refresh, 1)
runTouchApp(root)
finally:
Clock.unschedule(self._force_refresh)
# reset for the next test, but nobody will know if it will be used :/
if self.test_counter != 0:
self.tearDown(fake=True)
self.setUp()
def run(self, *args, **kwargs):
'''Extend the run of unittest, to check if results directory have been
found. If no results directory exists, the test will be ignored.
'''
from os.path import join, dirname, exists
results_dir = join(dirname(__file__), 'results')
if make_screenshots and not exists(results_dir):
log.warning('No result directory found, cancel test.')
os.mkdir(results_dir)
self.test_counter = 0
self.results_dir = results_dir
self.test_failed = False
return super(GraphicUnitTest, self).run(*args, **kwargs)
def setUp(self):
'''Prepare the graphic test, with:
- Window size fixed to 320x240
- Default kivy configuration
- Without any kivy input
'''
# use default kivy configuration (don't load user file.)
from os import environ
environ['KIVY_USE_DEFAULTCONFIG'] = '1'
# force window size + remove all inputs
from kivy.config import Config
Config.set('graphics', 'width', '320')
Config.set('graphics', 'height', '240')
for items in Config.items('input'):
Config.remove_option('input', items[0])
# bind ourself for the later screenshot
from kivy.core.window import Window
self.Window = Window
Window.bind(on_flip=self.on_window_flip)
# ensure our window is correctly created
Window.create_window()
Window.register()
Window.initialized = True
Window.close = lambda *s: None
self.clear_window_and_event_loop()
def clear_window_and_event_loop(self):
from kivy.base import EventLoop
window = self.Window
for child in window.children[:]:
window.remove_widget(child)
window.canvas.before.clear()
window.canvas.clear()
window.canvas.after.clear()
EventLoop.touches.clear()
for post_proc in EventLoop.postproc_modules:
if hasattr(post_proc, 'touches'):
post_proc.touches.clear()
elif hasattr(post_proc, 'last_touches'):
post_proc.last_touches.clear()
def on_window_flip(self, window):
'''Internal method to be called when the window have just displayed an
image.
When an image is showed, we decrement our framecount. If framecount is
come to 0, we are taking the screenshot.
The screenshot is done in a temporary place, and is compared to the
original one -> test ok/ko.
If no screenshot is available in the results directory, a new one will
be created.
'''
from kivy.base import EventLoop
from tempfile import mkstemp
from os.path import join, exists
from os import unlink, close
from shutil import move, copy
# don't save screenshot until we have enough frames.
# log.debug('framecount %d' % self.framecount)
# ! check if there is 'framecount', otherwise just
# ! assume zero e.g. if handling runTouchApp manually
self.framecount = getattr(self, 'framecount', 0) - 1
if self.framecount > 0:
return
# don't create screenshots if not requested manually
if not make_screenshots:
EventLoop.stop()
return
reffn = None
match = False
try:
# just get a temporary name
fd, tmpfn = mkstemp(suffix='.png', prefix='kivyunit-')
close(fd)
unlink(tmpfn)
# get a filename for the current unit test
self.test_counter += 1
test_uid = '%s-%d.png' % (
'_'.join(self.id().split('.')[-2:]),
self.test_counter)
# capture the screen
log.info('Capturing screenshot for %s' % test_uid)
tmpfn = window.screenshot(tmpfn)
log.info('Capture saved at %s' % tmpfn)
# search the file to compare to
reffn = join(self.results_dir, test_uid)
log.info('Compare with %s' % reffn)
# get sourcecode
import inspect
frame = inspect.getouterframes(inspect.currentframe())[6]
sourcecodetab, line = inspect.getsourcelines(frame[0])
line = frame[2] - line
currentline = sourcecodetab[line]
sourcecodetab[line] = '<span style="color: red;">%s</span>' % (
currentline)
sourcecode = ''.join(sourcecodetab)
sourcecodetab[line] = '>>>>>>>>\n%s<<<<<<<<\n' % currentline
sourcecodeask = ''.join(sourcecodetab)
if not exists(reffn):
log.info('No image reference, move %s as ref ?' % test_uid)
if self.interactive_ask_ref(sourcecodeask, tmpfn, self.id()):
move(tmpfn, reffn)
tmpfn = reffn
log.info('Image used as reference')
match = True
else:
log.info('Image discarded')
else:
from kivy.core.image import Image as CoreImage
s1 = CoreImage(tmpfn, keep_data=True)
sd1 = s1.image._data[0].data
s2 = CoreImage(reffn, keep_data=True)
sd2 = s2.image._data[0].data
if sd1 != sd2:
log.critical(
'%s at render() #%d, images are different.' % (
self.id(), self.test_counter))
if self.interactive_ask_diff(sourcecodeask,
tmpfn, reffn, self.id()):
log.critical('user ask to use it as ref.')
move(tmpfn, reffn)
tmpfn = reffn
match = True
else:
self.test_failed = True
else:
match = True
# generate html
from os.path import join, dirname, exists, basename
from os import mkdir
build_dir = join(dirname(__file__), 'build')
if not exists(build_dir):
mkdir(build_dir)
copy(reffn, join(build_dir, 'ref_%s' % basename(reffn)))
if tmpfn != reffn:
copy(tmpfn, join(build_dir, 'test_%s' % basename(reffn)))
with open(join(build_dir, 'index.html'), 'at') as fd:
color = '#ffdddd' if not match else '#ffffff'
fd.write('<div style="background-color: %s">' % color)
fd.write('<h2>%s #%d</h2>' % (self.id(), self.test_counter))
fd.write('<table><tr><th>Reference</th>'
'<th>Test</th>'
'<th>Comment</th>')
fd.write('<tr><td><img src="ref_%s"/></td>' %
basename(reffn))
if tmpfn != reffn:
fd.write('<td><img src="test_%s"/></td>' %
basename(reffn))
else:
fd.write('<td>First time, no comparison.</td>')
fd.write('<td><pre>%s</pre></td>' % sourcecode)
fd.write('</table></div>')
finally:
try:
if reffn != tmpfn:
unlink(tmpfn)
except:
pass
EventLoop.stop()
def tearDown(self, fake=False):
'''When the test is finished, stop the application, and unbind our
current flip callback.
'''
from kivy.base import stopTouchApp
from kivy.core.window import Window
Window.unbind(on_flip=self.on_window_flip)
self.clear_window_and_event_loop()
self.Window = None
stopTouchApp()
if not fake and self.test_failed:
self.assertTrue(False)
super(GraphicUnitTest, self).tearDown()
def interactive_ask_ref(self, code, imagefn, testid):
from os import environ
if 'UNITTEST_INTERACTIVE' not in environ:
return True
from tkinter import Tk, Label, LEFT, RIGHT, BOTTOM, Button
from PIL import Image, ImageTk
self.retval = False
root = Tk()
def do_close():
root.destroy()
def do_yes():
self.retval = True
do_close()
image = Image.open(imagefn)
photo = ImageTk.PhotoImage(image)
Label(root, text='The test %s\nhave no reference.' % testid).pack()
Label(root, text='Use this image as a reference ?').pack()
Label(root, text=code, justify=LEFT).pack(side=RIGHT)
Label(root, image=photo).pack(side=LEFT)
Button(root, text='Use as reference', command=do_yes).pack(side=BOTTOM)
Button(root, text='Discard', command=do_close).pack(side=BOTTOM)
root.mainloop()
return self.retval
def interactive_ask_diff(self, code, tmpfn, reffn, testid):
from os import environ
if 'UNITTEST_INTERACTIVE' not in environ:
return False
from tkinter import Tk, Label, LEFT, RIGHT, BOTTOM, Button
from PIL import Image, ImageTk
self.retval = False
root = Tk()
def do_close():
root.destroy()
def do_yes():
self.retval = True
do_close()
phototmp = ImageTk.PhotoImage(Image.open(tmpfn))
photoref = ImageTk.PhotoImage(Image.open(reffn))
Label(root, text='The test %s\nhave generated an different'
'image as the reference one..' % testid).pack()
Label(root, text='Which one is good ?').pack()
Label(root, text=code, justify=LEFT).pack(side=RIGHT)
Label(root, image=phototmp).pack(side=RIGHT)
Label(root, image=photoref).pack(side=LEFT)
Button(root, text='Use the new image -->',
command=do_yes).pack(side=BOTTOM)
Button(root, text='<-- Use the reference',
command=do_close).pack(side=BOTTOM)
root.mainloop()
return self.retval
def advance_frames(self, count):
'''Render the new frames and:
* tick the Clock
* dispatch input from all registered providers
* flush all the canvas operations
* redraw Window canvas if necessary
'''
from kivy.base import EventLoop
for i in range(count):
EventLoop.idle()
class UnitTestTouch(MotionEvent):
'''Custom MotionEvent representing a single touch. Similar to `on_touch_*`
methods from the Widget class, this one introduces:
* touch_down
* touch_move
* touch_up
Create a new touch with::
touch = UnitTestTouch(x, y)
then you press it on the default position with::
touch.touch_down()
or move it or even release with these simple calls::
touch.touch_move(new_x, new_y)
touch.touch_up()
'''
def __init__(self, x, y):
'''Create a MotionEvent instance with X and Y of the first
position a touch is at.
'''
from kivy.base import EventLoop
self.eventloop = EventLoop
win = EventLoop.window
super(UnitTestTouch, self).__init__(
# device, (tuio) id, args
self.__class__.__name__, 99, {
"x": x / (win.width - 1.0),
"y": y / (win.height - 1.0),
}
)
def touch_down(self, *args):
self.eventloop.post_dispatch_input("begin", self)
def touch_move(self, x, y):
win = self.eventloop.window
self.move({
"x": x / (win.width - 1.0),
"y": y / (win.height - 1.0)
})
self.eventloop.post_dispatch_input("update", self)
def touch_up(self, *args):
self.eventloop.post_dispatch_input("end", self)
def depack(self, args):
# set MotionEvent to touch
self.is_touch = True
# set sx/sy properties to ratio (e.g. X / win.width)
self.sx = args['x']
self.sy = args['y']
# set profile to accept x, y and pos properties
self.profile = ['pos']
# run depack after we set the values
super(UnitTestTouch, self).depack(args)
class UTMotionEvent(MotionEvent):
def depack(self, args):
self.is_touch = True
self.sx = args['x']
self.sy = args['y']
self.profile = ['pos']
super(UTMotionEvent, self).depack(args)
def async_run(func=None, app_cls_func=None):
def inner_func(func):
if 'mock' == cgl_get_backend_name():
return pytest.mark.skip(
reason='Skipping because gl backend is set to mock')(func)
if sys.version_info[0] < 3 or sys.version_info[1] <= 5:
return pytest.mark.skip(
reason='Skipping because graphics tests are not supported on '
'py3.5, only on py3.6+')(func)
if app_cls_func is not None:
func = pytest.mark.parametrize(
"kivy_app", [[app_cls_func], ], indirect=True)(func)
if kivy_eventloop == 'asyncio':
try:
import pytest_asyncio
return pytest.mark.asyncio(func)
except ImportError:
return pytest.mark.skip(
reason='KIVY_EVENTLOOP == "asyncio" but '
'"pytest-asyncio" is not installed')(func)
elif kivy_eventloop == 'trio':
try:
import trio
from pytest_trio import trio_fixture
func._force_trio_fixture = True
return func
except ImportError:
return pytest.mark.skip(
reason='KIVY_EVENTLOOP == "trio" but '
'"pytest-trio" is not installed')(func)
else:
return pytest.mark.skip(
reason='KIVY_EVENTLOOP must be set to either of "asyncio" or '
'"trio" to run async tests')(func)
if func is None:
return inner_func
return inner_func(func)
| 34.535373
| 79
| 0.57203
|
acfe57e8bdc9388362940d9496366fc0738f636d
| 2,590
|
py
|
Python
|
server/app.py
|
adeept/adeept_alter
|
6adf00eb141405fc3abad44965f81ba7797dd962
|
[
"MIT"
] | 1
|
2021-12-21T15:50:57.000Z
|
2021-12-21T15:50:57.000Z
|
server/app.py
|
adeept/adeept_alter
|
6adf00eb141405fc3abad44965f81ba7797dd962
|
[
"MIT"
] | 2
|
2021-03-14T22:05:42.000Z
|
2021-07-19T22:13:37.000Z
|
server/app.py
|
adeept/adeept_alter
|
6adf00eb141405fc3abad44965f81ba7797dd962
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from importlib import import_module
import os
from flask import Flask, render_template, Response, send_from_directory
from flask_cors import *
# import camera driver
from camera_opencv import Camera
from camera_opencv import commandAct
import threading
# Raspberry Pi camera module (requires picamera package)
# from camera_pi import Camera
app = Flask(__name__)
CORS(app, supports_credentials=True)
camera = Camera()
def gen(camera):
"""Video streaming generator function."""
while True:
frame = camera.get_frame()
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
@app.route('/video_feed')
def video_feed():
"""Video streaming route. Put this in the src attribute of an img tag."""
return Response(gen(camera),
mimetype='multipart/x-mixed-replace; boundary=frame')
dir_path = os.path.dirname(os.path.realpath(__file__))
@app.route('/api/img/<path:filename>')
def sendimg(filename):
return send_from_directory(dir_path+'/dist/img', filename)
@app.route('/js/<path:filename>')
def sendjs(filename):
return send_from_directory(dir_path+'/dist/js', filename)
@app.route('/css/<path:filename>')
def sendcss(filename):
return send_from_directory(dir_path+'/dist/css', filename)
@app.route('/api/img/icon/<path:filename>')
def sendicon(filename):
return send_from_directory(dir_path+'/dist/img/icon', filename)
@app.route('/fonts/<path:filename>')
def sendfonts(filename):
return send_from_directory(dir_path+'/dist/fonts', filename)
@app.route('/<path:filename>')
def sendgen(filename):
return send_from_directory(dir_path+'/dist', filename)
@app.route('/')
def index():
return send_from_directory(dir_path+'/dist', 'index.html')
class webapp:
def __init__(self):
self.camera = camera
def modeselect(self, modeInput):
Camera.modeSelect = modeInput
if modeInput == 'none':
camera.alterStop()
def commandInput(self, inputCommand, valueA=None):
commandAct(inputCommand, valueA)
def colorFindSet(self, H, S, V):
camera.colorFindSet(H, S, V)
def thread(self):
app.run(host='0.0.0.0', threaded=True)
def startthread(self):
fps_threading=threading.Thread(target=self.thread) #Define a thread for FPV and OpenCV
fps_threading.setDaemon(False) #'True' means it is a front thread,it would close when the mainloop() closes
fps_threading.start() #Thread starts
| 30.116279
| 143
| 0.679537
|
acfe58865166e5eb1b5af3d025f8eaf3c17864ec
| 19,507
|
py
|
Python
|
release/scripts/addons/add_mesh_BoltFactory/Boltfactory.py
|
simileV/blenderStereo29
|
09b993449aaca671a9eb2a6a22327246936eb3db
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1
|
2020-07-20T15:41:58.000Z
|
2020-07-20T15:41:58.000Z
|
release/scripts/addons/add_mesh_BoltFactory/Boltfactory.py
|
ringsce/Rings3D
|
8059d1e2460fc8d6f101eff8e695f68a99f6671d
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
release/scripts/addons/add_mesh_BoltFactory/Boltfactory.py
|
ringsce/Rings3D
|
8059d1e2460fc8d6f101eff8e695f68a99f6671d
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
from mathutils import Matrix
from bpy.types import Operator
from bpy_extras.object_utils import AddObjectHelper
from bpy.props import (
BoolProperty,
EnumProperty,
FloatProperty,
IntProperty,
FloatVectorProperty,
StringProperty,
)
from . import createMesh
from bpy_extras import object_utils
class add_mesh_bolt(Operator, AddObjectHelper):
bl_idname = "mesh.bolt_add"
bl_label = "Add Bolt"
bl_options = {'REGISTER', 'UNDO', 'PRESET'}
bl_description = "Construct many types of Bolts"
MAX_INPUT_NUMBER = 50
Bolt : BoolProperty(name = "Bolt",
default = True,
description = "Bolt")
change : BoolProperty(name = "Change",
default = False,
description = "change Bolt")
# Model Types
Model_Type_List = [('bf_Model_Bolt', 'BOLT', 'Bolt Model'),
('bf_Model_Nut', 'NUT', 'Nut Model')]
bf_Model_Type: EnumProperty(
attr='bf_Model_Type',
name='Model',
description='Choose the type off model you would like',
items=Model_Type_List, default='bf_Model_Bolt'
)
# Head Types
Model_Type_List = [('bf_Head_Hex', 'HEX', 'Hex Head'),
('bf_Head_12Pnt', '12 POINT', '12 Point Head'),
('bf_Head_Cap', 'CAP', 'Cap Head'),
('bf_Head_Dome', 'DOME', 'Dome Head'),
('bf_Head_Pan', 'PAN', 'Pan Head'),
('bf_Head_CounterSink', 'COUNTER SINK', 'Counter Sink Head')]
bf_Head_Type: EnumProperty(
attr='bf_Head_Type',
name='Head',
description='Choose the type off Head you would like',
items=Model_Type_List, default='bf_Head_Hex'
)
# Bit Types
Bit_Type_List = [('bf_Bit_None', 'NONE', 'No Bit Type'),
('bf_Bit_Allen', 'ALLEN', 'Allen Bit Type'),
('bf_Bit_Torx', 'TORX', 'Torx Bit Type'),
('bf_Bit_Philips', 'PHILLIPS', 'Phillips Bit Type')]
bf_Bit_Type: EnumProperty(
attr='bf_Bit_Type',
name='Bit Type',
description='Choose the type of bit to you would like',
items=Bit_Type_List, default='bf_Bit_None'
)
# Nut Types
Nut_Type_List = [('bf_Nut_Hex', 'HEX', 'Hex Nut'),
('bf_Nut_Lock', 'LOCK', 'Lock Nut'),
('bf_Nut_12Pnt', '12 POINT', '12 Point Nut')]
bf_Nut_Type: EnumProperty(
attr='bf_Nut_Type',
name='Nut Type',
description='Choose the type of nut you would like',
items=Nut_Type_List, default='bf_Nut_Hex'
)
# Shank Types
bf_Shank_Length: FloatProperty(
attr='bf_Shank_Length',
name='Shank Length', default=0,
min=0, soft_min=0, max=MAX_INPUT_NUMBER,
description='Length of the unthreaded shank',
unit='LENGTH',
)
bf_Shank_Dia: FloatProperty(
attr='bf_Shank_Dia',
name='Shank Dia', default=3,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Diameter of the shank',
unit='LENGTH',
)
bf_Phillips_Bit_Depth: FloatProperty(
attr='bf_Phillips_Bit_Depth',
name='Bit Depth', default=1.1431535482406616,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Depth of the Phillips Bit',
unit='LENGTH',
)
bf_Allen_Bit_Depth: FloatProperty(
attr='bf_Allen_Bit_Depth',
name='Bit Depth', default=1.5,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Depth of the Allen Bit',
unit='LENGTH',
)
bf_Allen_Bit_Flat_Distance: FloatProperty(
attr='bf_Allen_Bit_Flat_Distance',
name='Flat Dist', default=2.5,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Flat Distance of the Allen Bit',
unit='LENGTH',
)
# Torx Size Types
Torx_Size_Type_List = [('bf_Torx_T10', 'T10', 'T10'),
('bf_Torx_T20', 'T20', 'T20'),
('bf_Torx_T25', 'T25', 'T25'),
('bf_Torx_T30', 'T30', 'T30'),
('bf_Torx_T40', 'T40', 'T40'),
('bf_Torx_T50', 'T50', 'T50'),
('bf_Torx_T55', 'T55', 'T55'),
]
bf_Torx_Size_Type: EnumProperty(
attr='bf_Torx_Size_Type',
name='Torx Size',
description='Size of the Torx Bit',
items=Torx_Size_Type_List, default='bf_Torx_T20'
)
bf_Torx_Bit_Depth: FloatProperty(
attr='bf_Torx_Bit_Depth',
name='Bit Depth', default=1.5,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Depth of the Torx Bit',
unit='LENGTH',
)
bf_Hex_Head_Height: FloatProperty(
attr='bf_Hex_Head_Height',
name='Head Height', default=2,
min=0, soft_min=0, max=MAX_INPUT_NUMBER,
description='Height of the Hex Head',
unit='LENGTH',
)
bf_Hex_Head_Flat_Distance: FloatProperty(
attr='bf_Hex_Head_Flat_Distance',
name='Flat Dist', default=5.5,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Flat Distance of the Hex Head',
unit='LENGTH',
)
bf_12_Point_Head_Height: FloatProperty(
attr='bf_12_Point_Head_Height',
name='Head Height', default=3.0,
min=0, soft_min=0, max=MAX_INPUT_NUMBER,
description='Height of the 12 Point Head',
unit='LENGTH',
)
bf_12_Point_Head_Flat_Distance: FloatProperty(
attr='bf_12_Point_Head_Flat_Distance',
name='Flat Dist', default=3.0,
min=0.001, soft_min=0, #limit to 0.001 to avoid calculation error
max=MAX_INPUT_NUMBER,
description='Flat Distance of the 12 Point Head',
unit='LENGTH',
)
bf_12_Point_Head_Flange_Dia: FloatProperty(
attr='bf_12_Point_Head_Flange_Dia',
name='12 Point Head Flange Dia', default=5.5,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Flange diameter of the 12 point Head',
unit='LENGTH',
)
bf_CounterSink_Head_Dia: FloatProperty(
attr='bf_CounterSink_Head_Dia',
name='Head Dia', default=6.300000190734863,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Diameter of the Counter Sink Head',
unit='LENGTH',
)
bf_Cap_Head_Height: FloatProperty(
attr='bf_Cap_Head_Height',
name='Head Height', default=3,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Height of the Cap Head',
unit='LENGTH',
)
bf_Cap_Head_Dia: FloatProperty(
attr='bf_Cap_Head_Dia',
name='Head Dia', default=5.5,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Diameter of the Cap Head',
unit='LENGTH',
)
bf_Dome_Head_Dia: FloatProperty(
attr='bf_Dome_Head_Dia',
name='Dome Head Dia', default=5.599999904632568,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Length of the unthreaded shank',
unit='LENGTH',
)
bf_Pan_Head_Dia: FloatProperty(
attr='bf_Pan_Head_Dia',
name='Pan Head Dia', default=5.599999904632568,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Diameter of the Pan Head',
unit='LENGTH',
)
bf_Philips_Bit_Dia: FloatProperty(
attr='bf_Philips_Bit_Dia',
name='Bit Dia', default=1.8199999332427979,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Diameter of the Philips Bit',
unit='LENGTH',
)
bf_Thread_Length: FloatProperty(
attr='bf_Thread_Length',
name='Thread Length', default=6,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Length of the Thread',
unit='LENGTH',
)
bf_Major_Dia: FloatProperty(
attr='bf_Major_Dia',
name='Major Dia', default=3,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Outside diameter of the Thread',
unit='LENGTH',
)
bf_Pitch: FloatProperty(
attr='bf_Pitch',
name='Pitch', default=0.3499999940395355,
min=0.1, soft_min=0.1,
max=7.0,
description='Pitch if the thread',
unit='LENGTH',
)
bf_Minor_Dia: FloatProperty(
attr='bf_Minor_Dia',
name='Minor Dia', default=2.6211137771606445,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Inside diameter of the Thread',
unit='LENGTH',
)
bf_Crest_Percent: IntProperty(
attr='bf_Crest_Percent',
name='Crest Percent', default=10,
min=1, soft_min=1,
max=90,
description='Percent of the pitch that makes up the Crest',
)
bf_Root_Percent: IntProperty(
attr='bf_Root_Percent',
name='Root Percent', default=10,
min=1, soft_min=1,
max=90,
description='Percent of the pitch that makes up the Root',
)
bf_Div_Count: IntProperty(
attr='bf_Div_Count',
name='Div count', default=36,
min=4, soft_min=4,
max=4096,
description='Div count determine circle resolution',
)
bf_Hex_Nut_Height: FloatProperty(
attr='bf_Hex_Nut_Height',
name='Hex Nut Height', default=2.4000000953674316,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Height of the Hex Nut',
unit='LENGTH',
)
bf_Hex_Nut_Flat_Distance: FloatProperty(
attr='bf_Hex_Nut_Flat_Distance',
name='Hex Nut Flat Dist', default=5.5,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Flat distance of the Hex Nut',
unit='LENGTH',
)
bf_12_Point_Nut_Height: FloatProperty(
attr='bf_12_Point_Nut_Height',
name='12 Point Nut Height', default=2.4000000953674316,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Height of the 12 Point Nut',
unit='LENGTH',
)
bf_12_Point_Nut_Flat_Distance: FloatProperty(
attr='bf_12_Point_Nut_Flat_Distance',
name='12 Point Nut Flat Dist', default=3.0,
min=0.001, soft_min=0, #limit to 0.001 to avoid calculation error
max=MAX_INPUT_NUMBER,
description='Flat distance of the 12 point Nut',
unit='LENGTH',
)
bf_12_Point_Nut_Flange_Dia: FloatProperty(
attr='bf_12_Point_Nut_Flange_Dia',
name='12 Point Nut Flange Dia', default=5.5,
min=0, soft_min=0,
max=MAX_INPUT_NUMBER,
description='Flange diameter of the 12 point Nut',
unit='LENGTH',
)
def draw(self, context):
layout = self.layout
col = layout.column()
# ENUMS
col.prop(self, 'bf_Model_Type')
col.separator()
# Bit
if self.bf_Model_Type == 'bf_Model_Bolt':
col.prop(self, 'bf_Bit_Type')
if self.bf_Bit_Type == 'bf_Bit_None':
pass
elif self.bf_Bit_Type == 'bf_Bit_Allen':
col.prop(self, 'bf_Allen_Bit_Depth')
col.prop(self, 'bf_Allen_Bit_Flat_Distance')
elif self.bf_Bit_Type == 'bf_Bit_Torx':
col.prop(self, 'bf_Torx_Bit_Depth')
col.prop(self, 'bf_Torx_Size_Type')
elif self.bf_Bit_Type == 'bf_Bit_Philips':
col.prop(self, 'bf_Phillips_Bit_Depth')
col.prop(self, 'bf_Philips_Bit_Dia')
col.separator()
# Head
if self.bf_Model_Type == 'bf_Model_Bolt':
col.prop(self, 'bf_Head_Type')
if self.bf_Head_Type == 'bf_Head_Hex':
col.prop(self, 'bf_Hex_Head_Height')
col.prop(self, 'bf_Hex_Head_Flat_Distance')
elif self.bf_Head_Type == 'bf_Head_12Pnt':
col.prop(self, 'bf_12_Point_Head_Height')
col.prop(self, 'bf_12_Point_Head_Flat_Distance')
col.prop(self, 'bf_12_Point_Head_Flange_Dia')
elif self.bf_Head_Type == 'bf_Head_Cap':
col.prop(self, 'bf_Cap_Head_Height')
col.prop(self, 'bf_Cap_Head_Dia')
elif self.bf_Head_Type == 'bf_Head_Dome':
col.prop(self, 'bf_Dome_Head_Dia')
elif self.bf_Head_Type == 'bf_Head_Pan':
col.prop(self, 'bf_Pan_Head_Dia')
elif self.bf_Head_Type == 'bf_Head_CounterSink':
col.prop(self, 'bf_CounterSink_Head_Dia')
col.separator()
# Shank
if self.bf_Model_Type == 'bf_Model_Bolt':
col.label(text='Shank')
col.prop(self, 'bf_Shank_Length')
col.prop(self, 'bf_Shank_Dia')
col.separator()
# Nut
if self.bf_Model_Type == 'bf_Model_Nut':
col.prop(self, 'bf_Nut_Type')
if self.bf_Nut_Type == "bf_Nut_12Pnt":
col.prop(self, 'bf_12_Point_Nut_Height')
col.prop(self, 'bf_12_Point_Nut_Flat_Distance')
col.prop(self, 'bf_12_Point_Nut_Flange_Dia')
else:
col.prop(self, 'bf_Hex_Nut_Height')
col.prop(self, 'bf_Hex_Nut_Flat_Distance')
# Thread
col.label(text='Thread')
if self.bf_Model_Type == 'bf_Model_Bolt':
col.prop(self, 'bf_Thread_Length')
col.prop(self, 'bf_Major_Dia')
col.prop(self, 'bf_Minor_Dia')
col.prop(self, 'bf_Pitch')
col.prop(self, 'bf_Crest_Percent')
col.prop(self, 'bf_Root_Percent')
col.prop(self, 'bf_Div_Count')
if self.change == False:
# generic transform props
col.separator()
col.prop(self, 'align')
col.prop(self, 'location')
col.prop(self, 'rotation')
@classmethod
def poll(cls, context):
return context.scene is not None
def execute(self, context):
if bpy.context.mode == "OBJECT":
if context.selected_objects != [] and context.active_object and \
('Bolt' in context.active_object.data.keys()) and (self.change == True):
obj = context.active_object
oldmesh = obj.data
oldmeshname = obj.data.name
mesh = createMesh.Create_New_Mesh(self, context)
obj.data = mesh
try:
bpy.ops.object.vertex_group_remove(all=True)
except:
pass
for material in oldmesh.materials:
obj.data.materials.append(material)
bpy.data.meshes.remove(oldmesh)
obj.data.name = oldmeshname
else:
mesh = createMesh.Create_New_Mesh(self, context)
obj = object_utils.object_data_add(context, mesh, operator=self)
obj.data["Bolt"] = True
obj.data["change"] = False
for prm in BoltParameters():
obj.data[prm] = getattr(self, prm)
if bpy.context.mode == "EDIT_MESH":
active_object = context.active_object
name_active_object = active_object.name
bpy.ops.object.mode_set(mode='OBJECT')
mesh = createMesh.Create_New_Mesh(self, context)
obj = object_utils.object_data_add(context, mesh, operator=self)
obj.select_set(True)
active_object.select_set(True)
bpy.ops.object.join()
context.active_object.name = name_active_object
bpy.ops.object.mode_set(mode='EDIT')
return {'FINISHED'}
def invoke(self, context, event):
self.execute(context)
return {'FINISHED'}
# Register:
def Bolt_contex_menu(self, context):
bl_label = 'Change'
obj = context.object
layout = self.layout
if 'Bolt' in obj.data.keys():
props = layout.operator("mesh.bolt_add", text="Change Bolt")
props.change = True
for prm in BoltParameters():
setattr(props, prm, obj.data[prm])
layout.separator()
def menu_func_bolt(self, context):
layout = self.layout
layout.separator()
oper = self.layout.operator(add_mesh_bolt.bl_idname, text="Bolt", icon="MOD_SCREW")
oper.change = False
classes = (
add_mesh_bolt,
)
def register():
for cls in classes:
bpy.utils.register_class(cls)
bpy.types.VIEW3D_MT_mesh_add.append(menu_func_bolt)
bpy.types.VIEW3D_MT_object_context_menu.prepend(Bolt_contex_menu)
def unregister():
bpy.types.VIEW3D_MT_object_context_menu.remove(Bolt_contex_menu)
bpy.types.VIEW3D_MT_mesh_add.remove(menu_func_bolt)
for cls in reversed(classes):
bpy.utils.unregister_class(cls)
def BoltParameters():
BoltParameters = [
"bf_Model_Type",
"bf_Head_Type",
"bf_Bit_Type",
"bf_Nut_Type",
"bf_Shank_Length",
"bf_Shank_Dia",
"bf_Phillips_Bit_Depth",
"bf_Allen_Bit_Depth",
"bf_Allen_Bit_Flat_Distance",
"bf_Torx_Bit_Depth",
"bf_Torx_Size_Type",
"bf_Hex_Head_Height",
"bf_Hex_Head_Flat_Distance",
"bf_CounterSink_Head_Dia",
"bf_Cap_Head_Height",
"bf_Cap_Head_Dia",
"bf_Dome_Head_Dia",
"bf_Pan_Head_Dia",
"bf_Philips_Bit_Dia",
"bf_Thread_Length",
"bf_Major_Dia",
"bf_Pitch",
"bf_Minor_Dia",
"bf_Crest_Percent",
"bf_Root_Percent",
"bf_Div_Count",
"bf_Hex_Nut_Height",
"bf_Hex_Nut_Flat_Distance",
]
return BoltParameters
| 35.661792
| 87
| 0.566053
|
acfe58cbd4c863d0884a7d3b62947d294fe39a9c
| 24,496
|
py
|
Python
|
got10k/experiments/vot.py
|
mhd-medfa/got10k-toolkit
|
47f31a11f0ae1d4f7e6b2db6d22e71eb28a6e792
|
[
"MIT"
] | 3
|
2020-04-20T16:43:39.000Z
|
2021-06-14T09:40:15.000Z
|
got10k/experiments/vot.py
|
mhd-medfa/got10k-toolkit
|
47f31a11f0ae1d4f7e6b2db6d22e71eb28a6e792
|
[
"MIT"
] | null | null | null |
got10k/experiments/vot.py
|
mhd-medfa/got10k-toolkit
|
47f31a11f0ae1d4f7e6b2db6d22e71eb28a6e792
|
[
"MIT"
] | 2
|
2019-11-14T16:53:32.000Z
|
2021-11-11T20:45:25.000Z
|
from __future__ import absolute_import, division, print_function
import time
import numpy as np
import os
import glob
import warnings
import json
from PIL import Image
from ..datasets import VOT
from ..utils.metrics import poly_iou
from ..utils.viz import show_frame
class ExperimentVOT(object):
r"""Experiment pipeline and evaluation toolkit for VOT dataset.
Notes:
- The tracking results of three types of experiments ``supervised``
``unsupervised`` and ``realtime`` are compatible with the official
VOT toolkit <https://github.com/votchallenge/vot-toolkit/>`.
- TODO: The evaluation function for VOT tracking results is still
under development.
Args:
root_dir (string): Root directory of VOT dataset where sequence
folders exist.
version (integer, optional): Specify the VOT dataset version. Specify as
one of 2013~2018. Default is 2017.
list_file (string, optional): If provided, only run experiments over
sequences specified by the file.
read_image (boolean, optional): If True, return the read PIL image in
each frame. Otherwise only return the image path. Default is True.
experiments (string or tuple): Specify the type(s) of experiments to run.
Default is a tuple (``supervised``, ``unsupervised``, ``realtime``).
result_dir (string, optional): Directory for storing tracking
results. Default is ``./results``.
report_dir (string, optional): Directory for storing performance
evaluation results. Default is ``./reports``.
"""
def __init__(self, root_dir, version=2017,
             read_image=True, list_file=None,
             experiments=('supervised', 'unsupervised', 'realtime'),
             result_dir='results', report_dir='reports', start_idx=0, end_idx=None):
    """Set up the VOT experiment: dataset, output directories and protocol constants."""
    super(ExperimentVOT, self).__init__()
    # A single experiment name may be given as a plain string.
    if isinstance(experiments, str):
        experiments = (experiments,)
    assert all([e in ['supervised', 'unsupervised', 'realtime']
                for e in experiments])
    self.dataset = VOT(
        root_dir, version, anno_type='default',
        download=True, return_meta=True, list_file=list_file)
    self.experiments = experiments
    # The long-term challenge uses a different directory suffix ('VOT-LT2018').
    if version == 'LT2018':
        version = '-' + version
        self.long_term = True
    else:
        self.long_term = False
    self.read_image = read_image
    self.result_dir = os.path.join(result_dir, 'VOT' + str(version))
    self.report_dir = os.path.join(report_dir, 'VOT' + str(version))
    # VOT protocol constants (re-init delay, burn-in frames, trial count, ...).
    self.skip_initialize = 5
    self.burnin = 10
    self.repetitions = 15
    self.sensitive = 100
    self.nbins_eao = 1500
    self.tags = ['camera_motion', 'illum_change', 'occlusion',
                 'size_change', 'motion_change', 'empty']
    self.start_idx = start_idx
    # When no end index is given, process the dataset to its end.
    self.end_idx = end_idx if end_idx is not None else len(self.dataset)
def run(self, tracker, visualize=False):
    """Run every configured experiment type for the given tracker."""
    print('Running tracker %s on %s...' % (
        tracker.name, type(self.dataset).__name__))
    # Dispatch in the canonical order: supervised, unsupervised, realtime.
    runners = {
        'supervised': self.run_supervised,
        'unsupervised': self.run_unsupervised,
        'realtime': self.run_realtime,
    }
    for kind in ('supervised', 'unsupervised', 'realtime'):
        if kind in self.experiments:
            runners[kind](tracker, visualize)
def run_supervised(self, tracker, visualize=False):
    """Run the VOT 'supervised' (baseline) experiment.

    The tracker is re-initialized ``skip_initialize`` frames after every
    failure (IoU with the ground truth drops to 0). Each sequence is run
    ``self.repetitions`` times unless the tracker proves deterministic.
    Results are written under ``<result_dir>/<tracker>/baseline/<seq>/``.
    """
    print('Running supervised experiment...')
    # Loop over the configured slice of the dataset.
    end_idx = self.end_idx
    if end_idx is None:
        end_idx = len(self.dataset)
    for s in range(self.start_idx, end_idx):
        img_files, anno, _ = self.dataset[s]
        seq_name = self.dataset.seq_names[s]
        print('--Sequence %d/%d: %s' % (s + 1, len(self.dataset), seq_name))
        # Convert 8-value polygon annotations to axis-aligned rectangles.
        anno_rects = anno.copy()
        if anno_rects.shape[1] == 8:
            anno_rects = self.dataset._corner2rect(anno_rects)
        # Run multiple repetitions for each sequence.
        for r in range(self.repetitions):
            # Stop early once the tracker is known to be deterministic.
            if r > 0 and tracker.is_deterministic:
                break
            elif r == 3 and self._check_deterministic('baseline', tracker.name, seq_name):
                print(' Detected a deterministic tracker, ' +
                      'skipping remaining trials.')
                break
            print(' Repetition: %d' % (r + 1))
            # Skip if results for this repetition already exist.
            record_file = os.path.join(
                self.result_dir, tracker.name, 'baseline', seq_name,
                '%s_%03d.txt' % (seq_name, r + 1))
            if os.path.exists(record_file):
                print(' Found results, skipping', seq_name)
                continue
            # State variables of the supervised protocol.
            boxes = []
            times = []
            failure = False
            next_start = -1
            # Tracking loop.
            for f, img_file in enumerate(img_files):
                image = Image.open(img_file)
                frame = image if self.read_image else img_file
                start_time = time.time()
                if f == 0:
                    # Initial frame: flag [1] marks initialization.
                    tracker.init(frame, anno_rects[0])
                    boxes.append([1])
                elif failure:
                    # During failure frames: wait until the re-init frame.
                    if f == next_start:
                        failure = False
                        tracker.init(frame, anno_rects[f])
                        boxes.append([1])
                    else:
                        # Flag [0] marks a skipped frame; no time is measured.
                        start_time = np.NaN
                        boxes.append([0])
                else:
                    # During success frames: track and check overlap.
                    box = tracker.update(frame)
                    iou = poly_iou(anno[f], box, bound=image.size)
                    if iou <= 0.0:
                        # Tracking failure: flag [2], schedule re-initialization.
                        failure = True
                        next_start = f + self.skip_initialize
                        boxes.append([2])
                    else:
                        boxes.append(box)
                # Store elapsed time.
                times.append(time.time() - start_time)
                # Visualize if required.
                if visualize:
                    if len(boxes[-1]) == 4:
                        show_frame(image, boxes[-1])
                    else:
                        show_frame(image)
            # BUG FIX: the original passed an undefined name `confs` to
            # self._record when self.long_term was set, raising NameError.
            # The supervised experiment collects no confidences, so always
            # record boxes and times only.
            self._record(record_file, boxes, times)
def run_unsupervised(self, tracker, visualize=False):
    """Run the VOT 'unsupervised' experiment (one pass, no re-initialization)."""
    print('Running unsupervised experiment...')
    # Loop over the configured slice of the dataset.
    stop = self.end_idx if self.end_idx is not None else len(self.dataset)
    for s in range(self.start_idx, stop):
        img_files, anno, _ = self.dataset[s]
        seq_name = self.dataset.seq_names[s]
        print('--Sequence %d/%d: %s' % (s + 1, len(self.dataset), seq_name))
        # Skip sequences whose results already exist on disk.
        record_file = os.path.join(
            self.result_dir, tracker.name, 'unsupervised', seq_name,
            '%s_001.txt' % seq_name)
        if os.path.exists(record_file):
            print(' Found results, skipping', seq_name)
            continue
        # Optional hook: let the tracker know which video it is running on.
        if hasattr(tracker, 'set_video_name'):
            tracker.set_video_name(seq_name)
        # Convert 8-value polygon annotations to axis-aligned rectangles.
        anno_rects = anno.copy()
        if anno_rects.shape[1] == 8:
            anno_rects = self.dataset._corner2rect(anno_rects)
        # Single tracking pass; long-term mode also collects confidences.
        if self.long_term:
            boxes, times, confs = tracker.track(
                img_files, anno_rects[0], visualize=visualize, use_confidences=True)
            assert len(boxes) == len(anno) == len(confs)
        else:
            boxes, times = tracker.track(
                img_files, anno_rects[0], visualize=visualize)
            assert len(boxes) == len(anno)
        if hasattr(tracker, 'set_video_name'):
            tracker.set_video_name(None)
        # Re-format: the first entry is the initialization flag [1].
        boxes = list(boxes)
        boxes[0] = [1]
        # Record results (with confidences in long-term mode).
        if self.long_term:
            self._record(record_file, boxes, times, confs)
        else:
            self._record(record_file, boxes, times)
def run_realtime(self, tracker, visualize=False):
    """Run the VOT 'realtime' experiment.

    Simulates a 25 fps video stream: frames that arrive while the tracker
    is still busy are skipped, and failures trigger a delayed
    re-initialization like in the supervised experiment.
    """
    print('Running real-time experiment...')
    for s, (img_files, anno, _) in enumerate(self.dataset):
        seq_name = self.dataset.seq_names[s]
        print('--Sequence %d/%d: %s' % (s + 1, len(self.dataset), seq_name))
        # Skip sequences whose results already exist on disk.
        record_file = os.path.join(
            self.result_dir, tracker.name, 'realtime', seq_name,
            '%s_001.txt' % seq_name)
        if os.path.exists(record_file):
            print(' Found results, skipping', seq_name)
            continue
        # Convert 8-value polygon annotations to axis-aligned rectangles.
        anno_rects = anno.copy()
        if anno_rects.shape[1] == 8:
            anno_rects = self.dataset._corner2rect(anno_rects)
        # State variables of the real-time protocol.
        boxes = []
        times = []
        next_start = 0
        failure = False
        failed_frame = -1
        total_time = 0.0
        grace = 3 - 1      # initial grace frames charged at exactly 1/25 s
        offset = 0
        for f, img_file in enumerate(img_files):
            image = Image.open(img_file)
            frame = image if self.read_image else img_file
            start_time = time.time()
            if f == next_start:
                # (Re-)initialization frame: reset all state.
                tracker.init(frame, anno_rects[f])
                boxes.append([1])
                failure = False
                failed_frame = -1
                total_time = 0.0
                grace = 3 - 1
                offset = f
            elif not failure:
                # Success frames: advance the simulated clock.
                if grace > 0:
                    total_time += 1000.0 / 25
                    grace -= 1
                else:
                    # Charge at least one frame interval, or the real latency.
                    total_time += max(1000.0 / 25, last_time * 1000.0)
                # Frame index the simulated stream has reached by now.
                current = offset + int(np.round(np.floor(total_time * 25) / 1000.0))
                if f < current:
                    # Tracker still busy: reuse the last reported box.
                    box = boxes[-1]
                elif f == current:
                    box = tracker.update(frame)
                    iou = poly_iou(anno[f], box, bound=image.size)
                    if iou <= 0.0:
                        # Tracking failure: flag [2], schedule re-init.
                        failure = True
                        failed_frame = f
                        next_start = current + self.skip_initialize
                        boxes.append([2])
                    else:
                        boxes.append(box)
            else:
                # Failure frames: wait until re-initialization is due.
                if f < current:
                    # Skipped due to slow speed.
                    boxes.append([0])
                    start_time = np.NaN
                elif f == current:
                    box = tracker.update(frame)
                    iou = poly_iou(anno[f], box, bound=image.size)
                    if iou <= 0.0:
                        # Still failing: demote the first failed frame to [0].
                        boxes.append([2])
                        boxes[failed_frame] = [0]
                        times[failed_frame] = np.NaN
                    else:
                        boxes.append(box)
                elif f < next_start:
                    # Skipped due to failure.
                    boxes.append([0])
                    start_time = np.NaN
            # Store elapsed time; it drives the simulated clock above.
            last_time = time.time() - start_time
            times.append(last_time)
            # Visualize if required.
            if visualize:
                if len(boxes[-1]) == 4:
                    show_frame(image, boxes[-1])
                else:
                    show_frame(image)
        self._record(record_file, boxes, times)
def report(self, tracker_names):
    """Evaluate recorded baseline results and write performance.json.

    Returns a dict mapping each tracker name to its accuracy (mean IoU),
    robustness (failure count) and speed in fps.
    """
    assert isinstance(tracker_names, (list, tuple))

    def read_record(filename):
        # Load one results file: one comma-separated box (or flag) per line.
        with open(filename) as f:
            raw = f.read().strip().split('\n')
        return [[float(t) for t in line.split(',')] for line in raw]

    # Reports are stored under the first tracker's name.
    report_dir = os.path.join(self.report_dir, tracker_names[0])
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    report_file = os.path.join(report_dir, 'performance.json')
    performance = {}
    for name in tracker_names:
        print('Evaluating', name)
        ious = {}
        ious_full = {}
        failures = {}
        times = {}
        masks = {}  # per-sequence frame masks for attribute tags
        for s, (img_files, anno, meta) in enumerate(self.dataset):
            seq_name = self.dataset.seq_names[s]
            frame_num = len(img_files)
            # Per-frame score matrices, one row per repetition.
            ious[seq_name] = np.full(
                (self.repetitions, frame_num), np.nan, dtype=float)
            ious_full[seq_name] = np.full(
                (self.repetitions, frame_num), np.nan, dtype=float)
            failures[seq_name] = np.full(
                (self.repetitions, frame_num), np.nan, dtype=float)
            times[seq_name] = np.full(
                (self.repetitions, frame_num), np.nan, dtype=float)
            # Read results of all repetitions.
            record_files = sorted(glob.glob(os.path.join(
                self.result_dir, name, 'baseline', seq_name,
                '%s_[0-9]*.txt' % seq_name)))
            boxes = [read_record(f) for f in record_files]
            assert all([len(b) == len(anno) for b in boxes])
            bound = Image.open(img_files[0]).size
            # Frame IoUs with and without the burn-in window.
            seq_ious = [self._calc_iou(b, anno, bound, burnin=True)
                        for b in boxes]
            ious[seq_name][:len(seq_ious), :] = seq_ious
            seq_ious_full = [self._calc_iou(b, anno, bound)
                             for b in boxes]
            ious_full[seq_name][:len(seq_ious_full), :] = seq_ious_full
            # A 1-element box [2] flags a tracking failure.
            seq_failures = [
                [len(b) == 1 and b[0] == 2 for b in boxes_per_rep]
                for boxes_per_rep in boxes]
            failures[seq_name][:len(seq_failures), :] = seq_failures
            # Collect per-frame runtimes if recorded.
            time_file = os.path.join(
                self.result_dir, name, 'baseline', seq_name,
                '%s_time.txt' % seq_name)
            if os.path.exists(time_file):
                seq_times = np.loadtxt(time_file, delimiter=',').T
                times[seq_name][:len(seq_times), :] = seq_times
            # Attribute masks; 'empty' marks frames with no tag at all.
            tag_num = len(self.tags)
            masks[seq_name] = np.zeros((tag_num, frame_num), bool)
            for i, tag in enumerate(self.tags):
                if tag in meta:
                    masks[seq_name][i, :] = meta[tag]
            if 'empty' in self.tags:
                tag_frames = np.array([
                    v for k, v in meta.items()
                    if not 'practical' in k], dtype=bool)
                ind = self.tags.index('empty')
                masks[seq_name][ind, :] = \
                    ~np.logical_or.reduce(tag_frames, axis=0)
        # Concatenate all sequences along the frame axis.
        seq_names = self.dataset.seq_names
        masks = np.concatenate([masks[s] for s in seq_names], axis=1)
        ious = np.concatenate([ious[s] for s in seq_names], axis=1)
        failures = np.concatenate([failures[s] for s in seq_names], axis=1)
        with warnings.catch_warnings():
            # nanmean over all-NaN slices warns; suppress while averaging.
            warnings.simplefilter('ignore', category=RuntimeWarning)
            ious = np.nanmean(ious, axis=0)
            failures = np.nanmean(failures, axis=0)
            # Average overlaps / total failures per attribute tag.
            tag_ious = np.array(
                [np.nanmean(ious[m]) for m in masks])
            tag_failures = np.array(
                [np.nansum(failures[m]) for m in masks])
            tag_frames = masks.sum(axis=1)
            # Zero-out NaNs and weight tags by their frame counts.
            tag_ious[np.isnan(tag_ious)] = 0.0
            tag_weights = tag_frames / tag_frames.sum()
            accuracy = np.sum(tag_ious * tag_weights)
            robustness = np.sum(tag_failures * tag_weights)
        # Tracking speed from positive, non-NaN per-frame runtimes.
        times = np.concatenate([t.reshape(-1) for t in times.values()])
        times = times[~np.isnan(times)]
        times = times[times > 0]
        speed = np.mean(1. / times) if len(times) > 0 else -1
        performance.update({name: {
            'accuracy': accuracy,
            'robustness': robustness,
            'speed_fps': speed}})
    # Persist the evaluation.
    with open(report_file, 'w') as f:
        json.dump(performance, f, indent=4)
    print('Performance saved at', report_file)
    return performance
def show(self, tracker_names, seq_names=None, play_speed=1,
         experiment='supervised'):
    """Replay recorded results of several trackers on top of the frames."""
    if seq_names is None:
        seq_names = self.dataset.seq_names
    elif isinstance(seq_names, str):
        seq_names = [seq_names]
    assert isinstance(tracker_names, (list, tuple))
    assert isinstance(seq_names, (list, tuple))
    assert experiment in ['supervised', 'unsupervised', 'realtime']
    play_speed = int(round(play_speed))
    assert play_speed > 0
    # 'supervised' experiment results live in the 'baseline' folder.
    if experiment == 'supervised':
        experiment = 'baseline'

    def read_record(filename):
        # Parse a results file into per-frame 4-value rectangles.
        with open(filename) as f:
            raw = f.read().strip().split('\n')
        parsed = [[float(t) for t in line.split(',')] for line in raw]
        for i, r in enumerate(parsed):
            if len(r) == 4:
                parsed[i] = np.array(r)
            elif len(r) == 8:
                # Polygon annotation: convert corners to a rectangle.
                r = np.array(r)[np.newaxis, :]
                r = self.dataset._corner2rect(r)
                parsed[i] = r[0]
            else:
                # Protocol flag ([0]/[1]/[2]): nothing to draw.
                parsed[i] = np.zeros(4)
        return parsed

    for s, seq_name in enumerate(seq_names):
        print('[%d/%d] Showing results on %s...' % (
            s + 1, len(seq_names), seq_name))
        # Load each tracker's recorded boxes for this sequence.
        records = {}
        for name in tracker_names:
            record_file = os.path.join(
                self.result_dir, name, experiment, seq_name,
                '%s_001.txt' % seq_name)
            records[name] = read_record(record_file)
        # Display ground truth plus all trackers, frame by frame.
        img_files, anno, _ = self.dataset[seq_name]
        if anno.shape[1] == 8:
            anno = self.dataset._corner2rect(anno)
        for f, img_file in enumerate(img_files):
            if not f % play_speed == 0:
                continue
            image = Image.open(img_file)
            boxes = [anno[f]] + [records[name][f] for name in tracker_names]
            show_frame(image, boxes,
                       legends=['GroundTruth'] + tracker_names,
                       colors=['w', 'r', 'g', 'b', 'c', 'm', 'y',
                               'orange', 'purple', 'brown', 'pink'])
def _record(self, record_file, boxes, times, confs=None):
    """Write boxes (and optional confidences) plus per-frame times to disk."""
    # Serialize each box; a 1-element box is a protocol flag (init/skip/fail).
    lines = []
    for box in boxes:
        if len(box) == 1:
            lines.append('%d' % box[0])
        else:
            lines.append(','.join(['%.4f' % t for t in box]))
    # Record bounding boxes.
    record_dir = os.path.dirname(record_file)
    if not os.path.isdir(record_dir):
        os.makedirs(record_dir)
    with open(record_file, 'w') as f:
        f.write('\n'.join(lines))
    print(' Results recorded at', record_file)
    # Record confidences, if available (long-term protocol).
    if confs is not None:
        lines = ['%.4f' % c for c in confs]
        lines[0] = ''  # initialization frame carries no confidence
        conf_file = record_file.replace(".txt", "_confidence.value")
        with open(conf_file, 'w') as f:
            f.write('\n'.join(lines))
    # Serialize per-frame runtimes; VOT expects 'NaN', not 'nan'.
    lines = ['%.4f' % t for t in times]
    lines = [t.replace('nan', 'NaN') for t in lines]
    # Append this repetition's times as a new comma-separated column.
    time_file = record_file[:record_file.rfind('_')] + '_time.txt'
    if os.path.exists(time_file):
        with open(time_file) as f:
            exist_lines = f.read().strip().split('\n')
        lines = [t + ',' + s for t, s in zip(exist_lines, lines)]
    with open(time_file, 'w') as f:
        f.write('\n'.join(lines))
def _check_deterministic(self, exp, tracker_name, seq_name):
    """Return True when at least three recorded repetitions are all identical."""
    record_dir = os.path.join(
        self.result_dir, tracker_name, exp, seq_name)
    record_files = sorted(glob.glob(os.path.join(
        record_dir, '%s_[0-9]*.txt' % seq_name)))
    # Fewer than three trials is not enough evidence of determinism.
    if len(record_files) < 3:
        return False
    contents = []
    for path in record_files:
        with open(path, 'r') as f:
            contents.append(f.read())
    # Deterministic iff every repetition produced the same file content.
    return len(set(contents)) == 1
def _calc_iou(self, boxes, anno, bound, burnin=False):
    """Return per-frame IoU between recorded boxes and annotations.

    With burnin=True, the ``self.burnin`` frames after each initialization
    flag ([1.0]) are replaced by [0] so they are excluded (NaN) from the IoU.
    """
    if burnin:
        boxes = boxes.copy()
        init_inds = [i for i, box in enumerate(boxes)
                     if box == [1.0]]
        for ind in init_inds:
            boxes[ind:ind + self.burnin] = [[0]] * self.burnin
    # Protocol-flag frames (length-1 boxes) yield NaN instead of an IoU.
    ious = np.array([poly_iou(np.array(a), b, bound)
                     if len(a) > 1 else np.NaN
                     for a, b in zip(boxes, anno)])
    return ious
| 39.766234
| 94
| 0.494571
|
acfe59306b1e711402f54bffa6fa447b647c75c1
| 430
|
py
|
Python
|
my_scripts/print_model_args.py
|
varisd/fairseq
|
69e2be6ff9b7d130a7454fd1b6a21031f669c622
|
[
"MIT"
] | null | null | null |
my_scripts/print_model_args.py
|
varisd/fairseq
|
69e2be6ff9b7d130a7454fd1b6a21031f669c622
|
[
"MIT"
] | null | null | null |
my_scripts/print_model_args.py
|
varisd/fairseq
|
69e2be6ff9b7d130a7454fd1b6a21031f669c622
|
[
"MIT"
] | 1
|
2022-03-30T19:38:11.000Z
|
2022-03-30T19:38:11.000Z
|
#!/usr/bin/env python3
import argparse
import torch
def main(args):
    """Load a fairseq checkpoint and print every (name, value) pair of its args."""
    state = torch.load(args.checkpoint)
    saved_args = state["args"]
    for item in saved_args.__dict__.items():
        print(item)
def parse_args():
    """Build the command-line interface and parse sys.argv."""
    parser = argparse.ArgumentParser(description=__doc__)
    # Path to the checkpoint file whose stored args should be printed.
    parser.add_argument("--checkpoint", type=str, required=True)
    return parser.parse_args()
if __name__ == "__main__":
main(parse_args())
| 17.2
| 57
| 0.655814
|
acfe59395534120396139fc27e6463769341de2d
| 1,785
|
py
|
Python
|
applications/plano_de_acao/models/models.py
|
amaurirg/Web2Py
|
235571cd2273a858cbc8f291731672eadf6b8206
|
[
"BSD-3-Clause"
] | null | null | null |
applications/plano_de_acao/models/models.py
|
amaurirg/Web2Py
|
235571cd2273a858cbc8f291731672eadf6b8206
|
[
"BSD-3-Clause"
] | null | null | null |
applications/plano_de_acao/models/models.py
|
amaurirg/Web2Py
|
235571cd2273a858cbc8f291731672eadf6b8206
|
[
"BSD-3-Clause"
] | null | null | null |
# web2py model definitions for the "plano de acao" (action plan) app.
# NOTE(review): relies on web2py's implicit globals (db, auth, request,
# SQLFORM, Field, IS_IN_SET, IS_IN_DB, response) injected by the framework.

# Department/sector lookup table; rows are displayed by their 'setor' name.
DEP = db.define_table('departamento',
Field('setor'),
format = '%(setor)s'
# format=lambda r: r.setor or 'anonymous'
)
# Problem lookup table; rows are displayed by their 'problema' name.
PROB = db.define_table('problemas',
Field('problema'),
format = '%(problema)s'
)
# Action plan: links a department and a problem to a description, plan,
# action, deadline and completion flag; auth.signature adds audit fields.
PLANO = db.define_table('plano',
Field('depto', 'reference departamento', notnull=True, ondelete='SET NULL', label="Setor"),
Field('titulo', 'reference problemas', notnull=True, label="Título"),
Field('descricao', 'text', notnull=True, label="Descrição"),
Field('plano', 'text', notnull=False, label="Plano"),
Field('acao', 'text', notnull=False, label="Ação"),
Field('prazo', 'date', notnull=True, default=request.now, label="Prazo"),#datetime.date.today()
Field('concluido', label='Concluído', widget=SQLFORM.widgets.radio.widget,
requires = IS_IN_SET(['NÃO', 'SIM']), default='NÃO'),
auth.signature,
format = '%(titulo)s'
)
# Status lookup table for customer-service records.
STATEND = db.define_table('status_atend',
Field('status'),
format = '%(status)s'
)
# Customer-service record: client contact details, description, status and
# whether a support ticket should be opened.
ATEND = db.define_table('atendimentos',
# Field('data_atend', 'date', notnull=True, default=datetime.date.today(), label="Data"),
Field('cliente', notnull=True, label="Cliente"),
Field('contato', notnull=True, label="Contato"),
Field('telefone', notnull=False, default="Não informado", label="Telefone"),
Field('detalhes', 'text', notnull=True, label="Detalhes do Atendimento"),
Field('status', 'reference status_atend', requires=IS_IN_DB(db,db.status_atend.id, '%(status)s'),
notnull=False, label="Status"),
Field('abrir_chamado', notnull=False, widget=SQLFORM.widgets.checkboxes.widget,
requires = IS_IN_SET(['SIM', 'NÃO']), default='NÃO'),
auth.signature,
format = '%(cliente)s'
)
# Site logo shown in the layout; clicking it returns to the index page.
response.logo = A(IMG(_src=URL('static', 'images/logoSiteBest.png'), _href=URL('default', 'index')))
| 36.428571
| 100
| 0.673389
|
acfe5a2dfc5c1ee7c86106139ccad203eede6ed2
| 2,702
|
py
|
Python
|
homeassistant/components/rest/entity.py
|
tbarbette/core
|
8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c
|
[
"Apache-2.0"
] | 11
|
2018-02-16T15:35:47.000Z
|
2020-01-14T15:20:00.000Z
|
homeassistant/components/rest/entity.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 87
|
2020-07-06T22:22:54.000Z
|
2022-03-31T06:01:46.000Z
|
homeassistant/components/rest/entity.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 11
|
2020-12-16T13:48:14.000Z
|
2022-02-01T00:28:05.000Z
|
"""The base entity for the rest component."""
from abc import abstractmethod
from typing import Any
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .data import RestData
class RestEntity(Entity):
    """Entity backed by REST data, fed either by a DataUpdateCoordinator or
    by polling a RestData object directly."""

    def __init__(
        self,
        coordinator: DataUpdateCoordinator[Any],
        rest: RestData,
        name,
        device_class,
        resource_template,
        force_update,
    ) -> None:
        """Store the data sources and the static entity attributes."""
        self.coordinator = coordinator
        self.rest = rest
        self._name = name
        self._device_class = device_class
        self._resource_template = resource_template
        self._force_update = force_update
        super().__init__()

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def device_class(self):
        """Return the class of this sensor."""
        return self._device_class

    @property
    def force_update(self):
        """Return whether state writes should be forced."""
        return self._force_update

    @property
    def should_poll(self) -> bool:
        """Poll only when no coordinator pushes updates to us."""
        return not self.coordinator

    @property
    def available(self):
        """Return the availability of this sensor."""
        if self.coordinator and not self.coordinator.last_update_success:
            return False
        return self.rest.data is not None

    async def async_added_to_hass(self) -> None:
        """Seed the state and subscribe to coordinator updates, if any."""
        await super().async_added_to_hass()
        self._update_from_rest_data()
        if self.coordinator:
            self.async_on_remove(
                self.coordinator.async_add_listener(self._handle_coordinator_update)
            )

    @callback
    def _handle_coordinator_update(self) -> None:
        """Refresh entity state from data pushed by the coordinator."""
        self._update_from_rest_data()
        self.async_write_ha_state()

    async def async_update(self):
        """Fetch the latest REST data (or delegate to the coordinator)."""
        if self.coordinator:
            await self.coordinator.async_request_refresh()
            return
        # Re-render the resource URL when it is template-driven.
        if self._resource_template is not None:
            self.rest.set_url(self._resource_template.async_render(parse_result=False))
        await self.rest.async_update()
        self._update_from_rest_data()

    @abstractmethod
    def _update_from_rest_data(self):
        """Update state from the rest data."""
| 30.022222
| 87
| 0.65359
|
acfe5c53c338f9fae073036dbf7c3a2939f275a0
| 5,063
|
py
|
Python
|
glassimaging/dataloading/brainmask.py
|
koenyyy/glassimaging
|
fade5e849f185e727bc0da6cfeb1eccb8994ca48
|
[
"Apache-2.0"
] | 2
|
2019-09-09T14:26:45.000Z
|
2020-02-27T10:42:49.000Z
|
glassimaging/dataloading/brainmask.py
|
koenyyy/glassimaging
|
fade5e849f185e727bc0da6cfeb1eccb8994ca48
|
[
"Apache-2.0"
] | null | null | null |
glassimaging/dataloading/brainmask.py
|
koenyyy/glassimaging
|
fade5e849f185e727bc0da6cfeb1eccb8994ca48
|
[
"Apache-2.0"
] | 4
|
2019-07-02T08:13:01.000Z
|
2020-08-30T06:52:48.000Z
|
# -*- coding: utf-8 -*-
import glob
import os
import pandas as pd
from glassimaging.dataloading.niftidataset import NiftiDataset
import logging
import json
import nibabel as nib
import numpy as np
from torch.utils.data import Dataset
class BrainmaskDataloader(NiftiDataset):
    """Data manager that indexes subject folders of NIfTI sequences plus a
    brain mask; image paths for each subject are stored at initialization."""

    available_sequences = ['flair', 't1', 't1Gd', 't2']
    # Maps a sequence key to the file name expected inside a subject folder.
    SEQ_TRANSLATION = {'t1Gd': 't1gd.nii.gz', 't2': 't2.nii.gz', 'flair': 'flair.nii.gz', 't1': 't1.nii.gz'}

    def __init__(self, df=None, sequences=('t1', 'flair', 't2', 't1Gd')):
        """Optionally wrap an existing path table; remember the sequences."""
        NiftiDataset.__init__(self)
        if df is not None:
            self.df = df
        self.sequences = sequences

    def importData(self, data_loc, nsplits=5):
        """Scan data_loc for subjects, build the path table and CV splits.

        Subjects missing any requested sequence or the brain mask are dropped.
        """
        subjects = os.listdir(data_loc)
        self.loc = data_loc
        table = pd.DataFrame(columns=['subject', 't1Gd', 't2', 'flair', 't1', 'seg'])
        for subject in subjects:
            row = {'subject': subject, 'split': 0}
            # First match per sequence, empty string when the file is absent.
            for seq in ('t1Gd', 't2', 'flair', 't1'):
                hits = glob.glob(os.path.join(data_loc, subject, self.SEQ_TRANSLATION[seq]))
                row[seq] = hits[0] if hits else ''
            seg_hits = glob.glob(os.path.join(data_loc, subject, 'brainmask.nii.gz'))
            row['seg'] = seg_hits[0] if seg_hits else ''
            table = table.append(row, ignore_index=True)
        self.df = table.set_index('subject')
        # Keep only subjects that have every requested file plus the mask.
        for seq in self.sequences + ('seg',):
            self.df = self.df.loc[self.df[seq] != '']
        self.patients = self.df.index.values
        self.createCVSplits(nsplits)

    @staticmethod
    def fromFile(loc, nsplits=5, sequences=('t1', 'flair', 't2', 't1Gd')):
        """Create a datamanager object from the filesystem."""
        instance = BrainmaskDataloader(sequences=sequences)
        instance.importData(loc, nsplits)
        logging.info('Data loaded created from ' + loc + '.')
        return instance

    def setSplits(self, splits_file):
        """Load the information on cross-validation splits from a json file."""
        with open(splits_file, 'r') as file:
            splits = json.load(file)
        # Set all patients to split -1 so that only patients listed in the
        # splits file are included.
        self.df['split'] = -1
        for i in range(0, len(splits)):
            for p in splits[i]:
                self.df.at[p, 'split'] = i

    def getDataset(self, splits=(), sequences=None, transform=None):
        """Return a BrainmaskDataset restricted to the given CV splits."""
        if len(splits) == 0:
            splits = range(0, self.nsplits)
        if sequences is None:
            sequences = self.available_sequences
        subset = self.df.loc[[s in splits for s in self.df['split']]]
        return BrainmaskDataset(subset, sequences, transform=transform)
class BrainmaskDataset(NiftiDataset, Dataset):
    """Torch dataset yielding percentile-clipped, standardized image channels
    together with a binarized brain-mask segmentation."""

    def __init__(self, dataframe, sequences, transform=None):
        """Wrap a path table and remember sequences and optional transform."""
        Dataset.__init__(self)
        NiftiDataset.__init__(self)
        self.df = dataframe
        self.sequences = sequences
        self.patients = self.df.index.values
        self.transform = transform

    def __len__(self):
        """Number of patients in this dataset."""
        return len(self.patients)

    def __getitem__(self, idx):
        """Load, normalize and (optionally) transform one patient sample."""
        patientname = self.patients[idx]
        image = self.loadSubjectImagesWithoutSeg(patientname, self.sequences, normalized=False)
        seg = self.loadSegBinarize(self.df.loc[patientname]['seg']).astype(float)
        # Per-channel normalization: clip to [1st, 99th] percentile, then
        # standardize to zero mean / unit variance.
        for channel in range(0, image.shape[0]):
            img = image[channel]
            upper = np.percentile(img, 99)
            lower = np.percentile(img, 1)
            img = np.clip(img, lower, upper)
            mean = np.mean(img)
            std = np.std(img)
            if std == 0:
                raise ValueError('Standard deviation of image is zero')
            image[channel] = (img - mean) / std
        sample = {'data': image, 'seg': seg, 'subject': patientname,
                  'seg_file': self.df.loc[patientname]['seg'],
                  'header_source': self.df.loc[patientname]['t1'],
                  't1_source': self.df.loc[patientname]['t1']}
        if self.transform is not None:
            sample = self.transform(sample)
        return sample

    def saveListOfPatients(self, path):
        """Dump the patient identifiers of this dataset to a JSON file."""
        with open(path, 'w') as file:
            json.dump(self.patients.tolist(), file)
| 36.164286
| 162
| 0.56587
|
acfe5cd619759d450788b537a799780efc34d605
| 4,707
|
py
|
Python
|
cors/cors_options.py
|
optimizely/cors-python
|
d3fd6ff20b5c1a05ede0950ce817e584bb969d81
|
[
"MIT"
] | null | null | null |
cors/cors_options.py
|
optimizely/cors-python
|
d3fd6ff20b5c1a05ede0950ce817e584bb969d81
|
[
"MIT"
] | null | null | null |
cors/cors_options.py
|
optimizely/cors-python
|
d3fd6ff20b5c1a05ede0950ce817e584bb969d81
|
[
"MIT"
] | 1
|
2021-02-14T11:58:36.000Z
|
2021-02-14T11:58:36.000Z
|
"""Options for configuring the CORS handler."""
import types
from cors import validators
# Wildcard value for the Access-Control-Allow-Origin response header.
ALL_ORIGINS = '*'
# Methods allowed when the caller does not pass allow_methods explicitly.
DEFAULT_METHODS = ['HEAD', 'GET', 'PUT', 'POST', 'DELETE']
class CorsOptions(object):
    """Stores options for configuring the CORS handler."""

    def __init__(self,
                 allow_origins=True,
                 allow_credentials=False,
                 allow_methods=None,
                 allow_headers=True,
                 expose_headers=None,
                 max_age=None,
                 vary=None,
                 allow_non_cors_requests=True,
                 continue_on_error=False):
        """
        allow_origins (Validator) - The origins that are allowed. Set to True
          to allow all origins, or to a list of valid origins. Defaults to
          True, which allows all origins, and appends the
          Access-Control-Allow-Origin: * response header.
        allow_credentials (bool) - Whether or not the app supports credentials.
          If True, appends the Access-Control-Allow-Credentials: true header.
          Defaults to False.
        allow_methods (Validator) - The HTTP methods that are allowed. Set to
          True to allow all methods, or to a list of allowed methods. Defaults
          to ['HEAD', 'GET', 'PUT', 'POST', 'DELETE'], which appends the
          Access-Control-Allow-Methods: HEAD, GET, PUT, POST, DELETE response
          header.
        allow_headers (Validator) - The HTTP request headers that are allowed.
          Set to True to allow all headers, or to a list of allowed headers.
          Defaults to True, which appends the Access-Control-Allow-Headers
          response header.
        expose_headers (list of strings) - List of response headers to expose
          to the client. Defaults to None. Appends the
          Access-Control-Expose-Headers response header.
        max_age (int) - The maximum time (in seconds) to cache the preflight
          response. Defaults to None, which doesn't append any response
          headers. Appends the Access-Control-Max-Age header when set.
        vary (bool) - Set to True if the Vary: Origin header should be
          appended to the response, False otherwise. The Vary header is useful
          when used in conjunction with a list of valid origins, and tells
          downstream proxy servers not to cache the response based on Origin.
          The default value is False for '*' origins, True otherwise.
        allow_non_cors_requests (bool) - Whether non-CORS requests should be
          allowed. Defaults to True.
        continue_on_error (bool) - Whether an invalid CORS request should
          trigger an error, or continue processing. Defaults to False.
        """
        self.origin_validator = validators.create(allow_origins)

        if allow_methods is None:
            allow_methods = DEFAULT_METHODS
        self.methods_validator = validators.create(allow_methods)

        if allow_headers is None:
            allow_headers = []
        self.headers_validator = validators.create(allow_headers)

        self.allow_credentials = allow_credentials

        if expose_headers is None:
            expose_headers = []
        self.expose_headers = expose_headers

        # BUG FIX: the original used types.IntType / types.ListType, which
        # only exist in the Python 2 `types` module and raise AttributeError
        # on Python 3. The builtin types are equivalent on both versions.
        if max_age and not isinstance(max_age, int):
            raise TypeError('max_age must be an int.')
        self.max_age = max_age

        # The *_value properties below are the actual values to use in the
        # Access-Control-Allow-* headers. Set to None if the value is based
        # on the request and cannot be precalculated. Otherwise these values
        # are set now.

        # Only set the origin value if it is '*', since that is the only
        # option that can be precalculated (the actual origin value depends
        # on the request).
        self.origin_value = None
        if allow_origins is True:
            self.origin_value = ALL_ORIGINS

        # Only set the methods and headers if they are a list. If they are a
        # list, the entire list is returned in the preflight response. If they
        # are not a list (bool, regex, function, etc.), then the request
        # values are echoed back to the user (and the values below are set to
        # None since they can't be precalculated).
        self.methods_value = None
        if isinstance(allow_methods, list):
            self.methods_value = allow_methods

        self.headers_value = None
        if isinstance(allow_headers, list):
            self.headers_value = allow_headers

        if vary is None:
            vary = True
        self.vary = vary

        self.allow_non_cors_requests = allow_non_cors_requests
        self.continue_on_error = continue_on_error
| 40.230769
| 79
| 0.65307
|
acfe5d1bdcc0ba8282f2192c06fcb61beb67c432
| 6,692
|
py
|
Python
|
tutorials/Tutorial1_Basic_QA_Pipeline.py
|
IamMarcIvanov/haystack
|
4f30f038226886114087fa1369d7b86bafe63bc6
|
[
"Apache-2.0"
] | null | null | null |
tutorials/Tutorial1_Basic_QA_Pipeline.py
|
IamMarcIvanov/haystack
|
4f30f038226886114087fa1369d7b86bafe63bc6
|
[
"Apache-2.0"
] | null | null | null |
tutorials/Tutorial1_Basic_QA_Pipeline.py
|
IamMarcIvanov/haystack
|
4f30f038226886114087fa1369d7b86bafe63bc6
|
[
"Apache-2.0"
] | null | null | null |
# ## Task: Question Answering for Game of Thrones
#
# Question Answering can be used in a variety of use cases. A very common one: Using it to navigate through complex
# knowledge bases or long documents ("search setting").
#
# A "knowledge base" could for example be your website, an internal wiki or a collection of financial reports.
# In this tutorial we will work on a slightly different domain: "Game of Thrones".
#
# Let's see how we can use a bunch of Wikipedia articles to answer a variety of questions about the
# marvellous seven kingdoms...
import logging
import subprocess
import time
from haystack import Finder
from haystack.database.elasticsearch import ElasticsearchDocumentStore
from haystack.indexing.cleaning import clean_wiki_text
from haystack.indexing.utils import convert_files_to_dicts, fetch_archive_from_http
from haystack.reader.farm import FARMReader
from haystack.reader.transformers import TransformersReader
from haystack.utils import print_answers
from haystack.retriever.elasticsearch import ElasticsearchRetriever
LAUNCH_ELASTICSEARCH = True
# ## Document Store
#
# Haystack finds answers to queries within the documents stored in a `DocumentStore`. The current implementations of
# `DocumentStore` include `ElasticsearchDocumentStore`, `SQLDocumentStore`, and `InMemoryDocumentStore`.
#
# **Here:** We recommended Elasticsearch as it comes preloaded with features like full-text queries, BM25 retrieval,
# and vector storage for text embeddings.
# **Alternatives:** If you are unable to setup an Elasticsearch instance, then follow the Tutorial 3
# for using SQL/InMemory document stores.
# **Hint**:
# This tutorial creates a new document store instance with Wikipedia articles on Game of Thrones. However, you can
# configure Haystack to work with your existing document stores.
#
# Start an Elasticsearch server
# You can start Elasticsearch on your local machine instance using Docker. If Docker is not readily available in
# your environment (eg., in Colab notebooks), then you can manually download and execute Elasticsearch from source.
if LAUNCH_ELASTICSEARCH:
logging.info("Starting Elasticsearch ...")
status = subprocess.run(
['docker run -d -p 9200:9200 -e "discovery.type=single-node" elasticsearch:7.6.2'], shell=True
)
if status.returncode:
raise Exception("Failed to launch Elasticsearch. If you want to connect to an existing Elasticsearch instance"
"then set LAUNCH_ELASTICSEARCH in the script to False.")
time.sleep(15)
# Connect to Elasticsearch
document_store = ElasticsearchDocumentStore(host="localhost", username="", password="", index="document")
# ## Cleaning & indexing documents
#
# Haystack provides a customizable cleaning and indexing pipeline for ingesting documents in Document Stores.
#
# In this tutorial, we download Wikipedia articles on Game of Thrones, apply a basic cleaning function, and index
# them in Elasticsearch.
# Let's first get some documents that we want to query
# Here: 517 Wikipedia articles for Game of Thrones
doc_dir = "data/article_txt_got"
s3_url = "https://s3.eu-central-1.amazonaws.com/deepset.ai-farm-qa/datasets/documents/wiki_gameofthrones_txt.zip"
fetch_archive_from_http(url=s3_url, output_dir=doc_dir)
# convert files to dicts containing documents that can be indexed to our datastore
dicts = convert_files_to_dicts(dir_path=doc_dir, clean_func=clean_wiki_text, split_paragraphs=True)
# You can optionally supply a cleaning function that is applied to each doc (e.g. to remove footers)
# It must take a str as input, and return a str.
# Now, let's write the docs to our DB.
document_store.write_documents(dicts)
# ## Initalize Retriever, Reader, & Finder
#
# ### Retriever
#
# Retrievers help narrowing down the scope for the Reader to smaller units of text where a given question
# could be answered.
#
# They use some simple but fast algorithm.
# **Here:** We use Elasticsearch's default BM25 algorithm
# **Alternatives:**
# - Customize the `ElasticsearchRetriever`with custom queries (e.g. boosting) and filters
# - Use `EmbeddingRetriever` to find candidate documents based on the similarity of
# embeddings (e.g. created via Sentence-BERT)
# - Use `TfidfRetriever` in combination with a SQL or InMemory Document store for simple prototyping and debugging
retriever = ElasticsearchRetriever(document_store=document_store)
# Alternative: An in-memory TfidfRetriever based on Pandas dataframes for building quick-prototypes
# with SQLite document store.
#
# from haystack.retriever.tfidf import TfidfRetriever
# retriever = TfidfRetriever(document_store=document_store)
# ### Reader
#
# A Reader scans the texts returned by retrievers in detail and extracts the k best answers. They are based
# on powerful, but slower deep learning models.
#
# Haystack currently supports Readers based on the frameworks FARM and Transformers.
# With both you can either load a local model or one from Hugging Face's model hub (https://huggingface.co/models).
# **Here:** a medium sized RoBERTa QA model using a Reader based on
# FARM (https://huggingface.co/deepset/roberta-base-squad2)
# **Alternatives (Reader):** TransformersReader (leveraging the `pipeline` of the Transformers package)
# **Alternatives (Models):** e.g. "distilbert-base-uncased-distilled-squad" (fast) or
# "deepset/bert-large-uncased-whole-word-masking-squad2" (good accuracy)
# **Hint:** You can adjust the model to return "no answer possible" with the no_ans_boost. Higher values mean
# the model prefers "no answer possible"
#
# #### FARMReader
# Load a local model or any of the QA models on
# Hugging Face's model hub (https://huggingface.co/models)
reader = FARMReader(model_name_or_path="deepset/roberta-base-squad2", use_gpu=False)
# #### TransformersReader
# Alternative:
# reader = TransformersReader(
# model="distilbert-base-uncased-distilled-squad", tokenizer="distilbert-base-uncased", use_gpu=-1)
# ### Finder
#
# The Finder sticks together reader and retriever in a pipeline to answer our actual questions.
finder = Finder(reader, retriever)
# ## Voilà! Ask a question!
# You can configure how many candidates the reader and retriever shall return
# The higher top_k_retriever, the better (but also the slower) your answers.
prediction = finder.get_answers(question="Who is the father of Arya Stark?", top_k_retriever=10, top_k_reader=5)
# prediction = finder.get_answers(question="Who created the Dothraki vocabulary?", top_k_reader=5)
# prediction = finder.get_answers(question="Who is the sister of Sansa?", top_k_reader=5)
print_answers(prediction, details="minimal")
| 45.52381
| 118
| 0.771518
|
acfe5d9440df81da0e58194ca2c9931db2f28bca
| 1,113
|
py
|
Python
|
icecaps/estimators/persona_seq2seq_estimator.py
|
entn-at/icecaps
|
caba26258e2054f239f39545fae97c9e67da789f
|
[
"MIT"
] | 277
|
2019-07-31T17:23:42.000Z
|
2022-01-28T11:19:16.000Z
|
icecaps/estimators/persona_seq2seq_estimator.py
|
arita37/icecaps
|
caba26258e2054f239f39545fae97c9e67da789f
|
[
"MIT"
] | 15
|
2019-09-04T14:28:38.000Z
|
2022-03-31T09:18:11.000Z
|
icecaps/estimators/persona_seq2seq_estimator.py
|
arita37/icecaps
|
caba26258e2054f239f39545fae97c9e67da789f
|
[
"MIT"
] | 47
|
2019-08-29T19:54:48.000Z
|
2021-09-18T05:30:07.000Z
|
import tensorflow as tf
import copy
from icecaps.estimators.estimator_chain import EstimatorChain
from icecaps.estimators.seq2seq_encoder_estimator import Seq2SeqEncoderEstimator
from icecaps.estimators.persona_seq2seq_decoder_estimator import PersonaSeq2SeqDecoderEstimator
class PersonaSeq2SeqEstimator(EstimatorChain):
def __init__(self, model_dir="/tmp", params=dict(), config=None, scope="", is_mmi_model=False):
self.encoder = Seq2SeqEncoderEstimator(
model_dir, params, config=config, scope=scope+"/encoder")
self.decoder = PersonaSeq2SeqDecoderEstimator(
model_dir, params, config=config, scope=scope+"/decoder", is_mmi_model=is_mmi_model)
super().__init__([self.encoder, self.decoder],
model_dir, params, config, scope)
@classmethod
def list_params(cls, expected_params=None):
print("Seq2Seq Encoder:")
Seq2SeqEncoderEstimator.list_params(expected_params)
print()
print("Persona Seq2Seq Decoder:")
PersonaSeq2SeqDecoderEstimator.list_params(expected_params)
print()
| 41.222222
| 99
| 0.734052
|
acfe5da808b1427a1334e7f1e39b2c4508cc5059
| 11,446
|
py
|
Python
|
OracleWebCenterSites/kubernetes/common/createFMWRestrictedJRFDomain.py
|
rishiagarwal-oracle/fmw-kubernetes
|
cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f
|
[
"UPL-1.0",
"MIT"
] | 20
|
2020-09-18T08:28:06.000Z
|
2021-11-04T11:48:53.000Z
|
OracleWebCenterSites/kubernetes/common/createFMWRestrictedJRFDomain.py
|
rishiagarwal-oracle/fmw-kubernetes
|
cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f
|
[
"UPL-1.0",
"MIT"
] | 17
|
2020-10-29T03:52:52.000Z
|
2022-03-29T06:47:05.000Z
|
OracleWebCenterSites/kubernetes/common/createFMWRestrictedJRFDomain.py
|
rishiagarwal-oracle/fmw-kubernetes
|
cf53d0aac782cacaa26cb1f8f1cdb7130f69d64f
|
[
"UPL-1.0",
"MIT"
] | 27
|
2020-04-30T09:06:37.000Z
|
2022-03-29T06:49:06.000Z
|
# Copyright (c) 2014, 2021, Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
import os
import sys
import com.oracle.cie.domain.script.jython.WLSTException as WLSTException
class Infra12213Provisioner:
MACHINES = {
'machine1' : {
'NMType': 'SSL',
'ListenAddress': 'localhost',
'ListenPort': 5658
}
}
JRF_12213_TEMPLATES = {
'baseTemplate' : '@@ORACLE_HOME@@/wlserver/common/templates/wls/wls.jar',
'extensionTemplates' : [
'@@ORACLE_HOME@@/oracle_common/common/templates/wls/oracle.jrf_restricted_template.jar',
'@@ORACLE_HOME@@/em/common/templates/wls/oracle.em_wls_restricted_template.jar'
],
'serverGroupsToTarget' : [ 'JRF-MAN-SVR', 'WSMPM-MAN-SVR' ]
}
def __init__(self, oracleHome, javaHome, domainParentDir, adminListenPort, adminName, managedNameBase, managedServerPort, prodMode, managedCount, clusterName):
self.oracleHome = self.validateDirectory(oracleHome)
self.javaHome = self.validateDirectory(javaHome)
self.domainParentDir = self.validateDirectory(domainParentDir, create=True)
return
def createInfraDomain(self, domainName, user, password, adminListenPort, adminName,
managedNameBase, managedServerPort, prodMode, managedCount, clusterName,
exposeAdminT3Channel=None, t3ChannelPublicAddress=None, t3ChannelPort=None):
domainHome = self.createBaseDomain(domainName, user, password, adminListenPort, adminName, managedNameBase,
managedServerPort, prodMode, managedCount, clusterName
)
self.extendDomain(domainHome, exposeAdminT3Channel, t3ChannelPublicAddress, t3ChannelPort)
def createBaseDomain(self, domainName, user, password, adminListenPort, adminName, managedNameBase, managedServerPort, prodMode, managedCount, clusterName):
baseTemplate = self.replaceTokens(self.JRF_12213_TEMPLATES['baseTemplate'])
readTemplate(baseTemplate)
setOption('DomainName', domainName)
setOption('JavaHome', self.javaHome)
if (prodMode == 'true'):
setOption('ServerStartMode', 'prod')
else:
setOption('ServerStartMode', 'dev')
set('Name', domainName)
admin_port = int(adminListenPort)
ms_port = int(managedServerPort)
ms_count = int(managedCount)
# Create Admin Server
# =======================
print 'Creating Admin Server...'
cd('/Servers/AdminServer')
#set('ListenAddress', '%s-%s' % (domain_uid, admin_server_name_svc))
set('ListenPort', admin_port)
set('Name', adminName)
# Define the user password for weblogic
# =====================================
cd('/Security/' + domainName + '/User/weblogic')
set('Name', user)
set('Password', password)
# Create a cluster
# ======================
print 'Creating cluster...'
cd('/')
cl=create(clusterName, 'Cluster')
# Create managed servers
for index in range(0, ms_count):
cd('/')
msIndex = index+1
cd('/')
name = '%s%s' % (managedNameBase, msIndex)
create(name, 'Server')
cd('/Servers/%s/' % name )
print('managed server name is %s' % name);
set('ListenPort', ms_port)
set('NumOfRetriesBeforeMSIMode', 0)
set('RetryIntervalBeforeMSIMode', 1)
set('Cluster', clusterName)
# Create Node Manager
# =======================
print 'Creating Node Managers...'
for machine in self.MACHINES:
cd('/')
create(machine, 'Machine')
cd('Machine/' + machine)
create(machine, 'NodeManager')
cd('NodeManager/' + machine)
for param in self.MACHINES[machine]:
set(param, self.MACHINES[machine][param])
setOption('OverwriteDomain', 'true')
domainHome = self.domainParentDir + '/' + domainName
print 'Will create Base domain at ' + domainHome
print 'Writing base domain...'
writeDomain(domainHome)
closeTemplate()
print 'Base domain created at ' + domainHome
return domainHome
def extendDomain(self, domainHome, exposeAdminT3Channel, t3ChannelPublicAddress,
t3ChannelPort):
print 'Extending domain at ' + domainHome
readDomain(domainHome)
setOption('AppDir', self.domainParentDir + '/applications')
print 'ExposeAdminT3Channel %s with %s:%s ' % (exposeAdminT3Channel, t3ChannelPublicAddress, t3ChannelPort)
if 'true' == exposeAdminT3Channel:
self.enable_admin_channel(t3ChannelPublicAddress, t3ChannelPort)
print 'Applying JRF templates...'
for extensionTemplate in self.JRF_12213_TEMPLATES['extensionTemplates']:
addTemplate(self.replaceTokens(extensionTemplate))
print 'Extension Templates added'
print 'Preparing to update domain...'
updateDomain()
print 'Domain updated successfully'
closeDomain()
return
###########################################################################
# Helper Methods #
###########################################################################
def validateDirectory(self, dirName, create=False):
directory = os.path.realpath(dirName)
if not os.path.exists(directory):
if create:
os.makedirs(directory)
else:
message = 'Directory ' + directory + ' does not exist'
raise WLSTException(message)
elif not os.path.isdir(directory):
message = 'Directory ' + directory + ' is not a directory'
raise WLSTException(message)
return self.fixupPath(directory)
def fixupPath(self, path):
result = path
if path is not None:
result = path.replace('\\', '/')
return result
def replaceTokens(self, path):
result = path
if path is not None:
result = path.replace('@@ORACLE_HOME@@', oracleHome)
return result
def enable_admin_channel(self, admin_channel_address, admin_channel_port):
if admin_channel_address == None or admin_channel_port == 'None':
return
cd('/')
admin_server_name = get('AdminServerName')
print('setting admin server t3channel for ' + admin_server_name)
cd('/Servers/' + admin_server_name)
create('T3Channel', 'NetworkAccessPoint')
cd('/Servers/' + admin_server_name + '/NetworkAccessPoint/T3Channel')
set('ListenPort', int(admin_channel_port))
set('PublicPort', int(admin_channel_port))
set('PublicAddress', admin_channel_address)
#############################
# Entry point to the script #
#############################
def usage():
print sys.argv[0] + ' -oh <oracle_home> -jh <java_home> -parent <domain_parent_dir> -name <domain-name> ' + \
'-user <domain-user> -password <domain-password> ' + \
'-rcuDb <rcu-database> -rcuPrefix <rcu-prefix> -rcuSchemaPwd <rcu-schema-password> ' \
'-adminListenPort <adminListenPort> -adminName <adminName> ' \
'-managedNameBase <managedNameBase> -managedServerPort <managedServerPort> -prodMode <prodMode> ' \
'-managedServerCount <managedCount> -clusterName <clusterName> ' \
'-exposeAdminT3Channel <quoted true or false> -t3ChannelPublicAddress <address of the cluster> ' \
'-t3ChannelPort <t3 channel port> '
sys.exit(0)
# Uncomment for Debug only
#print str(sys.argv[0]) + " called with the following sys.argv array:"
#for index, arg in enumerate(sys.argv):
# print "sys.argv[" + str(index) + "] = " + str(sys.argv[index])
if len(sys.argv) < 16:
usage()
#oracleHome will be passed by command line parameter -oh.
oracleHome = None
#javaHome will be passed by command line parameter -jh.
javaHome = None
#domainParentDir will be passed by command line parameter -parent.
domainParentDir = None
#domainUser is hard-coded to weblogic. You can change to other name of your choice. Command line paramter -user.
domainUser = 'weblogic'
#domainPassword will be passed by Command line parameter -password.
domainPassword = None
#rcuDb will be passed by command line parameter -rcuDb.
rcuDb = None
#change rcuSchemaPrefix to your infra schema prefix. Command line parameter -rcuPrefix.
rcuSchemaPrefix = 'DEV12'
#change rcuSchemaPassword to your infra schema password. Command line parameter -rcuSchemaPwd.
rcuSchemaPassword = None
exposeAdminT3Channel = None
t3ChannelPort = None
t3ChannelPublicAddress = None
i = 1
while i < len(sys.argv):
if sys.argv[i] == '-oh':
oracleHome = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-jh':
javaHome = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-parent':
domainParentDir = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-name':
domainName = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-user':
domainUser = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-password':
domainPassword = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-rcuDb':
rcuDb = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-rcuPrefix':
rcuSchemaPrefix = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-rcuSchemaPwd':
rcuSchemaPassword = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-adminListenPort':
adminListenPort = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-adminName':
adminName = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-managedNameBase':
managedNameBase = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-managedServerPort':
managedServerPort = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-prodMode':
prodMode = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-managedServerCount':
managedCount = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-clusterName':
clusterName = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-t3ChannelPublicAddress':
t3ChannelPublicAddress = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-t3ChannelPort':
t3ChannelPort = sys.argv[i + 1]
i += 2
elif sys.argv[i] == '-exposeAdminT3Channel':
exposeAdminT3Channel = sys.argv[i + 1]
i += 2
else:
print 'Unexpected argument switch at position ' + str(i) + ': ' + str(sys.argv[i])
usage()
sys.exit(1)
provisioner = Infra12213Provisioner(oracleHome, javaHome, domainParentDir, adminListenPort, adminName, managedNameBase, managedServerPort, prodMode, managedCount, clusterName)
provisioner.createInfraDomain(domainName, domainUser, domainPassword, adminListenPort, adminName, managedNameBase, managedServerPort, prodMode, managedCount,
clusterName, exposeAdminT3Channel, t3ChannelPublicAddress, t3ChannelPort)
| 39.19863
| 175
| 0.601171
|
acfe5db7c8ca3a7cb5be561059f1eceb46d2a4d7
| 747
|
py
|
Python
|
GEOS_Util/coupled_diagnostics/analysis/tseries/sst_gm.py
|
GEOS-ESM/GMAO_Shared
|
022af23abbc7883891006b57379be96d9a50df23
|
[
"NASA-1.3",
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-02-01T17:36:53.000Z
|
2020-02-01T17:36:53.000Z
|
GEOS_Util/coupled_diagnostics/analysis/tseries/sst_gm.py
|
GEOS-ESM/GMAO_Shared
|
022af23abbc7883891006b57379be96d9a50df23
|
[
"NASA-1.3",
"ECL-2.0",
"Apache-2.0"
] | 105
|
2019-07-08T19:27:23.000Z
|
2022-03-22T02:12:16.000Z
|
GEOS_Util/coupled_diagnostics/analysis/tseries/sst_gm.py
|
GEOS-ESM/GMAO_Shared
|
022af23abbc7883891006b57379be96d9a50df23
|
[
"NASA-1.3",
"ECL-2.0",
"Apache-2.0"
] | 10
|
2019-07-05T18:00:44.000Z
|
2022-03-11T16:26:29.000Z
|
#!/usr/bin/env python
import matplotlib
matplotlib.use('Agg')
import os, sys
import matplotlib.pyplot as pl
from matplotlib import dates
import g5lib.plotters as ptrs
from g5lib import g5dset
exp=g5dset.read_exp(sys.argv[1])
varname='TS'
exp.ctl=g5dset.Ctl(exp,'geosgcm_ocn2d')
exp.gm=exp.ctl(varname).aave(); exp.gm.data-=273.16
# Plot
path=exp.plot_path
try:
os.makedirs(path)
except OSError:
pass
pl.clf()
exp.gm.name=exp.ctl.name +' Global SST'
p=ptrs.Plotter1d()
p(exp.gm)
ax=p.axis
if(exp.gm.time.size > 2500):
myloc=dates.YearLocator((exp.gm.time.size/1000)*10)
ax.xaxis.set_major_locator(myloc)
ax.set_ylim((16,22)); ax.set_ylabel('T, $^0C$')
pl.grid(); pl.tight_layout(); pl.show()
pl.savefig(path+'/sst_gm.png')
| 19.153846
| 55
| 0.716198
|
acfe5dc7af8834b4d1adabbd93a270ec23ea7675
| 845
|
py
|
Python
|
docs/conf.py
|
ghuntley/rules_haskell
|
adc3503387fbb54173dc4b4f21ae0aefe33759a4
|
[
"Apache-2.0"
] | 222
|
2017-11-06T09:01:12.000Z
|
2022-03-28T08:24:22.000Z
|
docs/conf.py
|
ghuntley/rules_haskell
|
adc3503387fbb54173dc4b4f21ae0aefe33759a4
|
[
"Apache-2.0"
] | 1,168
|
2017-11-19T07:43:13.000Z
|
2022-03-31T12:40:39.000Z
|
docs/conf.py
|
ghuntley/rules_haskell
|
adc3503387fbb54173dc4b4f21ae0aefe33759a4
|
[
"Apache-2.0"
] | 94
|
2017-11-17T22:46:37.000Z
|
2022-03-15T00:16:56.000Z
|
project = 'rules_haskell'
copyright = '2018, The rules_haskell authors'
source_suffix = '.rst'
extensions = [
'sphinx.ext.graphviz',
'sphinx.ext.todo',
]
master_doc = 'index'
language = None
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_theme_options = {
'show_powered_by': False,
'github_user': 'tweag',
'github_repo': 'rules_haskell',
'github_banner': True,
'github_type': "star",
'show_related': False,
'note_bg': '#FFF59C',
}
html_show_sphinx = False
todo_include_todos = True
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass).
latex_documents = [
(master_doc, 'rules_haskell.tex', 'rules\\_haskell Documentation',
'Tweag I/O', 'manual'),
]
| 20.119048
| 70
| 0.68284
|
acfe5e42fa391d9ac072e5a2e4d70c721cb543f8
| 8,265
|
py
|
Python
|
data/models/farmer.py
|
SIXMON/peps
|
48c09a951a0193ada7b91c8bb6efc4b1232c3520
|
[
"MIT"
] | 5
|
2019-08-29T13:55:47.000Z
|
2021-11-15T08:30:33.000Z
|
data/models/farmer.py
|
SIXMON/peps
|
48c09a951a0193ada7b91c8bb6efc4b1232c3520
|
[
"MIT"
] | 295
|
2019-08-19T12:40:29.000Z
|
2022-01-24T14:03:20.000Z
|
data/models/farmer.py
|
SIXMON/peps
|
48c09a951a0193ada7b91c8bb6efc4b1232c3520
|
[
"MIT"
] | 7
|
2020-05-27T06:28:48.000Z
|
2021-11-17T10:00:54.000Z
|
import uuid
from urllib.parse import quote
from django.utils import timezone
from django.contrib.auth import get_user_model
from django.db import models, connection
from django.db.models import JSONField
from django.utils.html import mark_safe
from django_better_admin_arrayfield.models.fields import ArrayField
from data.utils import optimize_image
from data.forms import ChoiceArrayField
PRODUCTIONS = (
('Grandes cultures', 'Grandes cultures'),
('Cultures industrielles', 'Cultures industrielles'),
('Élevage allaitant', 'Élevage allaitant'),
('Élevage laitier', 'Élevage laitier'),
('Élevage engraissement', 'Élevage engraissement'),
('Élevage poule pondeuses', 'Élevage poule pondeuses'),
('Cultures légumières', 'Cultures légumières'),
('Vigne', 'Vigne'),
('Cultures spécialisées', 'Cultures spécialisées'),
('Apiculture', 'Apiculture'),
('Autre', 'Autre'),
)
GROUPS = (
('DEPHY', 'DEPHY'),
('GIEE', 'GIEE'),
('30000', '30000'),
('CETA', 'CETA'),
('Groupe de coopérative', 'Groupe de coopérative'),
('Groupe de négoce', 'Groupe de négoce'),
('Groupe de chambre d\'agriculture', 'Groupe de chambre d\'agriculture'),
('Groupe de voisins', 'Groupe de voisins'),
('CUMA', 'CUMA'),
('Civam', 'Civam'),
('Autre', 'Autre'),
)
TYPE_AGRICULTURE = (
('Agriculture Biologique', 'Agriculture Biologique'),
('Agriculture de Conservation des Sols', 'Agriculture de Conservation des Sols'),
('Techniques Culturales Simplifiées', 'Techniques Culturales Simplifiées'),
('Labour occasionnel', 'Labour occasionnel'),
('Agroforesterie', 'Agroforesterie'),
('Conventionnel', 'Conventionnel'),
('Cahier des charges industriel', 'Cahier des charges industriel'),
('Label qualité', 'Label qualité'),
('Label environnemental (HVE)', 'Label environnemental (HVE)'),
('Autre', 'Autre'),
)
TYPE_LIVESTOCK = (
('Bovin', 'Bovin'),
('Ovin', 'Ovin'),
('Caprin', 'Caprin'),
('Avicole', 'Avicole'),
('Porcin', 'Porcin'),
('Autre', 'Autre'),
)
def get_next_increment():
with connection.cursor() as cursor:
cursor.execute("SELECT nextval('farmer_sequence_number')")
result = cursor.fetchone()
return result[0]
class Farmer(models.Model):
class Meta:
ordering = ['-creation_date']
# These two are unique values. UUIDs were chosen initially as IDs as they
# allow client ID generation, less issues when working with multiple DBs, etc.
# However, they are cumbersome to use on some situations (e.g., URLs), so we
# also benefit from a short sequential ID that uses a Postgres sequence.
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
sequence_number = models.IntegerField(default=get_next_increment, editable=False, unique=True)
#############################################################################
external_id = models.CharField(max_length=100, db_index=True, null=True)
user = models.OneToOneField(get_user_model(), on_delete=models.CASCADE, null=True)
approved = models.BooleanField(default=False, db_index=True)
modification_date = models.DateTimeField(auto_now=True)
creation_date = models.DateTimeField(default=timezone.now)
airtable_json = JSONField(null=True, blank=True)
airtable_url = models.TextField(null=True)
cgu_approved = models.BooleanField(default=False)
can_send_messages = models.BooleanField(default=False)
name = models.TextField(null=True, blank=True)
farm_name = models.TextField(null=True, blank=True)
email = models.EmailField(db_index=True, null=True, blank=True)
phone_number = models.CharField(max_length=50, null=True, blank=True)
installation_date = models.DateField(null=True, blank=True)
description = models.TextField(null=True, blank=True)
cultures = models.TextField(null=True, blank=True)
lat = models.DecimalField(null=True, blank=True, max_digits=9, decimal_places=6)
lon = models.DecimalField(null=True, blank=True, max_digits=9, decimal_places=6)
production = ChoiceArrayField(models.CharField(max_length=100, choices=PRODUCTIONS), default=list, null=True, blank=True)
groups = ChoiceArrayField(models.CharField(max_length=200, choices=GROUPS), default=list, null=True, blank=True)
agriculture_types = ChoiceArrayField(models.TextField(choices=TYPE_AGRICULTURE), default=list, null=True, blank=True)
profile_image = models.ImageField(null=True, blank=True)
postal_code = models.CharField(max_length=20, null=True, blank=True)
personnel = models.TextField(null=True, blank=True)
livestock_types = ChoiceArrayField(models.TextField(choices=TYPE_LIVESTOCK), default=list, null=True, blank=True)
livestock_number = models.TextField(null=True, blank=True)
soil_type = models.TextField(null=True, blank=True)
specificities = models.TextField(null=True, blank=True)
contact_possible = models.BooleanField(default=True)
email_for_messages_allowed = models.BooleanField(default=True)
links = ArrayField(models.TextField(), default=list, blank=True, null=True)
surface = models.TextField(null=True, blank=True)
surface_cultures = models.TextField(null=True, blank=True)
surface_meadows = models.TextField(null=True, blank=True)
output = models.TextField(null=True, blank=True)
onboarding_shown = models.BooleanField(default=False)
self_created = models.BooleanField(default=False)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if self.profile_image:
self.profile_image = optimize_image(self.profile_image, self.profile_image.name)
if self.email:
self.email = get_user_model().objects.normalize_email(self.email)
if self.can_send_messages:
for message in self.sent_messages.filter(pending_delivery=True):
message.pending_delivery = False
message.save()
message.send_email()
super(Farmer, self).save(force_insert, force_update, using, update_fields)
@property
def url_slug(self):
url_name = quote(self.farm_name or self.name)
return f'{url_name}--{self.sequence_number or ""}'
@property
def url_path(self):
url_name = quote(self.farm_name or self.name)
return f'/exploitation/{self.url_slug}'
@property
def html_link(self):
"""
This is used in the admin panel to link to the farmer's page
"""
if self.sequence_number and self.approved:
unescaped_url = f'/exploitation/{self.farm_name or self.name}--{self.sequence_number}'
return mark_safe(f'<a href="{self.url_path}" target="_blank">{unescaped_url}</a>')
else:
return 'Pas encore live'
def __str__(self):
return self.name
# This is sadly necessary because we can't use an ArrayField of ImageFields
# https://code.djangoproject.com/ticket/25756#no1
class FarmImage(models.Model):
farmer = models.ForeignKey(Farmer, related_name='images', on_delete=models.CASCADE, null=True)
image = models.ImageField()
label = models.TextField(null=True, blank=True)
copyright = models.TextField(null=True, blank=True)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
self.image = optimize_image(self.image, self.image.name)
super(FarmImage, self).save(force_insert, force_update, using, update_fields)
def create_user_if_needed(sender, instance, **kwargs):
if sender == Farmer and instance.email:
email = get_user_model().objects.normalize_email(instance.email)
existing_user = get_user_model().objects.filter(email=email).first()
if existing_user:
instance.user = existing_user
else:
random_password = get_user_model().objects.make_random_password()
new_user = get_user_model().objects.create_user(email=email, username=email, password=random_password)
instance.user = new_user
if sender == Farmer and not instance.email:
instance.user = None
models.signals.pre_save.connect(create_user_if_needed, sender=Farmer)
| 40.915842
| 125
| 0.697641
|
acfe5e49e4afbc279e91160ea9b864c1835ff9ef
| 31,263
|
py
|
Python
|
evaluate.py
|
earthobservatory/standard_product_completeness_evaluator
|
a3eb04bfb9d200615f0480bbd1032fe80026bd0f
|
[
"Apache-2.0"
] | null | null | null |
evaluate.py
|
earthobservatory/standard_product_completeness_evaluator
|
a3eb04bfb9d200615f0480bbd1032fe80026bd0f
|
[
"Apache-2.0"
] | null | null | null |
evaluate.py
|
earthobservatory/standard_product_completeness_evaluator
|
a3eb04bfb9d200615f0480bbd1032fe80026bd0f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
'''
Input are either an AOI or a GUNW/GUNW-merged. For a given input product, determines which GUNWs/GUNW merged
are complete along track over the AOI (or, if a GUNW, any AOIs). If there are complete
products, it tags the product with <aoi_name> tag, and creates an AOI_TRACK product
for all GUNWs along that track/orbit pairing.
'''
from __future__ import print_function
import re, sys, os
import json
import hashlib
import urllib3
import requests
import warnings
import dateutil
import dateutil.parser
from hysds.celery import app
import tagger
import traceback
import build_validated_product
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
AOI_TRACK_PREFIX = 'S1-GUNW-AOI_TRACK'
AOI_TRACK_MERGED_PREFIX = 'S1-GUNW-MERGED-AOI_TRACK'
AOI_TRACK_VERSION = 'v2.0'
S1_GUNW_VERSION = "v2.0.2"
S1_GUNW_MERGED_VERSION = "v2.0.2"
ALLOWED_PROD_TYPES = ['S1-GUNW', "S1-GUNW-MERGED", "area_of_interest", "S1-GUNW-GREYLIST"]
INDEX_MAPPING = {'S1-GUNW-acq-list': 'grq_*_s1-gunw-acq-list',
'S1-GUNW':'grq_*_s1-gunw',
'S1-GUNW-MERGED': 'grq_*_s1-*-merged',
'S1-GUNW-acqlist-audit_trail': 'grq_*_s1-gunw-acqlist-audit_trail',
'S1-GUNW-AOI_TRACK': 'grq_*_s1-gunw-aoi_track',
'S1-GUNW-MERGED-AOI_TRACK': 'grq_*_s1-gunw-merged-aoi_track',
'S1-GUNW-GREYLIST': 'grq_*_s1-gunw-greylist',
'area_of_interest': 'grq_*_area_of_interest'}
class evaluate():
    '''evaluates input product for completeness. Tags GUNWs/GUNW-merged & publishes AOI_TRACK products'''

    def __init__(self):
        '''fill values from context, error if invalid inputs, then kickoff evaluation'''
        self.ctx = load_context()
        self.prod_type = self.ctx.get('prod_type', False)
        self.track_number = self.ctx.get('track_number', False)
        self.full_id_hash = self.ctx.get('full_id_hash', False)
        self.uid = self.ctx.get('uid', False)
        self.location = self.ctx.get('location', False)
        self.starttime = self.ctx.get('starttime', False)
        self.endtime = self.ctx.get('endtime', False)
        self.version = self.ctx.get('version', False)
        self.orbit_number = self.ctx.get('orbit_number', False)
        self.s1_gunw_version = self.ctx.get("S1-GUNW-version", S1_GUNW_VERSION)
        self.s1_gunw_merged_version = self.ctx.get("S1-GUNW-MERGED-version", S1_GUNW_MERGED_VERSION)
        # exit if invalid input product type
        if self.prod_type not in ALLOWED_PROD_TYPES:
            raise Exception('input product type: {} not in allowed product types for PGE'.format(self.prod_type))
        if not self.prod_type == 'area_of_interest' and not self.full_id_hash:
            warnings.warn('Warning: full_id_hash not found in metadata. Will attempt to generate')
        # run evaluation & publishing by job type
        if self.prod_type == 'area_of_interest':
            self.run_aoi_evaluation()
        elif self.prod_type == 'S1-GUNW-GREYLIST':
            self.run_greylist_evaluation()
        else:
            self.run_gunw_evaluation()

    def run_aoi_evaluation(self):
        '''runs the evaluation & publishing for an aoi'''
        # get all audit_trail products over the aoi
        audit_trail_list = get_objects('S1-GUNW-acqlist-audit_trail', aoi=self.uid)
        # determine all full_id_hashes from all audit_trail products
        full_id_hashes = list(sort_by_hash(audit_trail_list).keys())
        # retrieve associated gunws from the full_id_hash list
        s1_gunw = filter_hashes(get_objects('S1-GUNW', location=self.location, starttime=self.starttime, endtime=self.endtime), full_id_hashes)
        s1_gunw_merged = filter_hashes(get_objects('S1-GUNW-MERGED', location=self.location, starttime=self.starttime, endtime=self.endtime), full_id_hashes)
        # get all greylist hashes
        greylist_hashes = sort_by_hash(get_objects('S1-GUNW-GREYLIST', location=self.location)).keys()
        # get the full aoi product
        aois = get_objects('area_of_interest', uid=self.uid, version=self.version)
        if len(aois) > 1:
            # FIX: the format string previously ended in '{}}', which raises
            # ValueError ("Single '}' encountered") instead of this Exception
            raise Exception('unable to distinguish between multiple AOIs with same uid but different version: {}'.format(self.uid))
        if len(aois) == 0:
            raise Exception('unable to find referenced AOI: {}'.format(self.uid))
        aoi = aois[0]
        # get the matching acquisition list products
        acq_list = self.get_matching_acq_lists(aoi, audit_trail_list, greylist_hashes)
        for gunw_list in [s1_gunw, s1_gunw_merged]:
            # evaluate to see which products are complete, tagging and publishing complete products
            self.gen_completed(gunw_list, acq_list, aoi)

    def run_greylist_evaluation(self):
        '''runs the evaluation and publishing for a greylist'''
        # fill the hash if it doesn't exist
        if self.full_id_hash is False:
            print('attempting to fill hash for submitted product...')
            self.full_id_hash = gen_hash(get_objects(self.prod_type, uid=self.uid)[0])
            print('Found hash {}'.format(self.full_id_hash))
        # get all the greylists
        greylist_hashes = sort_by_hash(get_objects('S1-GUNW-GREYLIST')).keys()
        # determine which AOI(s) the gunw corresponds to
        all_audit_trail = get_objects('S1-GUNW-acqlist-audit_trail', full_id_hash=self.full_id_hash)
        audit_by_aoi = sort_by_aoi(all_audit_trail)
        for aoi_id in audit_by_aoi.keys():
            print('Evaluating associated GUNWs over AOI: {}'.format(aoi_id))
            aois = get_objects('area_of_interest', uid=aoi_id)
            if len(aois) > 1:
                # FIX: format string used to end in '{}}' (raises ValueError)
                raise Exception('unable to distinguish between multiple AOIs with same uid but different version: {}'.format(aoi_id))
            if len(aois) == 0:
                warnings.warn('unable to find referenced AOI: {}'.format(aoi_id))
                continue
            aoi = aois[0]
            # get all audit-trail products that match orbit and track
            matching_audit_trail_list = get_objects('S1-GUNW-acqlist-audit_trail', track_number=self.track_number, aoi=aoi_id)
            print('Found {} audit trail products matching track: {}'.format(len(matching_audit_trail_list), self.track_number))
            if len(matching_audit_trail_list) < 1:
                continue
            # get all acq-list products that match the audit trail
            acq_lists = self.get_matching_acq_lists(aoi, matching_audit_trail_list, greylist_hashes)
            if len(acq_lists) < 1:
                print('Found {} acq-lists.'.format(len(acq_lists)))
                continue
            # filter invalid orbits
            acq_lists = sort_by_orbit(acq_lists).get(stringify_orbit(self.orbit_number))
            if not acq_lists:
                # FIX: .get() returns None when no acq-list matches this orbit;
                # that previously crashed downstream in gen_completed
                continue
            # get all associated gunw or gunw-merged products
            gunws = get_objects('S1-GUNW', track_number=self.track_number, orbit_numbers=self.orbit_number, version=self.s1_gunw_version)
            if len(gunws) < 1:
                print("No S1-GUNW FOUND for track_number={}, orbit_numbers={}, s1-gunw-version={}".format(self.track_number, self.orbit_number, self.s1_gunw_version))
            else:
                # evaluate to determine which products are complete, tagging & publishing complete products
                self.gen_completed(gunws, acq_lists, aoi)
            gunws_merged = get_objects('S1-GUNW-MERGED', track_number=self.track_number, orbit_numbers=self.orbit_number, version=self.s1_gunw_merged_version)
            if len(gunws_merged) < 1:
                print("No S1-GUNW-MERGED FOUND for track_number={}, orbit_numbers={}, s1-gunw-version={}".format(self.track_number, self.orbit_number, self.s1_gunw_merged_version))
            else:
                # evaluate to determine which products are complete, tagging & publishing complete products
                self.gen_completed(gunws_merged, acq_lists, aoi)

    def run_gunw_evaluation(self):
        '''runs the evaluation and publishing for a gunw or gunw-merged'''
        # fill the hash if it doesn't exist
        if self.full_id_hash is False:
            print('attempting to fill hash for submitted product...')
            self.full_id_hash = gen_hash(get_objects(self.prod_type, uid=self.uid)[0])
            print('Found hash {}'.format(self.full_id_hash))
        # get all the greylists
        greylist_hashes = sort_by_hash(get_objects('S1-GUNW-GREYLIST')).keys()
        # determine which AOI(s) the gunw corresponds to
        all_audit_trail = get_objects('S1-GUNW-acqlist-audit_trail', full_id_hash=self.full_id_hash)
        audit_by_aoi = sort_by_aoi(all_audit_trail)
        for aoi_id in audit_by_aoi.keys():
            print('Evaluating associated GUNWs over AOI: {}'.format(aoi_id))
            aois = get_objects('area_of_interest', uid=aoi_id)
            if len(aois) > 1:
                # FIX: format string used to end in '{}}' (raises ValueError)
                raise Exception('unable to distinguish between multiple AOIs with same uid but different version: {}'.format(aoi_id))
            if len(aois) == 0:
                warnings.warn('unable to find referenced AOI: {}'.format(aoi_id))
                continue
            aoi = aois[0]
            # get all audit-trail products that match orbit and track
            matching_audit_trail_list = get_objects('S1-GUNW-acqlist-audit_trail', track_number=self.track_number, aoi=aoi_id)
            print('Found {} audit trail products matching track: {}'.format(len(matching_audit_trail_list), self.track_number))
            if len(matching_audit_trail_list) < 1:
                continue
            # get all acq-list products that match the audit trail
            acq_lists = self.get_matching_acq_lists(aoi, matching_audit_trail_list, greylist_hashes)
            if len(acq_lists) < 1:
                print('Found {} acq-lists.'.format(len(acq_lists)))
                continue
            # filter invalid orbits
            print("self.orbit_number : {}".format(self.orbit_number))
            acq_lists = sort_by_orbit(acq_lists).get(stringify_orbit(self.orbit_number))
            if not acq_lists:
                # FIX: .get() returns None when no acq-list matches this orbit;
                # that previously crashed downstream in gen_completed
                continue
            # get all associated gunw or gunw-merged products
            gunws = get_objects(self.prod_type, track_number=self.track_number, orbit_numbers=self.orbit_number, version=self.version)
            # evaluate to determine which products are complete, tagging & publishing complete products
            completed = self.gen_completed(gunws, acq_lists, aoi)
            if not completed:
                logger.info("Not Completed : {}".format(self.uid))

    def gen_completed(self, gunws, acq_lists, aoi):
        '''determines which gunws (or gunw-merged) products are complete along track & orbit,
        tags and publishes TRACK_AOI products for those that are complete.
        Returns True when the evaluated grouping is complete, False when not,
        and None when there is nothing to evaluate.'''
        hashed_acq_dct = sort_duplicates_by_hash(acq_lists)
        hashed_gunw_dct = sort_duplicates_by_hash(gunws)  # removes older gunws with duplicate full_id_hash
        track_dct = sort_by_track(acq_lists)
        for track in track_dct.keys():
            track_list = track_dct.get(track, [])
            orbit_dct = sort_by_orbit(track_list)
            for orbit in orbit_dct.keys():
                print('------------------------------')
                orbit_list = orbit_dct.get(orbit, [])
                print('Found {} ACQ-lists over aoi: {} & track: {} & orbit: {}'.format(len(orbit_list), aoi.get('_source').get('id'), track, orbit))
                print('Evaluating GUNWs and Acquisitions over track: {} and orbit: {}'.format(track, orbit))
                # get all full_id_hashes in the acquisition list
                all_hashes = [get_hash(x) for x in orbit_list]
                print('all relevant ids over AOI: {}'.format(', '.join([x.get('_source').get('id') for x in orbit_list])))
                print('all relevant hashes over AOI: {}'.format(', '.join(all_hashes)))
                # if all of them are in the list of gunw hashes, they are complete
                complete = True
                complete_acq_lists = []
                incomplete_acq_lists = []
                missing_hashes = []
                for full_id_hash in all_hashes:
                    if hashed_gunw_dct.get(full_id_hash, False) is False:
                        complete = False
                        missing_hashes.append(full_id_hash)
                        print('hash: {} is missing... products are incomplete.'.format(full_id_hash))
                        incomplete_acq_lists.append(hashed_acq_dct.get(full_id_hash))
                    else:
                        complete_acq_lists.append(hashed_acq_dct.get(full_id_hash))
                print('found {} complete and {} missing hashes.'.format(len(complete_acq_lists), len(incomplete_acq_lists)))
                if not complete:
                    print('missing hashes: {}'.format(', '.join(missing_hashes)))
                # tag acq-lists if iterating over gunws (not gunw-merged)
                # FIX: guard against an empty gunws list (run_aoi_evaluation can
                # pass one), which previously raised IndexError on gunws[0]
                if gunws and gunws[0].get('_type', False) == 'S1-GUNW':
                    print('tagging acq-lists appropriately')
                    for obj in complete_acq_lists:
                        tags = obj.get('_source', {}).get('metadata', {}).get('tags', [])
                        uid = obj.get('_source', {}).get('id', False)
                        if 'gunw_missing' in tags:
                            print('removing tag: "gunw_missing" from: {}'.format(uid))
                            self.remove_obj_tag(obj, 'gunw_missing')
                        if 'gunw_generated' not in tags:
                            print('adding tag: "gunw_generated" to: {}'.format(uid))
                            self.tag_obj(obj, 'gunw_generated')
                    for obj in incomplete_acq_lists:
                        tags = obj.get('_source', {}).get('metadata', {}).get('tags', [])
                        uid = obj.get('_source', {}).get('id', False)
                        if 'gunw_generated' in tags:
                            print('removing tag: "gunw_generated" from: {}'.format(uid))
                            self.remove_obj_tag(obj, 'gunw_generated')
                        if 'gunw_missing' not in tags:
                            print('adding tag: "gunw_missing" to: {}'.format(uid))
                            self.tag_obj(obj, 'gunw_missing')
                # they are complete. tag & generate products
                # NOTE(review): returning inside the track/orbit loops means only
                # the FIRST track/orbit grouping is ever evaluated. Preserved
                # as-is because run_gunw_evaluation relies on the return value.
                if complete:
                    gunw_list = [hashed_gunw_dct.get(hsh) for hsh in all_hashes]
                    print('found {} products complete over aoi: {} for track: {} and orbit: {}'.format(len(gunw_list), aoi.get('_id'), track, orbit))
                    self.tag_and_publish(gunw_list, aoi)
                    return True
                else:
                    return False

    def tag_and_publish(self, gunws, aoi):
        '''tags each object in the input list, then publishes an appropriate
        aoi-track product'''
        if len(gunws) < 1:
            return
        if self.aoi_track_is_published(gunws, aoi.get('_source').get('id')):
            print('AOI_TRACK product is already published... skipping.')
            return
        print('AOI_TRACK product has not been published. Publishing product...')
        for obj in gunws:
            # tag every gunw with the AOI id
            tag = aoi.get('_source').get('id')
            self.tag_obj(obj, tag)
        prefix = AOI_TRACK_PREFIX
        if gunws[0].get('_type') == 'S1-GUNW-MERGED':
            prefix = AOI_TRACK_MERGED_PREFIX
        build_validated_product.build(gunws, AOI_TRACK_VERSION, prefix, aoi, get_track(gunws[0]), get_orbit(gunws[0]))

    def tag_obj(self, obj, tag):
        '''tags the object with the given tag'''
        uid = obj.get('_source').get('id')
        prod_type = obj.get('_type')
        index = obj.get('_index')
        tagger.add_tag(index, uid, prod_type, tag)

    def remove_obj_tag(self, obj, tag):
        '''removes the tag from the given object'''
        uid = obj.get('_source').get('id')
        prod_type = obj.get('_type')
        index = obj.get('_index')
        tagger.remove_tag(index, uid, prod_type, tag)

    def get_matching_acq_lists(self, aoi, audit_trail_list, greylist_hashes):
        '''returns all acquisition lists matching the audit trail products under the given aoi,
        excluding any whose full_id_hash appears in greylist_hashes'''
        aoi_met = aoi.get('_source', {}).get('metadata', {})
        start = aoi_met.get('starttime', False)
        end = aoi_met.get('endtime', False)
        location = aoi.get('_source', {}).get('location', False)
        audit_dct = sort_by_hash(audit_trail_list)
        matching = []
        all_acq_lists = get_objects('S1-GUNW-acq-list', starttime=start, endtime=end, location=location)
        for acq_list in all_acq_lists:
            hsh = get_hash(acq_list)
            if audit_dct.get(hsh, False) and hsh not in greylist_hashes:
                matching.append(acq_list)
        return matching

    def aoi_track_is_published(self, gunws, aoi_id):
        '''determines if the aoi_track product is published already. Returns True/False'''
        gunw = gunws[-1]
        gunw_type = gunw.get('_type')
        prod_type = 'S1-GUNW-AOI_TRACK'
        if gunw_type == 'S1-GUNW-MERGED':
            prod_type = 'S1-GUNW-MERGED-AOI_TRACK'
        matches = get_objects(prod_type, track_number=get_track(gunw), orbit_numbers=gunw.get('_source').get('metadata').get('orbit_number'), aoi=aoi_id)
        if matches:
            return True
        return False
def get_objects(prod_type, location=False, starttime=False, endtime=False, full_id_hash=False, track_number=False, orbit_numbers=False, version=False, uid=False, aoi=False):
    '''returns all objects of the object type that intersect both
    temporally and spatially with the aoi.

    All filter parameters default to False (meaning "not constrained").
    Raises RuntimeError when a required product type query returns nothing.
    '''
    idx = INDEX_MAPPING.get(prod_type) # mapping of the product type to the index
    print_query(prod_type, location, starttime, endtime, full_id_hash, track_number, orbit_numbers, version, uid, aoi)
    # derive the GRQ elasticsearch endpoint from the configured URL
    grq_ip = app.conf['GRQ_ES_URL'].replace(':9200', '').replace('http://', 'https://')
    grq_url = '{0}/es/{1}/_search'.format(grq_ip, idx)
    filtered = {}
    must = []
    if location:
        # spatial constraint (ES "filtered" query syntax)
        filtered["query"] = {"geo_shape": {"location": {"shape": location}}}
    if starttime or endtime or full_id_hash or track_number or version or uid or aoi:
        must = []
        if starttime:
            # product must END after the window start (temporal overlap)
            must.append({"range": {"endtime": {"from": starttime}}})
        if endtime:
            # product must START before the window end
            must.append({"range": {"starttime": {"to": endtime}}})
        if full_id_hash:
            if isinstance(full_id_hash, list):
                must.append({"terms": {"metadata.full_id_hash.raw": full_id_hash}})
            else:
                must.append({"term": {"metadata.full_id_hash.raw": full_id_hash}})
        if track_number:
            must.append({"match_phrase": {"metadata.track_number": track_number}})
        if version:
            must.append({"term": {"version.raw": version}})
        if uid:
            must.append({"term": {"id.raw": uid}})
        if orbit_numbers:
            #determine the proper field type & append all orbits
            orbit_term = resolve_orbit_field(prod_type)
            if isinstance(orbit_term, list):
                # reference/secondary orbits which need to be specified separately
                must.append({"term":{"metadata.{}".format(orbit_term[0]): sorted(orbit_numbers)[0]}})
                must.append({"term":{"metadata.{}".format(orbit_term[1]): sorted(orbit_numbers)[1]}})
            else:
                for orbit in orbit_numbers:
                    must.append({"term":{"metadata.{}".format(orbit_term): orbit}})
        if aoi:
            must.append({"match_phrase": {"metadata.aoi.raw": aoi}})
        filtered["filter"] = {"bool":{"must":must}}
    if location:
        # spatial queries use the filtered form with an explicit page size
        grq_query = {"query": {"filtered": filtered}, "from": 0, "size": 1000}
    else:
        grq_query = {"query": {"bool":{"must": must}}}
    #print(grq_url)
    #print(grq_query)
    results = query_es(grq_url, grq_query)
    # if it's an orbit, filter out the bad orbits client-side
    #if orbit_numbers:
    #    orbit_key = stringify_orbit(orbit_numbers)
    #    results = sort_by_orbit(results).get(orbit_key, [])
    print('found {} {} products matching query.'.format(len(results), prod_type))
    if prod_type in ["S1-GUNW-acqlist-audit_trail", "S1-GUNW-acq-list"] and len(results) == 0:
        # these two types are required inputs downstream; fail loudly with the query attached
        raise RuntimeError("0 matching found for {} with full_id_hash {} in {} with query :\n{}".format(prod_type, full_id_hash, grq_url, json.dumps(grq_query)))
    #print(results)
    return results
def print_query(prod_type, location=False, starttime=False, endtime=False, full_id_hash=False, track_number=False, orbit_numbers=False, version=False, uid=False, aoi=False):
    '''print statement describing grq query'''
    lines = ['Querying for products of type: {}'.format(prod_type)]
    if location:
        lines.append('with location: {}'.format(location))
    if starttime:
        lines.append('with starttime: {}'.format(starttime))
    if endtime:
        lines.append('with endtime : {}'.format(endtime))
    if full_id_hash:
        lines.append('with full_id_hash: {}'.format(full_id_hash))
    if track_number:
        lines.append('with track_number: {}'.format(track_number))
    if orbit_numbers:
        lines.append('with orbits: {}'.format(', '.join([str(num) for num in orbit_numbers])))
    if version:
        lines.append('with version: {}'.format(version))
    if uid:
        lines.append('with uid: {}'.format(uid))
    if aoi:
        lines.append('with metadata.aoi: {}'.format(aoi))
    print('\n'.join(lines))
def load_context():
    '''loads the _context.json file from the work directory into a dict.

    Returns:
        dict: the parsed job context.

    Raises:
        Exception: if the file is missing or is not valid JSON.
    '''
    context_file = '_context.json'
    try:
        with open(context_file, 'r') as fin:
            return json.load(fin)
    except (OSError, ValueError) as err:
        # fixed: was a bare `except:` that hid the real cause (and swallowed
        # KeyboardInterrupt); chain the original error for debuggability
        raise Exception('unable to parse _context.json from work directory') from err
def query_es(grq_url, es_query):
    '''
    Runs the query through Elasticsearch, iterates until
    all results are generated, & returns the compiled result.

    Mutates es_query in place (sets/advances 'size' and 'from' for paging).
    Raises requests.HTTPError on a non-2xx response.
    '''
    print("query_es query: \n{}".format(json.dumps(es_query)))
    iterator_size = es_query.get('size', 10)
    es_query['size'] = iterator_size
    es_query['from'] = es_query.get('from', 0)
    # fixed: added the same timeout the paginated requests already used
    response = requests.post(grq_url, data=json.dumps(es_query), timeout=60, verify=False)
    response.raise_for_status()
    # fixed: json.loads no longer accepts an `encoding` argument (removed in
    # Python 3.9); the response text is already decoded
    results = json.loads(response.text)
    results_list = results.get('hits', {}).get('hits', [])
    # NOTE(review): assumes ES<7 where hits.total is an int — on ES7+ it is a
    # dict and this range() would fail; confirm against the cluster version
    total_count = results.get('hits', {}).get('total', 0)
    for page_start in range(iterator_size, total_count, iterator_size):
        es_query['from'] = page_start
        response = requests.post(grq_url, data=json.dumps(es_query), timeout=60, verify=False)
        response.raise_for_status()
        results = json.loads(response.text)
        results_list.extend(results.get('hits', {}).get('hits', []))
    return results_list
def sort_by_orbit(es_result_list):
    '''
    Groups the ES hits into a dict keyed by their (stringified) orbit.
    '''
    grouped = {}
    for hit in es_result_list:
        grouped.setdefault(get_orbit(hit), []).append(hit)
    print("sort_by_orbit : orbits found : {}".format(grouped.keys()))
    return grouped
def sort_by_hash(es_results_list):
    '''
    Groups the ES hits into a dict keyed by full_id_hash (generated when absent).
    '''
    grouped = {}
    for hit in es_results_list:
        grouped.setdefault(get_hash(hit), []).append(hit)
    print("sort_by_hash : hash found : {}".format(grouped.keys()))
    return grouped
def sort_by_track(es_result_list):
    '''
    Groups the ES hits into a dict keyed by track.
    '''
    grouped = {}
    for hit in es_result_list:
        grouped.setdefault(get_track(hit), []).append(hit)
    print("sort_by_track : tracks found : {}".format(grouped.keys()))
    return grouped
def sort_by_aoi(es_result_list):
    '''
    Groups the ES hits into a dict keyed by aoi_id.
    metadata.aoi may be a single id or a list/tuple of ids; hits with no
    aoi metadata are skipped.
    '''
    grouped = {}
    for hit in es_result_list:
        aoi_field = hit.get('_source', {}).get('metadata', {}).get('aoi', False)
        if not aoi_field:
            continue
        aoi_ids = aoi_field if isinstance(aoi_field, (list, tuple)) else [aoi_field]
        for aoi_id in aoi_ids:
            grouped.setdefault(aoi_id, []).append(hit)
    print("sort_by_aoi : aois found : {}".format(grouped.keys()))
    return grouped
def get_track(es_obj):
    '''returns the track from the elasticsearch object, checking _source
    first and then _source.metadata, under several known key spellings'''
    source = es_obj.get('_source', {})
    key_options = ('track_number', 'track', 'trackNumber', 'track_Number')
    for container in (source, source.get('metadata', {})):
        for key in key_options:
            value = container.get(key, False)
            if value:
                return value
    raise Exception('unable to find track for: {}'.format(es_obj.get('_id', '')))
def get_orbit(es_obj):
    '''returns the orbit as a string from the elasticsearch object, checking
    _source first and then _source.metadata'''
    source = es_obj.get('_source', {})
    key_options = ('orbit_number', 'orbitNumber', 'orbit')
    for container in (source, source.get('metadata', {})):
        for key in key_options:
            value = container.get(key, False)
            if value:
                return stringify_orbit(value)
    raise Exception('unable to find orbit for: {}'.format(es_obj.get('_id', '')))
def get_hash(es_obj):
    '''retrieves metadata.full_id_hash; when absent, attempts to
    generate one via gen_hash'''
    existing = es_obj.get('_source', {}).get('metadata', {}).get('full_id_hash', False)
    return existing if existing else gen_hash(es_obj)
def gen_hash(es_obj):
    '''copy of the full_id_hash computation used in the enumerator:
    md5 over the json-encoded pair of sorted, space-joined scene-id strings'''
    def _join_ids(slcs):
        # sorted scene ids joined by single spaces; tuple/list entries
        # contribute their first element
        parts = []
        for slc in sorted(slcs):
            if isinstance(slc, (tuple, list)):
                slc = slc[0]
            parts.append(slc)
        return ' '.join(parts)

    met = es_obj.get('_source', {}).get('metadata', {})
    master_slcs = met.get('master_scenes', met.get('reference_scenes', False))
    slave_slcs = met.get('slave_scenes', met.get('secondary_scenes', False))
    payload = json.dumps([_join_ids(master_slcs), _join_ids(slave_slcs)])
    return hashlib.md5(payload.encode("utf8")).hexdigest()
def sort_duplicates_by_hash(es_results_list):
    '''
    Builds a dict keyed by full_id_hash, keeping only the most recently
    ingested product when several share the same hash.
    '''
    deduped = {}
    for result in es_results_list:
        idhash = get_hash(result)
        if idhash not in deduped:
            deduped[idhash] = result
            continue
        print('found duplicate gunws: {}, {}'.format(result.get('_source').get('id'), deduped[idhash].get('_source').get('id')))
        deduped[idhash] = get_most_recent(result, deduped[idhash])
    return deduped
def filter_hashes(es_results_list, full_id_hash_list):
    '''
    Keeps only the hits whose full_id_hash appears in full_id_hash_list.
    Returns the filtered list.
    '''
    return [hit for hit in es_results_list if get_hash(hit) in full_id_hash_list]
def get_most_recent(obj1, obj2):
    '''returns whichever object has the later creation_timestamp (ingest time)'''
    parse = dateutil.parser.parse
    first = parse(obj1.get('_source', {}).get('creation_timestamp', False))
    second = parse(obj2.get('_source', {}).get('creation_timestamp', False))
    return obj1 if first > second else obj2
def resolve_orbit_field(prod_type):
    '''maps a product type to its orbit metadata field name(s);
    returns False for unknown product types'''
    return {
        'S1-GUNW-acq-list': 'orbitNumber',
        'S1-GUNW': 'orbit_number',
        'S1-GUNW-MERGED': 'orbit_number',
        'S1-GUNW-acqlist-audit_trail': ['secondary_orbit.raw', 'reference_orbit.raw'],
        'S1-GUNW-AOI_TRACK': 'orbit',
        'S1-GUNW-MERGED-AOI_TRACK': 'orbit',
    }.get(prod_type, False)
def stringify_orbit(orbit_list):
    '''converts the orbit-number list into a sorted, zero-padded,
    underscore-joined string (e.g. [123, 5] -> "005_123")'''
    if len(orbit_list) == 0:
        raise RuntimeError("Orbit List is EMPTY")
    padded = [str(orbit).zfill(3) for orbit in sorted(orbit_list)]
    return '_'.join(padded)
def get_version(es_obj):
    '''returns the version of the index. Since we are ignoring the subversions,
    only returns the main version. eg, v2.0.1 returns v2.0

    Raises:
        ValueError: if the version string is missing or unparseable.
    '''
    version_str = es_obj.get('_source', {}).get('version', '')
    # fixed: the old pattern used ([0-9])* which captures only the LAST digit
    # of each group, so multi-digit versions like v12.3 came back as v2.3;
    # it also passed False to re.search when 'version' was missing
    match = re.search(r'^(v?)(\d+)\.(\d+)', version_str)
    if not match:
        raise ValueError('unable to parse version from: {}'.format(version_str))
    return '{}{}.{}'.format(match.group(1), match.group(2), match.group(3))
if __name__ == '__main__':
    try:
        # constructing the evaluate object runs the whole evaluation
        evaluate()
    except Exception as e:
        # record the error & traceback for the job framework before re-raising
        with open('_alt_error.txt', 'w') as f:
            f.write("%s\n" % str(e))
        with open('_alt_traceback.txt', 'w') as f:
            f.write("%s\n" % traceback.format_exc())
        raise
    sys.exit(0)
| 48.620529
| 180
| 0.624892
|
acfe5fe3a35f4d19d1041e7e5344a9b5914b4851
| 610
|
py
|
Python
|
target_snowflake/upload_clients/base_upload_client.py
|
dext/pipelinewise-target-snowflake
|
8e92166771dcc35302c5d44cc60660cbefdd3564
|
[
"Apache-2.0"
] | null | null | null |
target_snowflake/upload_clients/base_upload_client.py
|
dext/pipelinewise-target-snowflake
|
8e92166771dcc35302c5d44cc60660cbefdd3564
|
[
"Apache-2.0"
] | null | null | null |
target_snowflake/upload_clients/base_upload_client.py
|
dext/pipelinewise-target-snowflake
|
8e92166771dcc35302c5d44cc60660cbefdd3564
|
[
"Apache-2.0"
] | null | null | null |
"""
Base class for upload clients
"""
from abc import ABC, abstractmethod
from singer import get_logger
class BaseUploadClient(ABC):
    """
    Abstract class for upload clients
    """

    def __init__(self, connection_config):
        # connection settings shared by all concrete upload clients
        self.connection_config = connection_config
        self.logger = get_logger('target_snowflake')

    @abstractmethod
    def upload_file(self, file: str, stream: str, temp_dir: str = None) -> None:
        """
        Upload file.

        :param file: path of the local file to upload
        :param stream: name of the stream the file belongs to
        :param temp_dir: optional temporary directory to use during upload
        """

    @abstractmethod
    def delete_objects(self, stream: str, key: str) -> None:
        """
        Delete object.

        :param stream: name of the stream
        :param key: key of the object to delete
        """
| 22.592593
| 80
| 0.627869
|
acfe60c22ccc60a0dcf7bc33493ce8f3aea7f7cb
| 3,290
|
py
|
Python
|
api-client/pangea_api/contrib/tagging/tag.py
|
LongTailBio/pangea-django
|
630551dded7f9e38f95eda8c36039e0de46961e7
|
[
"MIT"
] | null | null | null |
api-client/pangea_api/contrib/tagging/tag.py
|
LongTailBio/pangea-django
|
630551dded7f9e38f95eda8c36039e0de46961e7
|
[
"MIT"
] | 27
|
2020-03-26T02:55:12.000Z
|
2022-03-12T00:55:04.000Z
|
api-client/pangea_api/contrib/tagging/tag.py
|
LongTailBio/pangea-django
|
630551dded7f9e38f95eda8c36039e0de46961e7
|
[
"MIT"
] | 1
|
2021-09-14T08:15:54.000Z
|
2021-09-14T08:15:54.000Z
|
from ...remote_object import RemoteObject
from ...sample import Sample
from ...sample_group import SampleGroup
from ...blob_constructors import sample_from_blob, sample_group_from_blob
from ...utils import paginated_iterator
class Tag(RemoteObject):
    """Client-side handle for a Pangea contrib Tag.

    A tag can be attached to Samples, SampleGroups, or other Tags; each
    attachment may carry an optional payload string.
    """
    # fields mirrored from the server-side record
    remote_fields = [
        'uuid',
        'created_at',
        'updated_at',
        'name',
        'payload',
    ]
    # tags have no parent object in the remote hierarchy
    parent_field = None

    def __init__(self, knex, name, payload=""):
        super().__init__(self)
        self.knex = knex  # API connection used for all requests
        self.name = name
        self.payload = payload

    def _save(self):
        """Push the locally-set remote fields to the server (PUT)."""
        data = {
            field: getattr(self, field)
            for field in self.remote_fields if hasattr(self, field)
        }
        url = f'contrib/tags/{self.uuid}'
        self.knex.put(url, json=data)

    def _get(self):
        """Fetch the result from the server."""
        blob = self.get_cached_blob()
        if not blob:
            # cache miss: look the tag up by name and cache the response
            blob = self.knex.get(f'contrib/tags/name/{self.name}')
            self.load_blob(blob)
            self.cache_blob(blob)
        else:
            self.load_blob(blob)

    def _create(self):
        """Create the tag on the server (POST) and load the returned blob."""
        data = {
            'name': self.name,
            'payload': self.payload,
        }
        url = 'contrib/tags/?format=json'
        blob = self.knex.post(url, json=data)
        self.load_blob(blob)

    def __call__(self, other, payload=""):
        # calling a tag is shorthand for tagging another object
        return self.tag(other, payload=payload)

    def tag(self, other, payload=""):
        """Attach this tag to another Tag, Sample, or SampleGroup.

        Silently does nothing (returns None) for unsupported types.
        """
        self.idem()
        if isinstance(other, Tag):
            return self._tag_tag(other, payload=payload)
        if isinstance(other, Sample):
            return self._tag_sample(other, payload=payload)
        if isinstance(other, SampleGroup):
            return self._tag_sample_group(other, payload=payload)

    def _tag_tag(self, tag, payload=""):
        # link tag -> tag
        url = f'contrib/tags/{self.uuid}/tags'
        data = {'tag_uuid': tag.uuid, 'payload': payload}
        self.knex.post(url, json=data)

    def _tag_sample(self, sample, payload=""):
        # link tag -> sample
        url = f'contrib/tags/{self.uuid}/samples'
        data = {'sample_uuid': sample.uuid, 'payload': payload}
        self.knex.post(url, json=data)

    def _tag_sample_group(self, sample_group, payload=""):
        # link tag -> sample group
        url = f'contrib/tags/{self.uuid}/sample_groups'
        data = {'sample_group_uuid': sample_group.uuid, 'payload': payload}
        self.knex.post(url, json=data)

    def get_samples(self):
        """Yield every Sample carrying this tag (paginated)."""
        url = f'contrib/tags/{self.uuid}/samples'
        for sample_blob in paginated_iterator(self.knex, url):
            yield sample_from_blob(self.knex, sample_blob)

    def get_sample_groups(self):
        """Yield every SampleGroup carrying this tag (paginated)."""
        url = f'contrib/tags/{self.uuid}/sample_groups'
        for sample_group_blob in paginated_iterator(self.knex, url):
            yield sample_group_from_blob(self.knex, sample_group_blob)

    def get_random_samples(self, n=100):
        """Yield up to *n* randomly selected Samples carrying this tag."""
        url = f'contrib/tags/{self.uuid}/random_samples?n={n}'
        response = self.knex.get(url)
        for sample_blob in response['results']:
            yield sample_from_blob(self.knex, sample_blob)

    def __str__(self):
        return f'<Pangea::Contrib::Tag {self.name} {self.uuid} />'

    def __repr__(self):
        return str(self)

    def pre_hash(self):
        # identity key used by RemoteObject for hashing/caching
        return 'TAG' + self.name
| 31.941748
| 75
| 0.609726
|
acfe60c34889e4194ab036c7583edae7109958f1
| 712
|
py
|
Python
|
greenberry/math.py
|
5aumy4/greenBerry
|
d0e292bdb19a554355a0b022118bbfb92f8eba87
|
[
"Apache-2.0"
] | 56
|
2018-01-01T16:12:03.000Z
|
2022-01-19T06:32:13.000Z
|
greenberry/math.py
|
5aumy4/greenBerry
|
d0e292bdb19a554355a0b022118bbfb92f8eba87
|
[
"Apache-2.0"
] | 49
|
2018-01-02T09:04:40.000Z
|
2022-02-14T14:44:55.000Z
|
greenberry/math.py
|
5aumy4/greenBerry
|
d0e292bdb19a554355a0b022118bbfb92f8eba87
|
[
"Apache-2.0"
] | 50
|
2018-01-01T17:01:17.000Z
|
2022-02-14T14:45:28.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 9 11:02:15 2018
@author: KraftyCoder
Notes : All Rights Reserved
see theory_notes_simple.py in folder 'greenberry'
"""
# module-level constant; presumably a user-facing salutation -- TODO confirm usage
L_User = "dear berry"


class M:  # math
    """Keyword names for the greenBerry math operations."""
    ODD = "odd"
    EVEN = "even"
    PRIME = "prime"
    PI = "pi"
    LOG = "log"
    LOG10 = "log10"  # log(x) returns the logarithm value of x
    SQROOT = "sqroot"  # sqroot(x) should return square root value of x
    SQ = "square"  # square(x) returns square value of x
    TAN = "tan"  # tan(x) returns tangent value of x
    COS = "cos"  # cos(x) returns cosine value of x
    SIN = "sin"  # sin(x) returns sine value of x
    HYP = "hyp"  # returns sqroot(x**2 + y**2), takes in two parameters
| 27.384615
| 71
| 0.613764
|
acfe616ff416c15338d2bbfce7e7e3f3e756beb4
| 10,338
|
py
|
Python
|
custom_components/sonoff/sonoff_main.py
|
womeimingzi11/SonoffLAN
|
a28d69faf5a4d881d7bfd081727a1e27be154e9e
|
[
"MIT"
] | 1
|
2021-03-27T17:32:12.000Z
|
2021-03-27T17:32:12.000Z
|
custom_components/sonoff/sonoff_main.py
|
womeimingzi11/SonoffLAN
|
a28d69faf5a4d881d7bfd081727a1e27be154e9e
|
[
"MIT"
] | null | null | null |
custom_components/sonoff/sonoff_main.py
|
womeimingzi11/SonoffLAN
|
a28d69faf5a4d881d7bfd081727a1e27be154e9e
|
[
"MIT"
] | null | null | null |
import asyncio
import json
import logging
import os
import time
from typing import Optional, List, Callable
from aiohttp import ClientSession
from homeassistant.const import ATTR_BATTERY_LEVEL
from .sonoff_cloud import EWeLinkCloud
from .sonoff_local import EWeLinkLocal
_LOGGER = logging.getLogger(__name__)

# device state keys that are surfaced as entity attributes (see get_attrs)
ATTRS = ('local', 'cloud', 'rssi', 'humidity', 'temperature', 'power',
         'current', 'voltage', 'consumption', 'water', ATTR_BATTERY_LEVEL)
def load_cache(filename: str):
    """Load the device list from a JSON cache file.

    Returns the parsed content, or None when the file does not exist or
    cannot be read/parsed.
    """
    if not os.path.isfile(filename):
        return None
    try:
        with open(filename, 'rt', encoding='utf-8') as f:
            return json.load(f)
    except (OSError, ValueError):
        # fixed: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; JSONDecodeError is a ValueError subclass
        _LOGGER.error("Can't read cache file.")
        return None
def save_cache(filename: str, data: dict):
    """Persist the device list to file as compact UTF-8 JSON."""
    serialized = json.dumps(data, ensure_ascii=False, separators=(',', ':'))
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(serialized)
def get_attrs(state: dict) -> dict:
    """Return only the keys of *state* that are listed in ATTRS."""
    picked = {}
    for key in ATTRS:
        if key in state:
            picked[key] = state[key]
    return picked
class EWeLinkRegistry:
    """Tracks eWeLink devices and routes commands and state updates over
    both the cloud connection and the local (LAN) connection.

    Each entry of ``devices`` is a per-device dict:

    device:
      params: dict, init state
      uiid: Union[int, str], cloud or local type (strip, plug, light, rf)
      extra: dict, device manufacturer and model
      online: bool, cloud online state
      host: str, local IP (local online state)
      handlers: list, update handlers
    """
    devices: Optional[dict] = None

    def __init__(self, session: ClientSession):
        self.cloud = EWeLinkCloud(session)
        self.local = EWeLinkLocal(session)
        # Pending switch states for bulk commands.  This used to be a
        # mutable CLASS attribute, which would silently share pending
        # command state between registry instances; keep it per-instance.
        self.bulk_params: dict = {}

    def _registry_handler(self, deviceid: str, state: dict, sequence: str):
        """Feedback from local and cloud connections.

        :param deviceid: example `1000abcdefg`
        :param state: example `{'switch': 'on'}`
        :param sequence: message serial number to verify uniqueness
        """
        device: dict = self.devices.get(deviceid)
        if not device:
            _LOGGER.warning(f"Unknown deviceid: {deviceid}")
            return

        # skip update with same sequence (from cloud and local or from local)
        if sequence:
            sequence = int(sequence)
            ts = time.time()
            # skip same and lower sequence in last 10 seconds
            if ('seq' in device and ts - device['seq_ts'] < 10 and
                    sequence <= device['seq']):
                _LOGGER.debug("Skip update with same sequence")
                return
            device['seq'] = sequence
            device['seq_ts'] = ts

        # check when cloud offline first time
        if state.get('cloud') == 'offline' and device.get('host'):
            coro = self.local.check_offline(deviceid)
            asyncio.create_task(coro)

        if 'handlers' in device:
            # TODO: right place?
            # The entity is available if reachable through either transport.
            device['available'] = device.get('online') or device.get('host')

            attrs = get_attrs(state)
            try:
                for handler in device['handlers']:
                    handler(state, attrs)
            except Exception as e:
                _LOGGER.exception(f"Registry update error: {e}")

    def concat_devices(self, newdevices: dict):
        """Concat current device list with new device list."""
        if self.devices:
            for deviceid, devicecfg in newdevices.items():
                if deviceid in self.devices:
                    self.devices[deviceid].update(devicecfg)
                else:
                    self.devices[deviceid] = devicecfg
        else:
            self.devices = newdevices

    def cache_load_devices(self, cachefile: str):
        """Load devices from cache."""
        self.devices = load_cache(cachefile)

    async def cloud_login(self, username: str, password: str):
        """Authenticate against the eWeLink cloud."""
        return await self.cloud.login(username, password)

    async def cloud_load_devices(self, cachefile: str = None):
        """Load devices list from Cloud Servers, optionally caching it."""
        newdevices = await self.cloud.load_devices()
        if newdevices is not None:
            newdevices = {p['deviceid']: p for p in newdevices}
            if cachefile:
                save_cache(cachefile, newdevices)
            self.devices = newdevices

    async def cloud_start(self):
        """Start the cloud connection, feeding updates into the registry."""
        if self.devices is None:
            self.devices = {}
        await self.cloud.start([self._registry_handler], self.devices)

    async def local_start(self, handlers: List[Callable], zeroconf):
        """Start LAN discovery; the registry handler is always appended."""
        if self.devices is None:
            self.devices = {}

        if handlers:
            handlers.append(self._registry_handler)
        else:
            handlers = [self._registry_handler]

        self.local.start(handlers, self.devices, zeroconf)

    async def stop(self, *args):
        # TODO: do something
        pass

    async def send(self, deviceid: str, params: dict):
        """Send command to device, preferring the local connection and
        falling back to the cloud when the local send fails.
        """
        seq = str(int(time.time() * 1000))

        device: dict = self.devices[deviceid]
        can_local = self.local.started and device.get('host')
        can_cloud = self.cloud.started and device.get('online')

        state = {}
        if can_local and can_cloud:
            # try to send a command locally (wait no more than a second)
            state['local'] = await self.local.send(deviceid, params, seq, 1)

            # otherwise send a command through the cloud
            if state['local'] != 'online':
                state['cloud'] = await self.cloud.send(deviceid, params, seq)
                if state['cloud'] != 'online':
                    coro = self.local.check_offline(deviceid)
                    asyncio.create_task(coro)
        elif can_local:
            state['local'] = await self.local.send(deviceid, params, seq, 5)
            if state['local'] != 'online':
                coro = self.local.check_offline(deviceid)
                asyncio.create_task(coro)
        elif can_cloud:
            state['cloud'] = await self.cloud.send(deviceid, params, seq)
        else:
            return

        # update device attrs
        self._registry_handler(deviceid, state, None)

    async def bulk(self, deviceid: str, params: dict):
        """For bulk send switches command. You cannot send two commands
        simultaneously to different channels. This causes errors on local and
        cloud connections. The first caller buffers briefly and sends the
        merged switch list; later callers just append their channels.

        https://github.com/AlexxIT/SonoffLAN/issues/139
        https://github.com/AlexxIT/SonoffLAN/issues/151
        """
        assert 'switches' in params, params

        if deviceid not in self.bulk_params:
            self.bulk_params[deviceid] = params
            await asyncio.sleep(0.1)
            return await self.send(deviceid, self.bulk_params.pop(deviceid))
        else:
            self.bulk_params[deviceid]['switches'] += params['switches']
class EWeLinkDevice:
    """Base helper for an entity backed by a single eWeLink device
    (optionally restricted to a subset of its switch channels).
    """
    registry: EWeLinkRegistry = None
    deviceid: str = None
    channels: list = None

    _attrs: dict = None
    _name: str = None
    _is_on: bool = None
    _is_th_3_4_0: bool = False

    def __init__(self, registry: EWeLinkRegistry, deviceid: str,
                 channels: list = None):
        self.registry = registry
        self.deviceid = deviceid
        self.channels = channels

    def _init(self, force_refresh: bool = True) -> dict:
        """Resolve the display name, read the initial state and register
        the update handler.  Returns the registry's device dict.
        """
        device: dict = self.registry.devices[self.deviceid]

        # Assign the device name only at this stage so that `entity_id`
        # becomes "sonoff_{unique_id}".  If the name were assigned in the
        # constructor, the (possibly non-Latin) device name would end up
        # inside `entity_id`.
        # TODO: fix init name
        if self.channels and len(self.channels) == 1:
            ch = str(self.channels[0] - 1)
            self._name = device.get('tags', {}).get('ck_channel_name', {}). \
                get(ch) or device.get('name')
        else:
            self._name = device.get('name')

        state = device['params']
        self._attrs = device['extra'] or {}
        # don't know if deviceType only in Sonoff TH
        # https://github.com/AlexxIT/SonoffLAN/issues/158
        self._is_th_3_4_0 = 'deviceType' in state

        if force_refresh:
            attrs = get_attrs(state)
            self._update_handler(state, attrs)

        # init update_handler
        device['handlers'].append(self._update_handler)

        return device

    def _is_on_list(self, state: dict) -> List[bool]:
        """Return one on/off boolean per configured channel."""
        if self.channels:
            # very rarely channels can be reversed
            # https://github.com/AlexxIT/SonoffLAN/issues/146
            return [
                switch['switch'] == 'on'
                for switch in state['switches']
                if switch['outlet'] + 1 in self.channels
            ]
        else:
            return [state['switch'] == 'on']

    def _update_handler(self, state: dict, attrs: dict):
        # BUGFIX: was `raise NotImplemented` -- NotImplemented is a sentinel
        # value, not an exception, so raising it produced a confusing
        # TypeError instead of the intended abstract-method error.
        raise NotImplementedError

    async def _turn_on(self):
        """Switch the configured channels (or the whole device) on."""
        if self.channels:
            switches = [
                {'outlet': channel - 1, 'switch': 'on'}
                for channel in self.channels
            ]
            await self.registry.bulk(self.deviceid, {'switches': switches})
        elif self._is_th_3_4_0:
            # Sonoff TH needs mainSwitch/deviceType alongside switch
            await self.registry.send(self.deviceid, {
                'switch': 'on', 'mainSwitch': 'on', 'deviceType': 'normal'})
        else:
            await self.registry.send(self.deviceid, {'switch': 'on'})

    async def _turn_off(self):
        """Switch the configured channels (or the whole device) off."""
        if self.channels:
            switches = [
                {'outlet': channel - 1, 'switch': 'off'}
                for channel in self.channels
            ]
            await self.registry.bulk(self.deviceid, {'switches': switches})
        elif self._is_th_3_4_0:
            await self.registry.send(self.deviceid, {
                'switch': 'off', 'mainSwitch': 'off', 'deviceType': 'normal'})
        else:
            await self.registry.send(self.deviceid, {'switch': 'off'})

    async def _turn_bulk(self, channels: dict):
        """Turn the given channels on or off in a single command.

        :param channels: dict mapping channel number -> bool (True = on)
        """
        switches = [
            {'outlet': channel - 1, 'switch': 'on' if switch else 'off'}
            for channel, switch in channels.items()
        ]
        await self.registry.send(self.deviceid, {'switches': switches})
| 34.46
| 78
| 0.587444
|
acfe61904c96d94c9c05c1cd829ebd977256acc9
| 5,176
|
py
|
Python
|
dotapatch/__main__.py
|
arthurazs/dotapatch
|
1b9db278d0d418124bfd3e66fb288ee81cbd66ec
|
[
"MIT"
] | 12
|
2018-03-02T10:03:14.000Z
|
2019-05-30T17:56:09.000Z
|
dotapatch/__main__.py
|
arthurazs/dotapatch
|
1b9db278d0d418124bfd3e66fb288ee81cbd66ec
|
[
"MIT"
] | 28
|
2016-11-07T16:16:24.000Z
|
2018-07-30T21:16:54.000Z
|
dotapatch/__main__.py
|
arthurazs/dotapatch
|
1b9db278d0d418124bfd3e66fb288ee81cbd66ec
|
[
"MIT"
] | 2
|
2018-03-29T07:02:14.000Z
|
2018-12-01T16:00:35.000Z
|
'''dotapatch's entry point'''
from __future__ import absolute_import
import os.path as path
from argparse import ArgumentParser
from logging import DEBUG, StreamHandler, Formatter, FileHandler
from logging import getLogger as get_logger, getLevelName as get_level
from dotapatch.version import __version__
from dotapatch.patch import parse
from dotapatch.data import HeropediaData
def get_parser():
    '''Creates app's arguments.

    Returns
    -------
    parser : ArgumentParser
        Parser with all arguments.
    '''
    cli = ArgumentParser(
        prog='dotapatch', description='Parses Dota 2 text patches to html'
        ' format.')

    # positional + general options
    cli.add_argument(
        'changelogs', action='store', help='changelog to be formated',
        metavar='changelog_file', nargs='*', default=None)
    cli.add_argument(
        '-t', '--template', action='store', dest='template',
        default='default', help='base template to generate HTML',
        metavar='template_file')
    cli.add_argument(
        '-u', '--update-data', action='store_true', dest='update',
        help='force heropediadata update')
    cli.add_argument(
        '-V', '--version', action='version',
        version='%(prog)s: v{}'.format(__version__))

    # verbosity / log handling
    logging_group = cli.add_argument_group('logging arguments')
    logging_group.add_argument(
        '-d', '--debug', help='set verbosity level to DEBUG',
        action='store_const', dest='log_level', const='DEBUG',
        default='INFO')
    logging_group.add_argument(
        '-s', '--save-log', help='save log output',
        action='store_true', dest='save_log')
    logging_group.add_argument(
        '-q', '--quiet', help='less verbose',
        action='store_const', dest='log_level', const='ERROR')

    return cli
def dotapatch(
        changelogs, template='default', update=False):
    '''Dotapatch's core.

    Get the arguments, initializes logging, parses the changelogs.

    Parameters
    ----------
    changelogs : list
        Changelog to be parsed.
        It can be either the filename or the absolute_filepath/filename.
    template : str (optional, 'default')
        Template to be loaded.
        It can be either the template name or the absolute_path/template.
    update : bool (optional, False)
        Whether heropedia's data should be updated or not.

    Returns
    -------
    status : int
        Parsing status.
    '''
    if update:
        # force a fresh download of both heropediadata files
        HeropediaData._download_file(HeropediaData.ITEM_DATA)
        HeropediaData._download_file(HeropediaData.HERO_DATA)

    status = 0
    if changelogs:
        for filename in changelogs:
            try:
                # accumulate per-file parse statuses
                status += parse(filename, template)
            except OSError as err:
                # file not found / unreadable: log a detailed hint and
                # mark the run as failed with a sentinel status
                filename = path.abspath(filename)
                logger = get_logger('dotapatch')
                logger.error('{}: {}'.format(err.__class__.__name__, err))
                error_body = '''In case {name} is in a directory other than:
{path}
Try:
1) 'cd' over to the correct directory
2) run dotapatch again
e.g.
$ cd /whole/path/to/file/
$ dotapatch {name}
or
1) run dotapatch specifying the /whole/path/to/file/{name}
e.g.
$ dotapatch /whole/path/to/file/{name}
Contact me at @arthurazs if the error persists.'''.format(
                    path=path.dirname(filename),
                    name=path.basename(filename))
                logger.warning(error_body)
                status = -13
    return status
def main(testing=False):
    '''main method.

    Calls get_parser(). If 'changelogs' is empty and 'update' is False,
    prints app usage. Otherwise calls dotapatch().

    Parameters
    ----------
    testing : bool (optional, False)
        Whether main is being called for testing or not.

    Returns
    -------
    status : int
        Parsing status.
    '''
    cli_parser = get_parser()
    options = cli_parser.parse_args()

    # tests run without any logging configured
    log_level = None if testing else options.log_level

    if log_level:
        logger = get_logger('dotapatch')
        logger.setLevel(DEBUG)

        console = StreamHandler()
        console.setLevel(get_level(log_level))
        console.setFormatter(Formatter('%(levelname)s %(message)s'))
        logger.addHandler(console)

        if options.save_log:
            log_file = FileHandler('dotapatch.log', 'w')
            log_file.setLevel(DEBUG)
            log_file.setFormatter(Formatter(
                '%(asctime)s (%(name)s, line %(lineno)d)\n'
                '%(levelname)s %(message)s\n'))
            logger.addHandler(log_file)
            logger.info('Recording log file at {}'.format(
                path.abspath('dotapatch.log')))

    if options.changelogs or options.update:
        return dotapatch(options.changelogs, options.template, options.update)

    cli_parser.print_usage()
    return 0
# Script entry point: exit the process with dotapatch's parsing status.
if __name__ == '__main__':
    raise SystemExit(main())
| 29.078652
| 76
| 0.621716
|
acfe642939549c9ca2ffa058ff16a245c24495aa
| 501
|
py
|
Python
|
episode-12/models/flat.py
|
CrispyFlowe/python-minecraft-clone
|
ef6ce2e1381b4c97f03c2106c399ed0a0729a968
|
[
"MIT"
] | 1
|
2022-02-23T06:26:41.000Z
|
2022-02-23T06:26:41.000Z
|
episode-12/models/pressure_plate.py
|
CrispyFlowe/python-minecraft-clone
|
ef6ce2e1381b4c97f03c2106c399ed0a0729a968
|
[
"MIT"
] | null | null | null |
episode-12/models/pressure_plate.py
|
CrispyFlowe/python-minecraft-clone
|
ef6ce2e1381b4c97f03c2106c399ed0a0729a968
|
[
"MIT"
] | null | null | null |
# Model definition for a flat, non-cube block (e.g. a pressure plate).
transparent = True
is_cube = False
glass = False

# no collision boxes for this model
colliders = []

# Two horizontal quads at y = -0.4375 (1/16 above the block's bottom):
# one facing up and one facing down, so the plate is visible from both sides.
vertex_positions = [
	[ 0.5, -0.4375,  0.5,  0.5, -0.4375, -0.5, -0.5, -0.4375, -0.5, -0.5, -0.4375,  0.5], # top
	[-0.5, -0.4375,  0.5, -0.5, -0.4375, -0.5,  0.5, -0.4375, -0.5,  0.5, -0.4375,  0.5], # bottom
]

# full-texture UVs for each quad
tex_coords = [
	[0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0],
	[0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0],
]

# per-vertex brightness: full for the top face, dimmed for the underside
shading_values = [
	[1.0, 1.0, 1.0, 1.0],
	[0.4, 0.4, 0.4, 0.4],
]
| 25.05
| 98
| 0.461078
|
acfe66c0cad2136728b7a721af90c4bc98852f92
| 10,506
|
py
|
Python
|
mechroutines/pf/models/blocks.py
|
sjklipp/mechdriver
|
17c3d9bc82116954b331955e87a60e9adc5e1de9
|
[
"Apache-2.0"
] | null | null | null |
mechroutines/pf/models/blocks.py
|
sjklipp/mechdriver
|
17c3d9bc82116954b331955e87a60e9adc5e1de9
|
[
"Apache-2.0"
] | null | null | null |
mechroutines/pf/models/blocks.py
|
sjklipp/mechdriver
|
17c3d9bc82116954b331955e87a60e9adc5e1de9
|
[
"Apache-2.0"
] | null | null | null |
""" Writes the MESS strings for various species and treatments
using data read from the save filesystem.
"""
import automol.combine
import mess_io
from phydat import phycon
def barrier_dat_block(ts_dct, reac_dcts, prod_dcts):
    """ Prepare the MESS data block for a reaction barrier, dispatching
        on the TS writer type stored in ts_dct['writer'].

        :return: (mess string, mdhr data, flux data dict)
    """
    flux_dat = {}  # zero for now, need to set for vrctst? chk build

    writer_typ = ts_dct['writer']
    if writer_typ == 'species_block':
        mstr, mdhr_dat = species_block(ts_dct)
    elif writer_typ == 'pst_block':
        # phase-space theory uses the side with two fragments
        side_dcts = reac_dcts if len(reac_dcts) == 2 else prod_dcts
        mstr, mdhr_dat = pst_block(ts_dct, *side_dcts)
    elif writer_typ == 'vrctst_block':
        mstr, mdhr_dat = vrctst_block(ts_dct, *reac_dcts)
    elif writer_typ == 'rpvtst_block':
        mstr, mdhr_dat = rpvtst_block(ts_dct, *reac_dcts)
    else:
        # Fail with a clear message instead of the UnboundLocalError on
        # `mstr` that an unrecognized writer type used to trigger.
        raise ValueError(
            'Unknown TS writer type: {}'.format(writer_typ))

    return mstr, mdhr_dat, flux_dat
# SINGLE SPECIES BLOCKS
def atom_block(inf_dct):
    """ Prepare the MESS species input for an atom.

        :return: (mess string, auxiliary data-file dict)
    """
    mess_str = mess_io.writer.atom(
        mass=inf_dct['mass'],
        elec_levels=inf_dct['elec_levels'])

    # atoms never carry auxiliary data files
    return mess_str, {}
def species_block(inf_dct):
    """ Prepare the MESS species input for a molecule, choosing a
        multirotor core when MDHR data is present and a rigid-rotor
        core otherwise.

        :return: (mess string, auxiliary data-file dict)
    """
    dat_dct = {}

    if inf_dct['mdhr_dat']:
        # multidimensional hindered rotor: potential goes to an aux file
        # and the rotors live inside the core, not the molecule block
        core_str = mess_io.writer.core_multirotor(
            geo=inf_dct['geom'],
            sym_factor=inf_dct['sym_factor'],
            pot_surf_file='mdhr_pot.dat',
            int_rot_str=inf_dct['mess_hr_str'],
            interp_emax=100,
            quant_lvl_emax=9)
        hind_rot_str = ''
        dat_dct['mdhr_pot.dat'] = inf_dct['mdhr_dat']
    else:
        core_str = mess_io.writer.core_rigidrotor(
            geo=inf_dct['geom'],
            sym_factor=inf_dct['sym_factor'],
            interp_emax=None)
        hind_rot_str = inf_dct['mess_hr_str']

    mol_kwargs = dict(
        core=core_str,
        elec_levels=inf_dct['elec_levels'],
        freqs=inf_dct['freqs'],
        hind_rot=hind_rot_str,
        xmat=inf_dct['xmat'],
        rovib_coups=inf_dct['rovib_coups'],
        rot_dists=inf_dct['rot_dists'],
        inf_intens=(),
        freq_scale_factor=None,
        use_harmfreqs_key=False)

    return mess_io.writer.molecule(**mol_kwargs), dat_dct
def fake_species_block(inf_dct_i, inf_dct_j):
    """ Prepare a fake species block corresponding to the
        van der Waals well between two fragments.
    """
    # Merge the electronic-structure information of the two fragments
    geom = automol.combine.fake_vdw_geometry(
        inf_dct_i['geom'], inf_dct_j['geom'])
    sym_factor = inf_dct_i['sym_factor'] * inf_dct_j['sym_factor']
    elec_levels = automol.combine.electronic_energy_levels(
        inf_dct_i['elec_levels'], inf_dct_j['elec_levels'])

    # Low fake intermolecular modes prepended to the fragment frequencies
    fake_freqs = automol.combine.fake_vdw_frequencies(
        inf_dct_i['geom'], inf_dct_j['geom'])
    freqs = fake_freqs + inf_dct_i['freqs'] + inf_dct_j['freqs']
    mess_hr_str = inf_dct_i['mess_hr_str'] + inf_dct_j['mess_hr_str']

    core_str = mess_io.writer.core_rigidrotor(
        geo=geom,
        sym_factor=sym_factor,
        interp_emax=None)

    spc_str = mess_io.writer.molecule(
        core=core_str,
        freqs=freqs,
        elec_levels=elec_levels,
        hind_rot=mess_hr_str,
        xmat=(),
        rovib_coups=(),
        rot_dists=(),
        inf_intens=(),
        freq_scale_factor=None,
        use_harmfreqs_key=False)

    # Fix? I don't think you can do multirotor for the phase space theory
    return spc_str, {}
def pst_block(ts_inf_dct, inf_dct_i, inf_dct_j):
    """ Prepare a Phase Space Theory species block from the two
        fragment info dicts plus the TS potential parameters.
    """
    # Merge fragment-level properties
    sym_factor = inf_dct_i['sym_factor'] * inf_dct_j['sym_factor']
    elec_levels = automol.combine.electronic_energy_levels(
        inf_dct_i['elec_levels'], inf_dct_j['elec_levels'])
    freqs = inf_dct_i['freqs'] + inf_dct_j['freqs']
    mess_hr_str = inf_dct_i['mess_hr_str'] + inf_dct_j['mess_hr_str']

    # Total stoichiometry of the two fragments
    stoich = automol.combine.formula_string(
        inf_dct_i['geom'], inf_dct_j['geom'])

    core_str = mess_io.writer.core_phasespace(
        geo1=inf_dct_i['geom'],
        geo2=inf_dct_j['geom'],
        sym_factor=sym_factor,
        stoich=stoich,
        pot_prefactor=ts_inf_dct['cn_pst'],
        pot_exp=ts_inf_dct['n_pst'],
        tstlvl='ej')

    spc_str = mess_io.writer.molecule(
        core=core_str,
        freqs=freqs,
        elec_levels=elec_levels,
        hind_rot=mess_hr_str,
        xmat=(),
        rovib_coups=(),
        rot_dists=(),
        inf_intens=(),
        freq_scale_factor=None,
        use_harmfreqs_key=False)

    # Need to fix
    return spc_str, {}
def tau_block(inf_dct):
    """ Write the MESS string when using the Tau Monte Carlo treatment.

        :return: (mess string, auxiliary data-file dict)
    """
    # Monte Carlo sampling data file
    tau_dat_file_name = 'tau.dat'
    dat_dct = {
        tau_dat_file_name: mess_io.writer.monte_carlo.mc_data(
            geos=inf_dct['samp_geoms'],
            enes=inf_dct['samp_enes'],
            grads=inf_dct['samp_grads'],
            hessians=inf_dct['samp_hessians'])
    }

    # Optional reference-configuration file
    have_ref = (inf_dct['ref_geom'] and inf_dct['ref_grad']
                and inf_dct['ref_hessian'])
    if have_ref:
        ref_config_file_name = 'reftau.dat'
        ref_dat_str = mess_io.writer.monte_carlo.mc_data(
            geos=inf_dct['ref_geom'],
            enes=['0.00'],
            grads=inf_dct['ref_grad'],
            hessians=inf_dct['ref_hessian'])
        # drop the header line of the generated data string
        ref_dat_str = "\n".join(ref_dat_str.splitlines()[1:])
        dat_dct[ref_config_file_name] = ref_dat_str
    else:
        ref_config_file_name = ''

    # Write the core string (seperate energies?)
    spc_str = mess_io.writer.monte_carlo.mc_species(
        geo=inf_dct['geom'],
        sym_factor=inf_dct['sym_factor'],
        elec_levels=inf_dct['elec_levels'],
        flux_mode_str=inf_dct['flux_mode_str'],
        data_file_name=tau_dat_file_name,
        reference_energy=inf_dct['reference_energy'],
        ref_config_file_name=ref_config_file_name,
        ground_energy=0.0,
        freqs=inf_dct['freqs'],
        use_cm_shift=True)

    return spc_str, dat_dct
def multiconfig_block(inf_dct_lst):
    """ Write a union block combining multiple species configurations.
    """
    # NOTE(review): `configs_union` is not defined or imported anywhere in
    # this module as shown -- confirm where it comes from before relying on
    # this function.  The tuple() below is just a shallow copy of the input.
    spc_str_lst = tuple(dct for dct in inf_dct_lst)
    config_str = configs_union(spc_str_lst)

    return config_str
# TS BLOCKS FOR VARIATIONAL TREATMENTS
def vrctst_block(inf_dct_ts, inf_dct_i, inf_dct_j):
    """ Write a VRCTST block: fragment properties plus an auxiliary
        flux file generated by VaReCoF.
    """
    dat_dct = {}

    # Merge fragment properties; 0.850 is the standard VRC-TST correction
    sym_factor = inf_dct_i['sym_factor'] * inf_dct_j['sym_factor'] * 0.850
    elec_levels = automol.combine.electronic_energy_levels(
        inf_dct_i['elec_levels'], inf_dct_j['elec_levels'])
    freqs = inf_dct_i['freqs'] + inf_dct_j['freqs']
    mess_hr_str = inf_dct_i['mess_hr_str'] + inf_dct_j['mess_hr_str']

    # Total stoichiometry of the two fragments
    stoich = automol.combine.formula_string(
        inf_dct_i['geom'], inf_dct_j['geom'])

    # Auxiliary flux file
    flux_file_name = '{}_flux.dat'.format('ts')
    dat_dct[flux_file_name] = inf_dct_ts['flux_str']

    core_str = mess_io.writer.core_rotd(
        sym_factor=sym_factor,
        flux_file_name=flux_file_name,
        stoich=stoich)

    spc_str = mess_io.writer.molecule(
        core=core_str,
        freqs=freqs,
        elec_levels=elec_levels,
        hind_rot=mess_hr_str,
        xmat=(),
        rovib_coups=(),
        rot_dists=(),
        inf_intens=(),
        freq_scale_factor=None,
        use_harmfreqs_key=False)

    return spc_str, dat_dct
def rpvtst_block(ts_inf_dct, inf_dct_i, inf_dct_j):
    """ Prepare the MESS input strings for a variational TS that does not
        have a saddle point, writing one molecule block per point along
        the reaction path.
    """
    # Merge fragment properties shared by every rxn-path point
    sym_factor = inf_dct_i['sym_factor'] * inf_dct_j['sym_factor']
    mess_hr_str = inf_dct_i['mess_hr_str'] + inf_dct_j['mess_hr_str']
    elec_levels = [[0.0, 1.0]]
    # elec_levels = automol.combine.electronic_energy_levels(
    #     inf_dct_i['elec_levels'], inf_dct_j['elec_levels'])

    rpath_strs = []
    for idx, dct in enumerate(ts_inf_dct['rpath']):
        # Header identifying this reaction-path point
        header = '!-----------------------------------------------\n'
        header += '! Rxn Path Pt {0}: '.format(str(idx+1))
        header += 'R = {0:.2f} Ang'.format(dct['rval'] * phycon.BOHR2ANG)
        header += '\n\n'

        core_str = mess_io.writer.core_rigidrotor(
            geo=dct['geom'],
            sym_factor=sym_factor,
            interp_emax=None)
        mol_str = mess_io.writer.molecule(
            core=core_str,
            freqs=dct['freqs'],
            elec_levels=elec_levels,
            hind_rot=mess_hr_str,
            xmat=(),
            rovib_coups=(),
            rot_dists=())

        rpath_strs.append(header + mol_str)

    return rpath_strs, {}
#
#
# def vtst_energy():
# """ Get the VTST energy
# """
# if not saddle:
# # Calcuate infinite separation ZPVE
# # Assumes the ZPVE = ZPVE(1st grid pt) as an approximation
# if idx == 0:
# rct_zpe = zpe
#
# # Calculate the reference energies
# ene_rel = (ene - inf_sep_ene) * phycon.EH2KCAL
# zpe_rel = zpe - rct_zpe
# eref_abs = ene_rel + zpe_rel + spc_ene
# if saddle:
# # Calculate the relative energy
# erel = ene * phycon.EH2KCAL + zpe - first_ground_ene
#
# return erel
| 30.103152
| 79
| 0.635066
|
acfe691fe5e1f80fb8bd14f465794712ce65b32f
| 4,965
|
py
|
Python
|
torchvision/prototype/datasets/_builtin/voc.py
|
lorenzo-park/vision
|
ac561bcefd75f503b27e9154938b2a54d560b276
|
[
"BSD-3-Clause"
] | null | null | null |
torchvision/prototype/datasets/_builtin/voc.py
|
lorenzo-park/vision
|
ac561bcefd75f503b27e9154938b2a54d560b276
|
[
"BSD-3-Clause"
] | null | null | null |
torchvision/prototype/datasets/_builtin/voc.py
|
lorenzo-park/vision
|
ac561bcefd75f503b27e9154938b2a54d560b276
|
[
"BSD-3-Clause"
] | null | null | null |
import functools
import io
import pathlib
from typing import Any, Callable, Dict, List, Optional, Tuple
from xml.etree import ElementTree
import torch
from torchdata.datapipes.iter import (
IterDataPipe,
Mapper,
Filter,
Demultiplexer,
IterKeyZipper,
LineReader,
)
from torchvision.datasets import VOCDetection
from torchvision.prototype.datasets.utils import (
Dataset,
DatasetConfig,
DatasetInfo,
HttpResource,
OnlineResource,
DatasetType,
)
from torchvision.prototype.datasets.utils._internal import (
path_accessor,
getitem,
INFINITE_BUFFER_SIZE,
path_comparator,
hint_sharding,
hint_shuffling,
)
# Directory containing this module.
HERE = pathlib.Path(__file__).parent
class VOC(Dataset):
    """PASCAL VOC dataset for the torchvision prototype datasets API,
    supporting both the detection and the segmentation task.
    """

    def _make_info(self) -> DatasetInfo:
        # Dataset metadata and the option space users may configure.
        return DatasetInfo(
            "voc",
            type=DatasetType.IMAGE,
            homepage="http://host.robots.ox.ac.uk/pascal/VOC/",
            valid_options=dict(
                split=("train", "val", "test"),
                year=("2012",),
                task=("detection", "segmentation"),
            ),
        )

    def resources(self, config: DatasetConfig) -> List[OnlineResource]:
        # Only the 2012 trainval archive is wired up; other year/split
        # combinations are intentionally left unimplemented for now.
        if config.year == "2012":
            if config.split == "train":
                archive = HttpResource(
                    "http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar",
                    sha256="e14f763270cf193d0b5f74b169f44157a4b0c6efa708f4dd0ff78ee691763bcb",
                )
            else:
                raise RuntimeError("FIXME")
        else:
            raise RuntimeError("FIXME")
        return [archive]

    # Archive folder holding the annotations, per task.
    _ANNS_FOLDER = dict(
        detection="Annotations",
        segmentation="SegmentationClass",
    )
    # ImageSets subfolder holding the split lists, per task.
    _SPLIT_FOLDER = dict(
        detection="Main",
        segmentation="Segmentation",
    )

    def _is_in_folder(self, data: Tuple[str, Any], *, name: str, depth: int = 1) -> bool:
        # True if `name` appears among the last `depth` parent directories
        # of the archive member's path.
        path = pathlib.Path(data[0])
        return name in path.parent.parts[-depth:]

    def _classify_archive(self, data: Tuple[str, Any], *, config: DatasetConfig) -> Optional[int]:
        # Demultiplexer routing: 0 = split lists, 1 = images,
        # 2 = annotations; None drops the archive member.
        if self._is_in_folder(data, name="ImageSets", depth=2):
            return 0
        elif self._is_in_folder(data, name="JPEGImages"):
            return 1
        elif self._is_in_folder(data, name=self._ANNS_FOLDER[config.task]):
            return 2
        else:
            return None

    def _decode_detection_ann(self, buffer: io.IOBase) -> torch.Tensor:
        # Parse the VOC XML annotation into an (N, 4) int tensor of
        # [xmin, ymin, xmax, ymax] boxes, one row per annotated object.
        result = VOCDetection.parse_voc_xml(ElementTree.parse(buffer).getroot())  # type: ignore[arg-type]
        objects = result["annotation"]["object"]
        bboxes = [obj["bndbox"] for obj in objects]
        bboxes = [[int(bbox[part]) for part in ("xmin", "ymin", "xmax", "ymax")] for bbox in bboxes]
        return torch.tensor(bboxes)

    def _collate_and_decode_sample(
        self,
        data: Tuple[Tuple[Tuple[str, str], Tuple[str, io.IOBase]], Tuple[str, io.IOBase]],
        *,
        config: DatasetConfig,
        decoder: Optional[Callable[[io.IOBase], torch.Tensor]],
    ) -> Dict[str, Any]:
        # `data` is the output of two nested IterKeyZippers:
        # ((split line, (image path, image buffer)), (ann path, ann buffer)).
        split_and_image_data, ann_data = data
        _, image_data = split_and_image_data
        image_path, image_buffer = image_data
        ann_path, ann_buffer = ann_data

        image = decoder(image_buffer) if decoder else image_buffer

        if config.task == "detection":
            ann = self._decode_detection_ann(ann_buffer)
        else:  # config.task == "segmentation":
            # segmentation masks are images themselves, so reuse the decoder
            ann = decoder(ann_buffer) if decoder else ann_buffer  # type: ignore[assignment]

        return dict(image_path=image_path, image=image, ann_path=ann_path, ann=ann)

    def _make_datapipe(
        self,
        resource_dps: List[IterDataPipe],
        *,
        config: DatasetConfig,
        decoder: Optional[Callable[[io.IOBase], torch.Tensor]],
    ) -> IterDataPipe[Dict[str, Any]]:
        archive_dp = resource_dps[0]
        # Split the archive stream into split lists / images / annotations
        # (indices must match _classify_archive above).
        split_dp, images_dp, anns_dp = Demultiplexer(
            archive_dp,
            3,
            functools.partial(self._classify_archive, config=config),
            drop_none=True,
            buffer_size=INFINITE_BUFFER_SIZE,
        )

        # Reduce the split stream to the one list file for this task/split
        # and read it line by line (one image id per line).
        split_dp = Filter(split_dp, self._is_in_folder, fn_kwargs=dict(name=self._SPLIT_FOLDER[config.task]))
        split_dp = Filter(split_dp, path_comparator("name", f"{config.split}.txt"))
        split_dp = LineReader(split_dp, decode=True)
        split_dp = hint_sharding(split_dp)
        split_dp = hint_shuffling(split_dp)

        # Join image ids first with images, then with annotations, matching
        # on the file stem; the key_fn digs the id out of the nested tuples.
        dp = split_dp
        for level, data_dp in enumerate((images_dp, anns_dp)):
            dp = IterKeyZipper(
                dp,
                data_dp,
                key_fn=getitem(*[0] * level, 1),
                ref_key_fn=path_accessor("stem"),
                buffer_size=INFINITE_BUFFER_SIZE,
            )
        return Mapper(dp, self._collate_and_decode_sample, fn_kwargs=dict(config=config, decoder=decoder))
| 34.006849
| 109
| 0.616717
|
acfe69f0804b9792ad83b4e356803271e57f7b47
| 795
|
py
|
Python
|
src/spaceone/core/handler/mutation_handler.py
|
jihyungSong/python-core
|
898ead301363d3e599ecd645b73071e639f886b0
|
[
"Apache-2.0"
] | null | null | null |
src/spaceone/core/handler/mutation_handler.py
|
jihyungSong/python-core
|
898ead301363d3e599ecd645b73071e639f886b0
|
[
"Apache-2.0"
] | null | null | null |
src/spaceone/core/handler/mutation_handler.py
|
jihyungSong/python-core
|
898ead301363d3e599ecd645b73071e639f886b0
|
[
"Apache-2.0"
] | null | null | null |
import logging
from spaceone.core.handler import BaseMutationHandler
_LOGGER = logging.getLogger(__name__)
class SpaceONEMutationHandler(BaseMutationHandler):
    """Mutation handler that scopes request parameters by the caller's role."""

    def request(self, params):
        """Inject domain and project scoping values from transaction meta.

        DOMAIN/PROJECT/USER roles are pinned to their domain; PROJECT roles
        additionally receive any configured append-parameters, without
        overwriting values the caller already supplied.
        """
        role_type = self.transaction.get_meta('authorization.role_type')
        domain_id = self.transaction.get_meta('domain_id')
        append_parameter = self.transaction.get_meta('mutation.append_parameter', {})

        if role_type in ('DOMAIN', 'PROJECT', 'USER'):
            params['domain_id'] = domain_id

        if role_type == 'PROJECT' and isinstance(append_parameter, dict):
            for key, meta_key in append_parameter.items():
                if key not in params:
                    params[key] = self.transaction.get_meta(meta_key)

        return params
| 33.125
| 85
| 0.654088
|
acfe6a5d50908a9b6fc012cbab0a5ae0461d4c3b
| 7,986
|
py
|
Python
|
python/setup.py
|
lzt-pro/ray
|
cc93fee4a47dc9b9f754d0b53ae2f1e4f598aeb1
|
[
"Apache-2.0"
] | null | null | null |
python/setup.py
|
lzt-pro/ray
|
cc93fee4a47dc9b9f754d0b53ae2f1e4f598aeb1
|
[
"Apache-2.0"
] | 3
|
2021-06-08T21:46:35.000Z
|
2022-03-12T00:35:21.000Z
|
python/setup.py
|
lzt-pro/ray
|
cc93fee4a47dc9b9f754d0b53ae2f1e4f598aeb1
|
[
"Apache-2.0"
] | null | null | null |
from itertools import chain
import os
import re
import shutil
import subprocess
import sys
from setuptools import setup, find_packages, Distribution
import setuptools.command.build_ext as _build_ext
# Ideally, we could include these files by putting them in a
# MANIFEST.in or using the package_data argument to setup, but the
# MANIFEST.in gets applied at the very beginning when setup.py runs
# before these files have been created, so we have to move the files
# manually.
# Platform-specific suffixes for executables and Python extension modules.
exe_suffix = ".exe" if sys.platform == "win32" else ""

# .pyd is the extension Python requires on Windows for shared libraries.
# https://docs.python.org/3/faq/windows.html#is-a-pyd-file-the-same-as-a-dll
pyd_suffix = ".pyd" if sys.platform == "win32" else ".so"

# NOTE: The lists below must be kept in sync with ray/BUILD.bazel.
ray_files = [
    "ray/core/src/ray/thirdparty/redis/src/redis-server",
    "ray/core/src/ray/gcs/redis_module/libray_redis_module.so",
    "ray/core/src/plasma/plasma_store_server" + exe_suffix,
    "ray/_raylet" + pyd_suffix,
    "ray/core/src/ray/raylet/raylet_monitor" + exe_suffix,
    "ray/core/src/ray/gcs/gcs_server" + exe_suffix,
    "ray/core/src/ray/raylet/raylet" + exe_suffix,
    "ray/streaming/_streaming.so",
]

# Only bundle the Java jar when explicitly requested via env var.
build_java = os.getenv("RAY_INSTALL_JAVA") == "1"
if build_java:
    ray_files.append("ray/jars/ray_dist.jar")

# These are the directories where automatically generated Python protobuf
# bindings are created.
generated_python_directories = [
    "ray/core/generated",
    "ray/streaming/generated",
]

optional_ray_files = []

ray_autoscaler_files = [
    "ray/autoscaler/aws/example-full.yaml",
    "ray/autoscaler/azure/example-full.yaml",
    "ray/autoscaler/azure/azure-vm-template.json",
    "ray/autoscaler/azure/azure-config-template.json",
    "ray/autoscaler/gcp/example-full.yaml",
    "ray/autoscaler/local/example-full.yaml",
    "ray/autoscaler/kubernetes/example-full.yaml",
    "ray/autoscaler/kubernetes/kubectl-rsync.sh",
    "ray/autoscaler/ray-schema.json"
]

ray_project_files = [
    "ray/projects/schema.json", "ray/projects/templates/cluster_template.yaml",
    "ray/projects/templates/project_template.yaml",
    "ray/projects/templates/requirements.txt"
]

# Everything produced by the dashboard's JS build.
ray_dashboard_files = [
    os.path.join(dirpath, filename)
    for dirpath, dirnames, filenames in os.walk("ray/dashboard/client/build")
    for filename in filenames
]

optional_ray_files += ray_autoscaler_files
optional_ray_files += ray_project_files
optional_ray_files += ray_dashboard_files

# Extra binaries for the experimental GCS backend.
if "RAY_USE_NEW_GCS" in os.environ and os.environ["RAY_USE_NEW_GCS"] == "on":
    ray_files += [
        "ray/core/src/credis/build/src/libmember.so",
        "ray/core/src/credis/build/src/libmaster.so",
        "ray/core/src/credis/redis/src/redis-server"
    ]

# Optional-dependency groups exposed as pip extras.
extras = {
    "debug": [],
    "dashboard": ["requests"],
    "serve": ["uvicorn", "pygments", "werkzeug", "flask", "pandas", "blist"],
    "tune": ["tabulate", "tensorboardX", "pandas"]
}

extras["rllib"] = extras["tune"] + [
    "atari_py",
    "dm_tree",
    "gym[atari]",
    "lz4",
    "opencv-python-headless",
    "pyyaml",
    "scipy",
]

extras["streaming"] = ["msgpack >= 0.6.2"]

# "all" is the de-duplicated union of every extras group.
extras["all"] = list(set(chain.from_iterable(extras.values())))
class build_ext(_build_ext.build_ext):
    """Custom build step: runs Bazel via build.sh and copies the produced
    binaries and generated files into the build tree.
    """

    def run(self):
        # Note: We are passing in sys.executable so that we use the same
        # version of Python to build packages inside the build.sh script. Note
        # that certain flags will not be passed along such as --user or sudo.
        # TODO(rkn): Fix this.
        command = ["../build.sh", "-p", sys.executable]
        if sys.platform == "win32" and command[0].lower().endswith(".sh"):
            # We can't run .sh files directly in Windows, so find a shell.
            # Don't use "bash" instead of "sh", because that might run the Bash
            # from WSL! (We want MSYS2's Bash, which is also sh by default.)
            shell = os.getenv("BAZEL_SH", "sh")  # NOT "bash"! (see above)
            command.insert(0, shell)
        if build_java:
            # Also build binaries for Java if the above env variable exists.
            command += ["-l", "python,java"]
        subprocess.check_call(command)

        # We also need to install pickle5 along with Ray, so make sure that the
        # relevant non-Python pickle5 files get copied.
        pickle5_files = self.walk_directory("./ray/pickle5_files/pickle5")
        thirdparty_files = self.walk_directory("./ray/thirdparty_files")

        files_to_include = ray_files + pickle5_files + thirdparty_files

        # Copy over the autogenerated protobuf Python bindings.
        for directory in generated_python_directories:
            files_to_include.extend(
                os.path.join(directory, name)
                for name in os.listdir(directory)
                if name[-3:] == ".py")

        for name in files_to_include:
            self.move_file(name)

        # Try to copy over the optional files.
        for name in optional_ray_files:
            try:
                self.move_file(name)
            except Exception:
                print("Failed to copy optional file {}. This is ok."
                      .format(name))

    def walk_directory(self, directory):
        """Return every file path under *directory*, recursively."""
        return [
            os.path.join(root, name)
            for root, _, filenames in os.walk(directory)
            for name in filenames
        ]

    def move_file(self, filename):
        # TODO(rkn): This feels very brittle. It may not handle all cases. See
        # https://github.com/apache/arrow/blob/master/python/setup.py for an
        # example.
        source = filename
        destination = os.path.join(self.build_lib, filename)
        # Create the target directory if it doesn't already exist.
        target_dir = os.path.dirname(destination)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        if not os.path.exists(destination):
            print("Copying {} to {}.".format(source, destination))
            shutil.copy(source, destination, follow_symlinks=True)
class BinaryDistribution(Distribution):
    """Distribution that always reports extension modules so setuptools
    produces platform-specific (non-pure) wheels and runs build_ext.
    """

    def has_ext_modules(self):
        return True
def find_version(*filepath):
    """Extract the ``__version__`` string from a Python file.

    *filepath* is given as path components relative to this file's
    directory. Raises RuntimeError when no version assignment is found.
    """
    base_dir = os.path.abspath(os.path.dirname(__file__))
    with open(os.path.join(base_dir, *filepath)) as handle:
        match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
                          handle.read(), re.M)
    if not match:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
# Runtime dependencies installed alongside the ray package.
requires = [
    "aiohttp",
    "click",
    "colorama",
    "filelock",
    "google",
    "grpcio",
    "jsonschema",
    "msgpack >= 0.6.0, < 1.0.0",
    "numpy >= 1.16",
    "protobuf >= 3.8.0",
    "py-spy >= 0.2.0",
    "pyyaml",
    "redis >= 3.3.2, < 3.5.0",
]
setup(
    name="ray",
    # Version is read from ray/__init__.py so it lives in one place.
    version=find_version("ray", "__init__.py"),
    author="Ray Team",
    author_email="ray-dev@googlegroups.com",
    description=("A system for parallel and distributed Python that unifies "
                 "the ML ecosystem."),
    long_description=open("../README.rst").read(),
    url="https://github.com/ray-project/ray",
    keywords=("ray distributed parallel machine-learning "
              "reinforcement-learning deep-learning python"),
    packages=find_packages(),
    cmdclass={"build_ext": build_ext},
    # The BinaryDistribution argument triggers build_ext.
    distclass=BinaryDistribution,
    install_requires=requires,
    setup_requires=["cython >= 0.29.14", "wheel"],
    extras_require=extras,
    entry_points={
        "console_scripts": [
            "ray=ray.scripts.scripts:main",
            "rllib=ray.rllib.scripts:cli [rllib]", "tune=ray.tune.scripts:cli"
        ]
    },
    include_package_data=True,
    zip_safe=False,
    license="Apache 2.0")
| 34.873362
| 79
| 0.652893
|
acfe6a92828bf1285297bedac5bc3853c46a82cc
| 188
|
py
|
Python
|
Beginner/1064.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Beginner/1064.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Beginner/1064.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
# URI Online Judge 1064: read six numbers, report how many are positive
# and the average of the positive ones.
count = 0
soma = 0
for k in range(6):
    num = float(input())
    if num > 0:
        count += 1
        soma += num
print("%d valores positivos" % (count))
# Bug fix: the original divided by `count` unconditionally, crashing with
# ZeroDivisionError when none of the six inputs is positive.
if count > 0:
    print("%.1f" % (soma / count))
else:
    print("%.1f" % 0.0)
| 17.090909
| 39
| 0.521277
|
acfe6b285152bf3c061799a660f181e89b4380b5
| 666
|
py
|
Python
|
Python/pyworkout/files/ex18_mod2.py
|
honchardev/Fun
|
ca7c0076e9bb3017c5d7e89aa7d5bd54a83c8ecc
|
[
"MIT"
] | null | null | null |
Python/pyworkout/files/ex18_mod2.py
|
honchardev/Fun
|
ca7c0076e9bb3017c5d7e89aa7d5bd54a83c8ecc
|
[
"MIT"
] | 3
|
2020-03-24T16:26:35.000Z
|
2020-04-15T19:40:41.000Z
|
Python/pyworkout/files/ex18_mod2.py
|
honchardev/Fun
|
ca7c0076e9bb3017c5d7e89aa7d5bd54a83c8ecc
|
[
"MIT"
] | null | null | null |
from io import StringIO
from venv import create
def create_tsv_content(
    lines_cnt: int = 10
) -> StringIO:
    """Build an in-memory TSV where row *i* is ``i<TAB>i*100``.

    Rows are joined with newlines (no trailing newline).
    """
    rows = []
    for idx in range(lines_cnt):
        rows.append(f'{idx}\t{idx * 100}')
    return StringIO('\n'.join(rows))
def assess_content(
    content: StringIO,
    separator: str = '\t'
) -> int:
    """Sum the products of the two integer columns of *content*.

    Each line must contain exactly two integer fields separated by
    *separator*.
    """
    total = 0
    for row in content:
        left, right = row.split(separator)
        total += int(left) * int(right)
    return total
def main():
    """Demo entry point: build the TSV content and print its checksum."""
    content = create_tsv_content()
    print(assess_content(content))


if __name__ == '__main__':
    main()
| 18.5
| 54
| 0.641141
|
acfe6e51771939b22d47704660d3669b1a06dfe4
| 3,814
|
py
|
Python
|
ts/nni_manager/config/dlc/dlcUtil.py
|
Microsoft/nni
|
b99e26833287c901f6d808d07ea6882014b05f58
|
[
"MIT"
] | 2,305
|
2018-09-07T12:42:26.000Z
|
2019-05-06T20:14:24.000Z
|
ts/nni_manager/config/dlc/dlcUtil.py
|
Eurus-Holmes/nni
|
45af1d6b79b355d3e013f1eae2f4c193bbd84074
|
[
"MIT"
] | 379
|
2018-09-10T10:19:50.000Z
|
2019-05-06T18:04:46.000Z
|
ts/nni_manager/config/dlc/dlcUtil.py
|
Eurus-Holmes/nni
|
45af1d6b79b355d3e013f1eae2f4c193bbd84074
|
[
"MIT"
] | 314
|
2018-09-08T05:36:08.000Z
|
2019-05-06T08:48:51.000Z
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import logging
import os
import pathlib
import sys
import traceback
from argparse import ArgumentParser
# ref: https://help.aliyun.com/document_detail/203290.html?spm=a2c4g.11186623.6.727.6f9b5db6bzJh4x
from alibabacloud_pai_dlc20201203.client import Client
from alibabacloud_tea_openapi.models import Config
from alibabacloud_pai_dlc20201203.models import * #CreateJobRequest, JobSpec
# Entry point: submits a training job to Aliyun PAI-DLC, prints the job id,
# then serves status/tracking-url/stop commands read from stdin, one per line
# (the parent process drives this script through its stdin/stdout).
if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument('--type', help='the type of job spec')
    parser.add_argument('--image', help='the docker image of job')
    parser.add_argument('--job_type', choices=['TFJob', 'PyTorchJob'], help='the job type')
    parser.add_argument('--pod_count', type=int, default=1, help='pod count')
    parser.add_argument('--ecs_spec', help='ecs spec')
    parser.add_argument('--region', help='region')
    parser.add_argument('--workspace_id', help='workspace id for your project')
    parser.add_argument('--nas_data_source_id', help='nas data_source_id of DLC dataset configuration')
    parser.add_argument('--oss_data_source_id', help='oss data_source_id of DLC dataset configuration')
    parser.add_argument('--access_key_id', help='access_key_id')
    parser.add_argument('--access_key_secret', help='access_key_secret')
    parser.add_argument('--experiment_name', help='the experiment name')
    parser.add_argument('--user_command', help='user command')
    parser.add_argument('--log_dir', help='exception log dir')
    args = parser.parse_args()

    # Submission failures are appended to dlc_exception.log under log_dir.
    pathlib.Path(args.log_dir).mkdir(parents=True, exist_ok=True)
    logging.basicConfig(filename=os.path.join(args.log_dir, 'dlc_exception.log'),
                        format='%(asctime)s %(message)s',
                        level=logging.INFO)

    # DLC submit
    try:
        # init client
        client = Client(
            Config(
                access_key_id=args.access_key_id,
                access_key_secret=args.access_key_secret,
                region_id=args.region,
                endpoint=f'pai-dlc.{args.region}.aliyuncs.com'
            )
        )
        # NAS data source is always attached; OSS only when an id is given.
        nas_1 = DataSourceItem(
            data_source_type='nas',
            data_source_id=args.nas_data_source_id,
        )
        oss = None
        if args.oss_data_source_id:
            oss = DataSourceItem(
                data_source_type='oss',
                data_source_id=args.oss_data_source_id,
            )
        # job spec
        spec = JobSpec(
            type=args.type,
            image=args.image,
            pod_count=args.pod_count,
            ecs_spec=args.ecs_spec,
        )
        data_sources = [nas_1]
        if oss:
            data_sources = [nas_1, oss]
        req = CreateJobRequest(
            display_name=args.experiment_name,
            job_type=args.job_type,
            job_specs=[spec],
            data_sources=data_sources,
            user_command=args.user_command,
            workspace_id=args.workspace_id,
        )
        response = client.create_job(req)
        job_id = response.body.job_id
        # The parent process parses this line to learn the job id.
        print('job id: ' + job_id)
        while True:
            line = sys.stdin.readline().rstrip()
            if line == 'update_status':
                print('status:' + client.get_job(job_id).body.status)
            elif line == 'tracking_url':
                #TODO: 1. get this url by api? 2. change this url in private dlc mode.
                # Bug fix: the query separator had been corrupted to the "(R)"
                # character ("...jobId={job_id}(R)ionId=...") — an HTML-entity
                # mangling of "&regionId" — which produced a broken console URL.
                print('tracking_url:' + f'https://pai-dlc.console.aliyun.com/#/jobs/detail?jobId={job_id}&regionId={args.region}')
            elif line == 'stop':
                client.stop_job(job_id)
                exit(0)
    except Exception as e:
        logging.error('DLC submit Exception: \n')
        logging.error(e, exc_info=1)
| 37.762376
| 130
| 0.628474
|
acfe6fab25f40648ff9e9c9321fcec4950ab0a21
| 105
|
py
|
Python
|
library/__init__.py
|
cddysq/python
|
d576270263b0b3fffa3fdd2df87d6710159e1415
|
[
"MIT"
] | null | null | null |
library/__init__.py
|
cddysq/python
|
d576270263b0b3fffa3fdd2df87d6710159e1415
|
[
"MIT"
] | null | null | null |
library/__init__.py
|
cddysq/python
|
d576270263b0b3fffa3fdd2df87d6710159e1415
|
[
"MIT"
] | null | null | null |
# -*-coding:utf-8 -*-
# @author :cddysq
# @date :2021/12/28 17:04
# @version :v1.0.0
"""Custom parsing package (package-level init; no runtime code)."""
| 15
| 28
| 0.52381
|
acfe708392b03f43ffc2d455044953a8444c84ae
| 3,289
|
py
|
Python
|
lcc/stars_processing/tools/stats_manager.py
|
mavrix93/LightCurvesClassifier
|
a0a51f033cb8adf45296913f0de0aa2568e0530c
|
[
"MIT"
] | 12
|
2017-06-07T12:16:06.000Z
|
2021-02-28T10:06:44.000Z
|
lcc/stars_processing/tools/stats_manager.py
|
mavrix93/LightCurvesClassifier
|
a0a51f033cb8adf45296913f0de0aa2568e0530c
|
[
"MIT"
] | 6
|
2017-03-10T21:52:00.000Z
|
2021-08-25T15:02:35.000Z
|
lcc/stars_processing/tools/stats_manager.py
|
mavrix93/LightCurvesClassifier
|
a0a51f033cb8adf45296913f0de0aa2568e0530c
|
[
"MIT"
] | 3
|
2017-05-21T16:20:24.000Z
|
2019-06-10T11:06:42.000Z
|
from matplotlib import pyplot as plt
import os
from lcc.data_manager.status_resolver import StatusResolver
import numpy as np
class StatsManager(object):
    """Holds classifier statistics and derives/saves ROC curve data.

    Attributes
    ----------
    stats : list
        List of dictionaries of statistical values, containing at least
        "false_positive_rate" and "true_positive_rate" in order to work
        properly.
    """

    def __init__(self, stats):
        """
        Parameters
        ----------
        stats : list
            List of dictionaries of statistical values, containing at
            least "false_positive_rate" and "true_positive_rate".
        """
        self.stats = stats

    def getROC(self):
        """
        Get ROC curve

        Returns
        -------
        list
            [fp_values, tp_values] as numpy arrays, sorted by fp.
        """
        fp_values = [entry.get("false_positive_rate") for entry in self.stats]
        tp_values = [entry.get("true_positive_rate") for entry in self.stats]
        order = np.argsort(fp_values)
        return [np.array(fp_values)[order], np.array(tp_values)[order]]

    def saveROCfile(self, path, file_name="roc_curve.dat", delim=None):
        """
        Save ROC data into a delimited text file.

        Parameters
        ----------
        path : str
            Path to the output file location
        file_name : str
            Name of the file
        delim : str
            Delimiter of columns (defaults to a tab)

        Returns
        -------
        numpy.ndarray
            The (n, 2) array of fp/tp values that was written.
        """
        if not delim:
            delim = "\t"
        roc = np.array(self.getROC()).T
        with open(os.path.join(path, file_name), "w") as out:
            out.write("#fp%stp\n" % delim)
            np.savetxt(out, roc, fmt="%.2f", delimiter=delim)
        return roc

    def plotROC(self, save=False, title="ROC curve", path=".",
                file_name="roc_plot.png"):
        """
        Plot ROC and show it or save it.

        Parameters
        ----------
        save : bool
            If True the plot is saved to path/file_name instead of shown
        title : str
            Title of the plot
        path : str
            Path to the output file location
        file_name : str
            Name of the file

        Returns
        -------
        None
        """
        fp_values, tp_values = self.getROC()
        plt.plot(fp_values, tp_values, "b-", linewidth=2)
        # Chance-level diagonal for reference.
        plt.plot([0, 1], [0, 1], "r--")
        plt.title(title)
        plt.xlabel("False positive rate")
        plt.ylabel("True positive rate")
        if save:
            plt.savefig(os.path.join(path, file_name))
        else:
            plt.show()
        plt.clf()

    def saveStats(self, path=".", file_name="stats.dat", delim=None, overwrite=True):
        """
        Save the raw stats into a file via StatusResolver.

        Parameters
        ----------
        path : str
            Path to the output file location
        file_name : str
            Name of the file
        delim : str
            Delimiter of columns (defaults to a tab)
        overwrite : bool
            Overwrite file if it exists

        Returns
        -------
        None
        """
        if not delim:
            delim = "\t"
        StatusResolver.save_query(
            self.stats, file_name, path, delim, overwrite)
| 24.183824
| 85
| 0.505625
|
acfe708def1e286cb08edf06219bc3975dee1ca6
| 2,779
|
py
|
Python
|
datasets/wmt18/wmt18.py
|
TheophileBlard/nlp
|
2e0a8639a79b1abc848cff5c669094d40bba0f63
|
[
"Apache-2.0"
] | 3
|
2020-05-19T05:15:12.000Z
|
2020-10-03T11:44:42.000Z
|
datasets/wmt18/wmt18.py
|
TheophileBlard/nlp
|
2e0a8639a79b1abc848cff5c669094d40bba0f63
|
[
"Apache-2.0"
] | null | null | null |
datasets/wmt18/wmt18.py
|
TheophileBlard/nlp
|
2e0a8639a79b1abc848cff5c669094d40bba0f63
|
[
"Apache-2.0"
] | 1
|
2020-12-08T10:36:30.000Z
|
2020-12-08T10:36:30.000Z
|
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace NLP Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""WMT18: Translate dataset."""
import nlp
from .wmt_utils import CWMT_SUBSET_NAMES, Wmt, WmtConfig
_URL = "http://www.statmt.org/wmt18/translation-task.html"
_CITATION = """\
@InProceedings{bojar-EtAl:2018:WMT1,
author = {Bojar, Ond\v{r}ej and Federmann, Christian and Fishel, Mark
and Graham, Yvette and Haddow, Barry and Huck, Matthias and
Koehn, Philipp and Monz, Christof},
title = {Findings of the 2018 Conference on Machine Translation (WMT18)},
booktitle = {Proceedings of the Third Conference on Machine Translation,
Volume 2: Shared Task Papers},
month = {October},
year = {2018},
address = {Belgium, Brussels},
publisher = {Association for Computational Linguistics},
pages = {272--307},
url = {http://www.aclweb.org/anthology/W18-6401}
}
"""
# Every WMT18 task pairs a source language with English ("en" is fixed).
_LANGUAGE_PAIRS = [(lang, "en") for lang in ["cs", "de", "et", "fi", "kk", "ru", "tr", "zh"]]
class Wmt18(Wmt):
    """WMT 18 translation datasets for all {xx, "en"} language pairs."""

    # Version history:
    # 1.0.0: S3 (new shuffling, sharding and slicing mechanism).

    # One builder config per language pair; the heavy lifting (download,
    # split generation) is inherited from Wmt.
    BUILDER_CONFIGS = [
        WmtConfig(  # pylint:disable=g-complex-comprehension
            description="WMT 2018 %s-%s translation task dataset." % (l1, l2),
            url=_URL,
            citation=_CITATION,
            language_pair=(l1, l2),
            version=nlp.Version("1.0.0"),
        )
        for l1, l2 in _LANGUAGE_PAIRS
    ]

    @property
    def _subsets(self):
        """Mapping from dataset split to the names of its source corpora."""
        return {
            nlp.Split.TRAIN: [
                "europarl_v7",
                "europarl_v8_18",
                "paracrawl_v1",
                "commoncrawl",
                "newscommentary_v13",
                "czeng_17",
                "yandexcorpus",
                "wikiheadlines_fi",
                "wikiheadlines_ru",
                "setimes_2",
                "uncorpus_v1",
                "rapid_2016",
            ]
            + CWMT_SUBSET_NAMES,
            nlp.Split.VALIDATION: ["newsdev2018", "newstest2017", "newstestB2017"],
            nlp.Split.TEST: ["newstest2018"],
        }
| 33.890244
| 93
| 0.61425
|
acfe72a3490272e4bbc24bb2ce674b40d242aab3
| 5,963
|
py
|
Python
|
network/unmix/source/models/unet.py
|
splitstrument/training
|
03d1edd68f3079dc1fba890eebf16fa90eb900f0
|
[
"MIT"
] | 4
|
2019-04-10T22:20:12.000Z
|
2020-01-27T17:43:48.000Z
|
network/unmix/source/models/unet.py
|
splitstrument/training
|
03d1edd68f3079dc1fba890eebf16fa90eb900f0
|
[
"MIT"
] | 3
|
2019-03-09T12:03:58.000Z
|
2020-01-27T17:42:21.000Z
|
unmix/source/models/unet.py
|
unmix-io/unmix-net
|
873d99da42f80574543c096fcd5b7c8748d2cca0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# coding: utf8
"""
Keras model for training using a U-Net architecture.
"""
from keras.models import *
from keras.layers import *
from unmix.source.configuration import Configuration
from unmix.source.models.basemodel import BaseModel
# Base implementation from: https://www.kaggle.com/cjansen/u-net-in-keras
class UNetModel(BaseModel):
    # Human-readable model identifier used by the surrounding framework.
    name = 'U-Net'

    def build(self, config):
        """Build the U-Net Keras graph.

        Four down-sampling stages (two 3x3 convs + 2x2 max-pool each), a
        bottleneck, then four up-sampling stages, each concatenating the
        (cropped) feature map from the matching contracting stage.
        Input shape is (769, transformation.size, 1) — presumably a
        spectrogram with 769 frequency bins; TODO confirm against the
        data pipeline.
        """
        transformation = Configuration.get('transformation.options', optional=False)
        concat_axis = 3  # channels axis (data_format="channels_last")
        base_filter_count = 32
        input_shape = (769, transformation.size, 1)
        input = Input(input_shape)
        # --- Contracting path, stage 1 ---
        conv1 = Conv2D(base_filter_count, (3, 3), padding="same", name="conv1_1",
                       activation="relu", data_format="channels_last")(input)
        conv1 = Conv2D(base_filter_count, (3, 3), padding="same", activation="relu",
                       data_format="channels_last")(conv1)
        pool1 = MaxPooling2D(pool_size=(
            2, 2), data_format="channels_last")(conv1)
        # --- Contracting path, stage 2 ---
        conv2 = Conv2D(base_filter_count * 2, (3, 3), padding="same", activation="relu",
                       data_format="channels_last")(pool1)
        conv2 = Conv2D(base_filter_count * 2, (3, 3), padding="same", activation="relu",
                       data_format="channels_last")(conv2)
        pool2 = MaxPooling2D(pool_size=(
            2, 2), data_format="channels_last")(conv2)
        # --- Contracting path, stage 3 ---
        conv3 = Conv2D(base_filter_count * 4, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(pool2)
        conv3 = Conv2D(base_filter_count * 4, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(conv3)
        pool3 = MaxPooling2D(pool_size=(
            2, 2), data_format="channels_last")(conv3)
        # --- Contracting path, stage 4 ---
        conv4 = Conv2D(base_filter_count * 8, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(pool3)
        conv4 = Conv2D(base_filter_count * 8, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(conv4)
        pool4 = MaxPooling2D(pool_size=(
            2, 2), data_format="channels_last")(conv4)
        # --- Bottleneck ---
        conv5 = Conv2D(base_filter_count * 16, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(pool4)
        conv5 = Conv2D(base_filter_count * 16, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(conv5)
        # --- Expanding path: upsample, crop the skip connection to match,
        # concatenate, then two 3x3 convs per stage ---
        up_conv5 = UpSampling2D(
            size=(2, 2), data_format="channels_last")(conv5)
        ch, cw = self.__crop_shape(conv4, up_conv5)
        crop_conv4 = Cropping2D(
            cropping=(ch, cw), data_format="channels_last")(conv4)
        up6 = concatenate([up_conv5, crop_conv4], axis=concat_axis)
        conv6 = Conv2D(base_filter_count * 8, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(up6)
        conv6 = Conv2D(base_filter_count * 8, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(conv6)
        up_conv6 = UpSampling2D(
            size=(2, 2), data_format="channels_last")(conv6)
        ch, cw = self.__crop_shape(conv3, up_conv6)
        crop_conv3 = Cropping2D(
            cropping=(ch, cw), data_format="channels_last")(conv3)
        up7 = concatenate([up_conv6, crop_conv3], axis=concat_axis)
        conv7 = Conv2D(base_filter_count * 4, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(up7)
        conv7 = Conv2D(base_filter_count * 4, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(conv7)
        up_conv7 = UpSampling2D(
            size=(2, 2), data_format="channels_last")(conv7)
        ch, cw = self.__crop_shape(conv2, up_conv7)
        crop_conv2 = Cropping2D(
            cropping=(ch, cw), data_format="channels_last")(conv2)
        up8 = concatenate([up_conv7, crop_conv2], axis=concat_axis)
        conv8 = Conv2D(base_filter_count * 2, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(up8)
        conv8 = Conv2D(base_filter_count * 2, (3, 3), padding="same", activation="relu",
                       data_format="channels_last")(conv8)
        up_conv8 = UpSampling2D(
            size=(2, 2), data_format="channels_last")(conv8)
        ch, cw = self.__crop_shape(conv1, up_conv8)
        crop_conv1 = Cropping2D(
            cropping=(ch, cw), data_format="channels_last")(conv1)
        up9 = concatenate([up_conv8, crop_conv1], axis=concat_axis)
        conv9 = Conv2D(base_filter_count, (3, 3), padding="same",
                       activation="relu", data_format="channels_last")(up9)
        conv9 = Conv2D(base_filter_count, (3, 3), padding="same", activation="relu",
                       data_format="channels_last")(conv9)
        # Per-pixel 1-unit projection, then pad height back to 769
        # (769 is odd, so one row is lost to the pooling/upsampling round trip).
        dense1 = Dense(1, activation='relu')(conv9)
        padding = ZeroPadding2D(((1, 0), (0, 0)))(dense1)
        #flatten = Flatten()(conv9)
        #dense1 = Dense(64, activation='relu')(flatten)
        #bn = BatchNormalization()(dense1)
        #dense2 = Dense(769 * transformation.step, activation='sigmoid')(bn)
        #output = Reshape((769, transformation.step, 1))(dense2)
        return Model(input=input, output=padding)

    def __crop_shape(self, target, refer):
        """Return ((top, bottom), (left, right)) crop sizes that shrink
        *target* to the spatial shape of *refer* (odd deltas put the extra
        row/column on the bottom/right)."""
        # 3rd dimension (width)
        cw = (target.get_shape()[2] - refer.get_shape()[2]).value
        assert (cw >= 0)
        if cw % 2 != 0:
            cw1, cw2 = cw // 2, cw // 2 + 1
        else:
            cw1, cw2 = cw // 2, cw // 2
        # 2nd dimension (height)
        ch = (target.get_shape()[1] - refer.get_shape()[1]).value
        assert (ch >= 0)
        if ch % 2 != 0:
            ch1, ch2 = ch// 2, ch // 2 + 1
        else:
            ch1, ch2 = ch // 2, ch // 2
        return (ch1, ch2), (cw1, cw2)
| 44.834586
| 88
| 0.589301
|
acfe736abfd681df122e043052682382525a963c
| 1,044
|
py
|
Python
|
exemplo2.py
|
Nosbielc/DoNotDoze
|
62038d401d13bf9ece1ecc5ef070a744b83d176d
|
[
"MIT"
] | null | null | null |
exemplo2.py
|
Nosbielc/DoNotDoze
|
62038d401d13bf9ece1ecc5ef070a744b83d176d
|
[
"MIT"
] | null | null | null |
exemplo2.py
|
Nosbielc/DoNotDoze
|
62038d401d13bf9ece1ecc5ef070a744b83d176d
|
[
"MIT"
] | null | null | null |
import cv2
# Load Haar cascade classifiers for frontal faces and for eyes.
classificadorFace = cv2.CascadeClassifier('cascades//haarcascade_frontalface_default.xml')
classificadorOlhos = cv2.CascadeClassifier('cascades//haarcascade_eye.xml')
# Read the demo image and convert it to grayscale (Haar detection input).
imagem = cv2.imread('pessoas//pessoas4.jpg')
imagemCinza = cv2.cvtColor(imagem, cv2.COLOR_BGR2GRAY)
faceDectadas = classificadorFace.detectMultiScale(imagemCinza , scaleFactor=1.1,
                                                  minNeighbors=9, minSize=(30,30))
for (x , y, l, a) in faceDectadas:
    # Draw a red rectangle around each detected face.
    imagem = cv2.rectangle(imagem, (x, y), (x + l, y + a), (0, 0, 255), 2)
    # Search for eyes only inside the face region (face-relative coords).
    regiao = imagem[y:y+a, x:x+ l]
    regiaoCinzaOlho = cv2.cvtColor(regiao, cv2.COLOR_BGR2GRAY)
    olhosDectados = classificadorOlhos.detectMultiScale(regiaoCinzaOlho, scaleFactor=1.1,
                                                        minNeighbors=2)
    print(len(olhosDectados))
    print(olhosDectados)
    for (ox, oy, ol, oa) in olhosDectados:
        # Draw a magenta rectangle around each detected eye.
        cv2.rectangle(regiao, (ox, oy), (ox + ol, oy + oa), (255, 0, 255), 2)
# Show the annotated image and wait for a key press before exiting.
cv2.imshow("Faces e olhos detectados", imagem)
cv2.waitKey()
| 38.666667
| 90
| 0.655172
|
acfe7594e87c84822c1e714bdee1113d40584e6e
| 7,647
|
py
|
Python
|
HyAsP/preprocess_fastq.py
|
fmaguire/HyAsP
|
76029d7e4052262ecbe0c19b6aeba2ad17a75c2d
|
[
"MIT"
] | 7
|
2019-07-16T21:58:22.000Z
|
2022-03-31T16:08:51.000Z
|
HyAsP/preprocess_fastq.py
|
fmaguire/HyAsP
|
76029d7e4052262ecbe0c19b6aeba2ad17a75c2d
|
[
"MIT"
] | 3
|
2019-10-27T03:03:06.000Z
|
2019-12-17T16:43:56.000Z
|
HyAsP/preprocess_fastq.py
|
fmaguire/HyAsP
|
76029d7e4052262ecbe0c19b6aeba2ad17a75c2d
|
[
"MIT"
] | 7
|
2019-07-10T23:05:36.000Z
|
2021-07-14T07:35:35.000Z
|
#!/usr/bin/env python
# Performs adapter trimming, quality filtering and length filtering with Trim Galore and sickle.
# Different combinations of (un)paired short and long reads are possible as input.
#
# The default quality and length thresholds were chosen as in Trim Galore and sickle.
#
# Requirements:
# - sickle (sickle / --sickle; tested with v1.33)
# - cutadapt (cutadapt / --cutadapt, version 1.16)
# - Trim Galore (trim_galore / --trim_galore; tested with v0.4.5_dev)
# - standard UNIX tools (rm, cat)
#
# sickle and Trim Galore have to be in $PATH or explicitly specified using the respective path options.
import os.path
from subprocess import call
# default values / constants
DEF_SICKLE_PATH = 'sickle'  # sickle executable, resolved via $PATH by default
DEF_CUTADAPT_PATH = 'cutadapt'  # cutadapt executable, passed to Trim Galore
DEF_TRIM_GALORE_PATH = 'trim_galore'  # Trim Galore executable
DEF_QUAL_THRESHOLD = 20  # quality cutoff; matches the tools' own defaults
DEF_MIN_LENGTH = 20  # minimum read length kept after trimming
DEF_VERBOSE = False
# preprocess the FASTQ reads: (1) Trim Galore (with cutadapt), (2) sickle
def preprocess(out_dir, first_short_reads = '', second_short_reads = '', single_short_reads = '', long_reads = '',
               qual_threshold = DEF_QUAL_THRESHOLD, min_length = DEF_MIN_LENGTH, verbose = DEF_VERBOSE,
               sickle = DEF_SICKLE_PATH, cutadapt = DEF_CUTADAPT_PATH, trim_galore = DEF_TRIM_GALORE_PATH):
    """Run adapter trimming (Trim Galore) and quality/length filtering (sickle)
    on any combination of paired short, unpaired short and long FASTQ reads.

    Empty-string arguments mean "this read set is absent". Returns a tuple
    (first_final, second_final, single_short_final, long_final) of output
    file paths; entries stay '' for read sets that were not processed.

    NOTE(review): every external tool is invoked through `call(..., shell=True)`
    with paths interpolated into the command string — file names containing
    spaces or shell metacharacters will break or be interpreted by the shell.
    Consider list-based subprocess invocations if inputs are untrusted.
    """
    first_final = ''
    second_final = ''
    single_short_final = ''
    long_final = ''
    # Paired short reads: Trim Galore in --paired mode, then sickle pe.
    if first_short_reads != '' and second_short_reads != '':
        first_tg = os.path.join(out_dir, os.path.splitext(os.path.basename(first_short_reads))[0] + '_val_1.fq') # name of Trim Galore output file for first_short_reads
        first_final = os.path.join(out_dir, os.path.splitext(os.path.basename(first_short_reads))[0] + '_preprocessed.fastq') # name of overall preprocessing output file for first_short_reads
        second_tg = os.path.join(out_dir, os.path.splitext(os.path.basename(second_short_reads))[0] + '_val_2.fq') # name of Trim Galore output file for second_short_reads
        second_final = os.path.join(out_dir, os.path.splitext(os.path.basename(second_short_reads))[0] + '_preprocessed.fastq') # name of overall preprocessing output file for second_short_reads
        unpaired_12_final = os.path.join(out_dir, 'unpaired_12_preprocessed.fastq') # extra file generated by sickle that contains reads that passed filter only in either the forward or reverse direction
        if verbose:
            print('Preprocessing %s and %s with Trim Galore...' % (first_short_reads, second_short_reads))
        call('%s --paired -q %i --length %i -o %s --path_to_cutadapt %s %s %s' % (trim_galore, qual_threshold, min_length, out_dir, cutadapt, first_short_reads, second_short_reads), shell = True)
        if verbose:
            print('Preprocessing %s and %s with sickle...' % (first_short_reads, second_short_reads))
        call('%s pe -f %s -r %s -o %s -p %s -s %s -t sanger -q %f -l %f' % (sickle, first_tg, second_tg, first_final, second_final, unpaired_12_final, qual_threshold, min_length), shell = True)
        call('rm %s %s' % (first_tg, second_tg), shell = True)
        # Reads surviving in only one direction are collected as singles.
        if os.path.isfile(unpaired_12_final):
            single_short_final = os.path.join(out_dir, 'single_short_preprocessed.fastq')
            call('cat %s > %s' % (unpaired_12_final, single_short_final), shell = True)
    # Unpaired short reads: single-end Trim Galore + sickle se, then appended
    # to the combined singles file.
    if single_short_reads != '':
        single_tg = os.path.join(out_dir, os.path.splitext(os.path.basename(single_short_reads))[0] + '_trimmed.fq') # name of Trim Galore output file for single_short_reads
        single_final = os.path.join(out_dir, os.path.splitext(os.path.basename(single_short_reads))[0] + '_preprocessed.fastq') # name of overall preprocessing output file for single_short_reads
        if verbose:
            print('Preprocessing %s with Trim Galore...' % single_short_reads)
        call('%s -q %i --length %i -o %s --path_to_cutadapt %s %s' % (trim_galore, qual_threshold, min_length, out_dir, cutadapt, single_short_reads), shell = True)
        if verbose:
            print('Preprocessing %s with sickle...' % single_short_reads)
        call('%s se -f %s -o %s -t sanger -q %f -l %f' % (sickle, single_tg, single_final, qual_threshold, min_length), shell = True)
        call('rm %s' % single_tg, shell = True)
        single_short_final = os.path.join(out_dir, 'single_short_preprocessed.fastq')
        call('cat %s >> %s' % (single_final, single_short_final), shell = True)
    # Long reads: same single-end pipeline, separate output file.
    if long_reads != '':
        long_tg = os.path.join(out_dir, os.path.splitext(os.path.basename(long_reads))[0] + '_trimmed.fq') # name of Trim Galore output file for long_reads
        long_final = os.path.join(out_dir, os.path.splitext(os.path.basename(long_reads))[0] + '_preprocessed.fq') # name of overall preprocessing output file for long_reads
        if verbose:
            print('Preprocessing %s with Trim Galore...' % long_reads)
        call('%s -q %i --length %i -o %s --path_to_cutadapt %s %s' % (trim_galore, qual_threshold, min_length, out_dir, cutadapt, long_reads), shell = True)
        if verbose:
            print('Preprocessing %s with sickle...' % long_reads)
        call('%s se -f %s -o %s -t sanger -q %f -l %f' % (sickle, long_tg, long_final, qual_threshold, min_length), shell = True)
        call('rm %s' % long_tg, shell = True)
    return first_final, second_final, single_short_final, long_final
# Command-line interface: validate the read-set combination, then run the
# preprocessing pipeline.
if __name__ == '__main__':
    import argparse

    argparser = argparse.ArgumentParser()
    argparser.add_argument('out_dir', help = 'output directory')
    argparser.add_argument('-1', '--first_short_reads', default = '', help = 'first reads of paired FASTQ read data (if any)')
    argparser.add_argument('-2', '--second_short_reads', default = '', help = 'second reads of paired FASTQ read data (if any)')
    argparser.add_argument('-s', '--single_short_reads', default = '', help = 'unpaired FASTQ read data (if any)')
    argparser.add_argument('-l', '--long_reads', default = '', help = 'long FASTQ read data (if any)')
    argparser.add_argument('-q', '--qual_threshold', type = float, default = DEF_QUAL_THRESHOLD, help = 'threshold for trimming low-quality ends')
    argparser.add_argument('-m', '--min_length', type = float, default = DEF_MIN_LENGTH, help = 'minimum length of reads after quality / adapter trimming')
    argparser.add_argument('--verbose', action = 'store_true', help = 'print more information')
    argparser.add_argument('--sickle', default = DEF_SICKLE_PATH, help = 'path to sickle executable')
    argparser.add_argument('--cutadapt', default = DEF_CUTADAPT_PATH, help = 'path to cutadapt executable')
    argparser.add_argument('--trim_galore', default = DEF_TRIM_GALORE_PATH, help = 'path to Trim Galore executable')
    args = argparser.parse_args()

    # At least one read set must be given, and paired data needs both mates.
    if args.first_short_reads == '' and args.second_short_reads == '' and args.single_short_reads == '' and args.long_reads == '':
        print('ERROR: No read data is specified (at least -1 / -2 or -s or -l have to be used).')
    elif args.first_short_reads != '' and args.second_short_reads == '' or args.first_short_reads == '' and args.second_short_reads != '':
        print('ERROR: Specified paired read data is incomplete. Both options -1 and -2 have to be used when specifying paired read data.')
    else:
        preprocess(args.out_dir, args.first_short_reads, second_short_reads = args.second_short_reads,
                   single_short_reads = args.single_short_reads, long_reads = args.long_reads,
                   qual_threshold = args.qual_threshold, min_length = args.min_length, verbose = args.verbose,
                   sickle = args.sickle, cutadapt = args.cutadapt, trim_galore = args.trim_galore)
| 66.495652
| 203
| 0.695828
|
acfe75fe33e15fa66676478098c96da26798de03
| 10,052
|
py
|
Python
|
swift/common/middleware/ratelimit.py
|
CiscoSystems/swift
|
d5067017f0509129d8d3e41aeff5d7c2a634643e
|
[
"Apache-2.0"
] | null | null | null |
swift/common/middleware/ratelimit.py
|
CiscoSystems/swift
|
d5067017f0509129d8d3e41aeff5d7c2a634643e
|
[
"Apache-2.0"
] | null | null | null |
swift/common/middleware/ratelimit.py
|
CiscoSystems/swift
|
d5067017f0509129d8d3e41aeff5d7c2a634643e
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import eventlet
from webob import Request, Response
from webob.exc import HTTPNotFound
from swift.common.utils import split_path, cache_from_env, get_logger
from swift.proxy.server import get_container_memcache_key
from swift.common.memcached import MemcacheConnectionError
class MaxSleepTimeHitError(Exception):
    """Raised when the computed sleep time exceeds max_sleep_time_seconds."""
    pass
class RateLimitMiddleware(object):
"""
Rate limiting middleware
Rate limits requests on both an Account and Container level. Limits are
configurable.
"""
BLACK_LIST_SLEEP = 1
    def __init__(self, app, conf, logger=None):
        """
        :param app: next WSGI application in the pipeline
        :param conf: dict of middleware configuration options
        :param logger: optional logger; created from conf when omitted
        """
        self.app = app
        if logger:
            self.logger = logger
        else:
            self.logger = get_logger(conf, log_route='ratelimit')
        self.account_ratelimit = float(conf.get('account_ratelimit', 0))
        self.max_sleep_time_seconds = \
            float(conf.get('max_sleep_time_seconds', 60))
        self.log_sleep_time_seconds = \
            float(conf.get('log_sleep_time_seconds', 0))
        self.clock_accuracy = int(conf.get('clock_accuracy', 1000))
        self.rate_buffer_seconds = int(conf.get('rate_buffer_seconds', 5))
        self.ratelimit_whitelist = [acc.strip() for acc in
            conf.get('account_whitelist', '').split(',') if acc.strip()]
        self.ratelimit_blacklist = [acc.strip() for acc in
            conf.get('account_blacklist', '').split(',') if acc.strip()]
        # Set by the surrounding WSGI machinery before use (per request).
        self.memcache_client = None
        # Collect container_ratelimit_<SIZE> = <RATE> options as
        # (size, rate) pairs, sorted by size.
        conf_limits = []
        for conf_key in conf.keys():
            if conf_key.startswith('container_ratelimit_'):
                cont_size = int(conf_key[len('container_ratelimit_'):])
                rate = float(conf[conf_key])
                conf_limits.append((cont_size, rate))
        conf_limits.sort()
        # Build piecewise-linear interpolators between consecutive points;
        # the last configured point yields a constant rate.
        self.container_ratelimits = []
        while conf_limits:
            cur_size, cur_rate = conf_limits.pop(0)
            if conf_limits:
                next_size, next_rate = conf_limits[0]
                slope = (float(next_rate) - float(cur_rate)) \
                    / (next_size - cur_size)

                def new_scope(cur_size, slope, cur_rate):
                    # making new scope for variables (avoids late binding
                    # of the loop variables in the returned lambda)
                    return lambda x: (x - cur_size) * slope + cur_rate
                line_func = new_scope(cur_size, slope, cur_rate)
            else:
                # Safe without a new scope: this is the loop's last
                # iteration, so cur_rate never changes afterwards.
                line_func = lambda x: cur_rate
            self.container_ratelimits.append((cur_size, cur_rate, line_func))
def get_container_maxrate(self, container_size):
"""
Returns number of requests allowed per second for given container size.
"""
last_func = None
if container_size:
container_size = int(container_size)
for size, rate, func in self.container_ratelimits:
if container_size < size:
break
last_func = func
if last_func:
return last_func(container_size)
return None
def get_ratelimitable_key_tuples(self, req_method, account_name,
container_name=None, obj_name=None):
"""
Returns a list of key (used in memcache), ratelimit tuples. Keys
should be checked in order.
:param req_method: HTTP method
:param account_name: account name from path
:param container_name: container name from path
:param obj_name: object name from path
"""
keys = []
# COPYs are not limited
if self.account_ratelimit and \
account_name and container_name and not obj_name and \
req_method in ('PUT', 'DELETE'):
keys.append(("ratelimit/%s" % account_name,
self.account_ratelimit))
if account_name and container_name and obj_name and \
req_method in ('PUT', 'DELETE', 'POST'):
container_size = None
memcache_key = get_container_memcache_key(account_name,
container_name)
container_info = self.memcache_client.get(memcache_key)
if isinstance(container_info, dict):
container_size = container_info.get('container_size', 0)
container_rate = self.get_container_maxrate(container_size)
if container_rate:
keys.append(("ratelimit/%s/%s" % (account_name,
container_name),
container_rate))
return keys
def _get_sleep_time(self, key, max_rate):
'''
Returns the amount of time (a float in seconds) that the app
should sleep.
:param key: a memcache key
:param max_rate: maximum rate allowed in requests per second
:raises: MaxSleepTimeHitError if max sleep time is exceeded.
'''
try:
now_m = int(round(time.time() * self.clock_accuracy))
time_per_request_m = int(round(self.clock_accuracy / max_rate))
running_time_m = self.memcache_client.incr(key,
delta=time_per_request_m)
need_to_sleep_m = 0
if (now_m - running_time_m >
self.rate_buffer_seconds * self.clock_accuracy):
next_avail_time = int(now_m + time_per_request_m)
self.memcache_client.set(key, str(next_avail_time),
serialize=False)
else:
need_to_sleep_m = \
max(running_time_m - now_m - time_per_request_m, 0)
max_sleep_m = self.max_sleep_time_seconds * self.clock_accuracy
if max_sleep_m - need_to_sleep_m <= self.clock_accuracy * 0.01:
# treat as no-op decrement time
self.memcache_client.decr(key, delta=time_per_request_m)
raise MaxSleepTimeHitError("Max Sleep Time Exceeded: %.2f" %
(float(need_to_sleep_m) / self.clock_accuracy))
return float(need_to_sleep_m) / self.clock_accuracy
except MemcacheConnectionError:
return 0
def handle_ratelimit(self, req, account_name, container_name, obj_name):
'''
Performs rate limiting and account white/black listing. Sleeps
if necessary.
:param account_name: account name from path
:param container_name: container name from path
:param obj_name: object name from path
'''
if account_name in self.ratelimit_blacklist:
self.logger.error(_('Returning 497 because of blacklisting: %s'),
account_name)
eventlet.sleep(self.BLACK_LIST_SLEEP)
return Response(status='497 Blacklisted',
body='Your account has been blacklisted', request=req)
if account_name in self.ratelimit_whitelist:
return None
for key, max_rate in self.get_ratelimitable_key_tuples(
req.method, account_name, container_name=container_name,
obj_name=obj_name):
try:
need_to_sleep = self._get_sleep_time(key, max_rate)
if self.log_sleep_time_seconds and \
need_to_sleep > self.log_sleep_time_seconds:
self.logger.warning(_("Ratelimit sleep log: %(sleep)s for "
"%(account)s/%(container)s/%(object)s"),
{'sleep': need_to_sleep, 'account': account_name,
'container': container_name, 'object': obj_name})
if need_to_sleep > 0:
eventlet.sleep(need_to_sleep)
except MaxSleepTimeHitError, e:
self.logger.error(_('Returning 498 for %(meth)s to '
'%(acc)s/%(cont)s/%(obj)s . Ratelimit (Max Sleep) %(e)s'),
{'meth': req.method, 'acc': account_name,
'cont': container_name, 'obj': obj_name, 'e': str(e)})
error_resp = Response(status='498 Rate Limited',
body='Slow down', request=req)
return error_resp
return None
def __call__(self, env, start_response):
"""
WSGI entry point.
Wraps env in webob.Request object and passes it down.
:param env: WSGI environment dictionary
:param start_response: WSGI callable
"""
req = Request(env)
if self.memcache_client is None:
self.memcache_client = cache_from_env(env)
if not self.memcache_client:
self.logger.warning(
_('Warning: Cannot ratelimit without a memcached client'))
return self.app(env, start_response)
try:
version, account, container, obj = split_path(req.path, 1, 4, True)
except ValueError:
return self.app(env, start_response)
ratelimit_resp = self.handle_ratelimit(req, account, container, obj)
if ratelimit_resp is None:
return self.app(env, start_response)
else:
return ratelimit_resp(env, start_response)
def filter_factory(global_conf, **local_conf):
"""
paste.deploy app factory for creating WSGI proxy apps.
"""
conf = global_conf.copy()
conf.update(local_conf)
def limit_filter(app):
return RateLimitMiddleware(app, conf)
return limit_filter
| 41.709544
| 79
| 0.59809
|
acfe76002ec2fb289da428fa0fc4bac93367ad94
| 6,767
|
py
|
Python
|
src/main/managers/conflict/base_conflict_manager.py
|
malcolmwhite/DungeonsAndDragons
|
f38f36f29b72a2228005756015d400f49e4339b6
|
[
"MIT"
] | 4
|
2015-09-09T23:22:26.000Z
|
2020-01-04T20:01:06.000Z
|
src/main/managers/conflict/base_conflict_manager.py
|
malcolmwhite/DungeonsAndDragons
|
f38f36f29b72a2228005756015d400f49e4339b6
|
[
"MIT"
] | null | null | null |
src/main/managers/conflict/base_conflict_manager.py
|
malcolmwhite/DungeonsAndDragons
|
f38f36f29b72a2228005756015d400f49e4339b6
|
[
"MIT"
] | null | null | null |
import logging
from random import shuffle
from src.main.utils.utils import join_multi_line_strings
class BaseConflictManager(object):
"""
Abstract class with core functionality for managing conflicts.
Implementations must implement :py:meth:`base_conflict_manager.BaseConflictManager._pick_conflict` and
:py:meth:`base_conflict_manager.BaseConflictManager._sort_players`.
Attributes:
player_manager (Implementation of BasePlayerManager): Builds and maintains collection of players
"""
def __init__(self, player_manager):
self.player_manager = player_manager
self.LOG = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
def run_conflict(self):
"""
Runs conflict. Conflict will continue until one player remains
:return: (BasePlayer) winning player
"""
active_players = self.player_manager.get_active_players()
while len(active_players) > 1:
self._run_round(active_players)
active_players = self.player_manager.get_active_players()
return active_players[0]
def _determine_player_to_challenge(self, challenger, players):
"""
Abstract method for specifying which player a given player will challenge
:param challenger (BasePlayer): Player picking a player to challenge
:param players: List of players to challenge
:raise NotImplementedError: Method is abstract and must be overridden
"""
raise NotImplementedError("_pick_conflict has not been implemented.")
def _run_round(self, players):
"""
Execute a round of conflicts for the given players.
:param players: List of players
"""
self.LOG.info("Beginning round.")
players = self._order_players_for_new_round(players)
challenge_map = self._build_challenge_map(players)
for challenger in players:
if self.player_manager.get_num_active_players() > 1:
challenged = challenge_map[challenger]
self._run_confrontation(challenger, challenged)
def _build_challenge_map(self, players):
"""
Maps players to the players they wish to challenge.
Override :py:meth:`base_conflict_manager.BaseConflictManager._pick_conflict` to specify behavior
:param players: list of challenging players
:return: Map of challenging player to challenged player
"""
challenge_map = dict()
for index, challenger in enumerate(players):
challenged = self._determine_player_to_challenge(challenger, players)
challenge_map[challenger] = challenged
return challenge_map
def _run_confrontation(self, challenger, challenged):
"""
Enact a confrontation between two players.
:param challenger: Challenging player
:param challenged: Challenged player
"""
challenger.initialize_confrontation()
challenged.initialize_confrontation()
self.LOG.info("Beginning confrontation. %s challenging %s.", challenger.NAME, challenged.NAME)
if not challenger.is_active():
self.LOG.info("Challenger %s is not active.", challenger.NAME)
elif not challenged.is_active():
self.LOG.info("Challenged player %s is not active.", challenged.NAME)
else:
spook_rate, spook_power = challenged.get_spook_rate_and_power()
spook_success = challenger.receive_spook(spook_rate, spook_power)
if spook_success:
self.LOG.info("%s spooked %s.", challenged.NAME, challenger.NAME)
attack_points = challenger.get_effective_attack()
damage_inflicted = challenged.receive_attack(attack_points)
self.LOG.info("%s inflicted %d damage to %s.", challenger.NAME, damage_inflicted, challenged.NAME)
if not challenged.is_active():
self.LOG.info("%s has defeated %s and takes all of their items.", challenger.NAME, challenged.NAME)
items_won = challenged.dump_all_items()
challenger.add_items(items_won)
BaseConflictManager._finalize_confrontation(challenger, challenged)
@staticmethod
def _finalize_confrontation(challenger, challenged):
"""
Finalize players and log confrontation results.
:param challenger: Challenging player
:param challenged: Challenged player
"""
challenger.finalize_confrontation()
challenged.finalize_confrontation()
BaseConflictManager._log_player_results(True, challenger, challenged)
@staticmethod
def _log_player_results(only_active, *players):
"""
Log the summaries of the given players.
:param only_active (bool): Indicates if results should only be shown for active players
:param players: List of players
"""
players = list(players)
players.sort(key=lambda p: p.NAME)
cell_width = 25
overall_summary = []
for player in players:
if player.is_active() or not only_active:
player_summary = player.get_summary()
overall_summary.append(player_summary)
print join_multi_line_strings(overall_summary, cell_width)
def _order_players_for_new_round(self, players):
# Sort players by priority
players.sort(key=lambda p: p.get_round_priority(), reverse=True)
# Shuffle within priorities
left_index = 0
last_priority = None
for current_index, player in enumerate(players):
current_priority = player.get_round_priority()
if last_priority is not None:
if last_priority is not current_priority:
right_index = current_index
self._shuffle_slice(players, left_index, right_index)
left_index = right_index
last_priority = current_priority
# shuffle the last section
right_index = len(players) - 1
self._shuffle_slice(players, left_index, right_index)
log_msg = "Player order is: "
for player in players:
log_msg += player.NAME + ", "
# Trim the final space and comma
log_msg = log_msg[:-2]
self.LOG.info(log_msg)
return players
@staticmethod
def _validate_conflict_pair(challenger, challenged):
return challenger.NAME.lower() != challenged.NAME.lower()
@staticmethod
def _shuffle_slice(container, left_index, right_index):
slice_to_shuffle = container[left_index:right_index]
shuffle(slice_to_shuffle)
container[left_index:right_index] = slice_to_shuffle
| 42.29375
| 115
| 0.669277
|
acfe768f0c19bf30041057833288aa0085dc34d7
| 235
|
py
|
Python
|
fastai/callback/all.py
|
PalaashAgrawal/fastai
|
6148ff303d9b8a7fa8730ec01e81820af0515be3
|
[
"Apache-2.0"
] | 23,140
|
2017-09-09T18:23:40.000Z
|
2022-03-31T11:49:36.000Z
|
fastai/callback/all.py
|
PalaashAgrawal/fastai
|
6148ff303d9b8a7fa8730ec01e81820af0515be3
|
[
"Apache-2.0"
] | 3,077
|
2017-09-16T07:08:31.000Z
|
2022-03-31T20:14:16.000Z
|
fastai/callback/all.py
|
PalaashAgrawal/fastai
|
6148ff303d9b8a7fa8730ec01e81820af0515be3
|
[
"Apache-2.0"
] | 8,740
|
2017-09-11T02:19:40.000Z
|
2022-03-31T11:29:18.000Z
|
from .core import *
from .data import *
from .fp16 import *
from .hook import *
from .mixup import *
from .progress import *
from .schedule import *
from .tracker import *
from .rnn import *
from .training import *
from .preds import *
| 21.363636
| 23
| 0.723404
|
acfe76d1cf460cbf015087c4553c2d605c8052dd
| 4,987
|
py
|
Python
|
grocery/customer/serializers.py
|
DeepakDk04/bigbasketClone
|
82820d93876a2c3e6caec2725b1c6078e79e3bfb
|
[
"MIT"
] | null | null | null |
grocery/customer/serializers.py
|
DeepakDk04/bigbasketClone
|
82820d93876a2c3e6caec2725b1c6078e79e3bfb
|
[
"MIT"
] | null | null | null |
grocery/customer/serializers.py
|
DeepakDk04/bigbasketClone
|
82820d93876a2c3e6caec2725b1c6078e79e3bfb
|
[
"MIT"
] | null | null | null |
from deliveryservice.models import DeliveryServicer, DeliveryServicerProfile
from rest_framework.serializers import ModelSerializer
from django.contrib.auth.models import User
from products.models import Product
from order.models import Cart, CartItem, Order, OrderItem
from .models import Customer, CustomerProfile, DeliveryAddress
class ProfileCreateSerializer(ModelSerializer):
class Meta:
model = CustomerProfile
fields = ['id', 'user', 'age', 'gender', 'contactno']
class userModelCustomSerializer(ModelSerializer):
# imported in other modules
class Meta:
model = User
fields = ['id', 'username', 'email',
'first_name', 'last_name']
# class ProfileViewSerializer(ModelSerializer):
# user = userModelCustomSerializer()
# class Meta:
# model = CustomerProfile
# fields = ['id', 'user', 'age', 'gender', 'contactno']
# depth = 1
class ProfileUpdateSerializer(ModelSerializer):
user = userModelCustomSerializer()
class Meta:
model = CustomerProfile
fields = ['id', 'user', 'age', 'gender', 'contactno']
# read_only_fields = ('user',)
# class ProfileDeleteSerializer(ModelSerializer):
# class Meta:
# model = CustomerProfile
# fields = ['id']
class AddressCreateOrViewOrUpdateSerializer(ModelSerializer):
class Meta:
model = DeliveryAddress
fields = ['id', 'doorno', 'street', 'area', 'landmark']
# class AddressDeleteSerializer(ModelSerializer):
# class Meta:
# model = DeliveryAddress
# fields = ['id']
class CustomerCreateOrUpdateSerializer(ModelSerializer):
class Meta:
model = Customer
fields = ['id', 'profile', 'address', 'cart', 'myorders']
# class CustomerUpdateCustom__UserSerializer(ModelSerializer):
# class Meta:
# model = User
# fields = ['id', 'username', 'email', 'first_name', 'last_name']
class CustomerView__ProfileSerializer(ModelSerializer):
user = userModelCustomSerializer()
class Meta:
model = CustomerProfile
fields = '__all__'
class CustomerView__MyOrders__Items__ProductSerializer(ModelSerializer):
class Meta:
model = Product
fields = ('name',)
class CustomerView__MyOrders__ItemsSerializer(ModelSerializer):
product = CustomerView__MyOrders__Items__ProductSerializer()
class Meta:
model = OrderItem
exclude = ('id',)
class CustomerView__MyOrdersSerializer(ModelSerializer):
items = CustomerView__MyOrders__ItemsSerializer(many=True)
class Meta:
model = Order
exclude = ('id', 'orderbycustomer', 'toaddress')
class CustomerView__Cart__Items__ProductSerializer(ModelSerializer):
class Meta:
model = Product
fields = ('name',)
class CustomerView__Cart__ItemsSerializer(ModelSerializer):
product = CustomerView__Cart__Items__ProductSerializer()
class Meta:
model = CartItem
fields = "__all__"
class CustomerView__CartSerializer(ModelSerializer):
items = CustomerView__Cart__ItemsSerializer(many=True)
class Meta:
model = Cart
fields = "__all__"
depth = 1
class CustomerViewSerializer(ModelSerializer):
profile = CustomerView__ProfileSerializer()
myorders = CustomerView__MyOrdersSerializer(many=True)
cart = CustomerView__CartSerializer()
class Meta:
model = Customer
fields = ['id', 'profile', 'address', 'cart', 'myorders']
depth = 2
class CustomerUpdateCustomSerializer(ModelSerializer):
profile = CustomerView__ProfileSerializer()
class Meta:
model = Customer
fields = ['id', 'profile', 'address']
depth = 2
class CustomerDeleteSerializer(ModelSerializer):
class Meta:
model = Customer
fields = ['id']
class OrderShipper__Profile__UserSerializer(ModelSerializer):
# imported in other modules
class Meta:
model = User
fields = ('username', 'email', 'first_name', 'last_name')
class OrderShipper__ProfileSerializer(ModelSerializer):
user = OrderShipper__Profile__UserSerializer()
class Meta:
model = DeliveryServicerProfile
exclude = ('id',)
depth = 1
class CustomerPlacedOrders__OrderShipperSerializer(ModelSerializer):
profile = OrderShipper__ProfileSerializer()
class Meta:
model = DeliveryServicer
fields = ('profile', )
class CustomerPlacedOrders__ToAddressSerializer(ModelSerializer):
class Meta:
model = DeliveryAddress
exclude = ('id',)
class CustomerPlacedOrdersSerializer(ModelSerializer):
items = CustomerView__MyOrders__ItemsSerializer(many=True)
ordershipper = CustomerPlacedOrders__OrderShipperSerializer()
toaddress = CustomerPlacedOrders__ToAddressSerializer()
class Meta:
model = Order
exclude = ('id', 'orderbycustomer')
depth = 2
| 23.303738
| 76
| 0.683377
|
acfe78f5fe69cf7a5c25609fa887aa6186cc5701
| 6,607
|
py
|
Python
|
test/IECoreScene/ObjectInterpolationTest.py
|
carlosg-ie/cortex
|
4c866042bd2a6a6bb2076fbbe46a4e28ce54cc59
|
[
"BSD-3-Clause"
] | null | null | null |
test/IECoreScene/ObjectInterpolationTest.py
|
carlosg-ie/cortex
|
4c866042bd2a6a6bb2076fbbe46a4e28ce54cc59
|
[
"BSD-3-Clause"
] | null | null | null |
test/IECoreScene/ObjectInterpolationTest.py
|
carlosg-ie/cortex
|
4c866042bd2a6a6bb2076fbbe46a4e28ce54cc59
|
[
"BSD-3-Clause"
] | null | null | null |
##########################################################################
#
# Copyright (c) 2007-2009, Image Engine Design Inc. All rights reserved.
# Copyright (c) 2012, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import imath
import IECore
import IECoreScene
class ObjectInterpolationTest( unittest.TestCase ) :
def testPrimitiveInterpolation( self ) :
m1 = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
m2 = IECoreScene.TransformOp()( input=m1, matrix = IECore.M44fData( imath.M44f().scale( imath.V3f( 2 ) ) ) )
m3 = IECore.linearObjectInterpolation( m1, m2, 0.5 )
self.assertEqual( m3, IECoreScene.TransformOp()( input=m1, matrix = IECore.M44fData( imath.M44f().scale( imath.V3f( 1.5 ) ) ) ) )
def testPrimitiveInterpolationMaintainsUninterpolableValuesFromFirstPrimitive( self ) :
m1 = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
m1["c"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Constant, IECore.StringData( "hi" ) )
m2 = m1.copy()
m3 = IECore.linearObjectInterpolation( m1, m2, 0.5 )
self.assertEqual( m3["c"], IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Constant, IECore.StringData( "hi" ) ) )
m2["c"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Constant, IECore.StringData( "bye" ) )
m3 = IECore.linearObjectInterpolation( m1, m2, 0.5 )
self.assertEqual( m3["c"], IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Constant, IECore.StringData( "hi" ) ) )
def testPrimitiveInterpolationMaintainsValuesMissingFromSecondPrimitive( self ) :
m1 = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
m2 = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
m1["v"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, IECore.FloatVectorData( [ 1, 2, 3, 4 ] ) )
m3 = IECore.linearObjectInterpolation( m1, m2, 0.5 )
self.assertEqual( m3["v"], IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, IECore.FloatVectorData( [ 1, 2, 3, 4 ] ) ) )
def testPrimitiveInterpolationWithBlindData( self ) :
m1 = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
m2 = m1.copy()
m1.blindData()["a"] = IECore.FloatData( 10 )
m2.blindData()["a"] = IECore.FloatData( 20 )
m3 = IECore.linearObjectInterpolation( m1, m2, 0.5 )
self.assertEqual( m3.blindData()["a"], IECore.FloatData( 15 ) )
def testPrimitiveInterpolationWithBlindDataMaintainsValuesMissingFromSecondPrimitive( self ) :
m1 = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
m2 = m1.copy()
m1.blindData()["a"] = IECore.FloatData( 10 )
m3 = IECore.linearObjectInterpolation( m1, m2, 0.5 )
self.assertEqual( m3.blindData()["a"], IECore.FloatData( 10 ) )
def testPrimVarsWithDifferingDataArraysAreSkipped( self ):
m1 = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
m2 = m1.copy()
m1["v"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, IECore.FloatVectorData( [ 1, 2, 1, 2] ), IECore.IntVectorData( [ 0, 1, 2, 3 ] ) )
m2["v"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, IECore.FloatVectorData( [ 1, 2 ] ), IECore.IntVectorData( [ 0, 1, 0, 1 ] ) )
m3 = IECore.linearObjectInterpolation( m1, m2, 0.5 )
self.assertTrue( "v" in m3 )
self.assertEqual( m3["v"], m1["v"])
def testPrimVarsWithDifferentIndicesAreSkipped( self ):
m1 = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
m2 = m1.copy()
m1["v"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, IECore.FloatVectorData( [ 1, 2, 3, 4] ), IECore.IntVectorData( [ 0, 1, 2, 3 ] ) )
m2["v"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, IECore.FloatVectorData( [ 4, 3, 2, 1] ), IECore.IntVectorData( [ 3, 2, 1, 0 ] ) )
m3 = IECore.linearObjectInterpolation( m1, m2, 0.5 )
self.assertTrue( "v" in m3 )
self.assertEqual( m3["v"], m1["v"])
def testPrimVarInterpolationChangeSkipped( self ):
m1 = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
m2 = m1.copy()
m1["v"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, IECore.FloatVectorData( [ 1, 2, 3, 4] ), IECore.IntVectorData( [ 0, 1, 2, 3 ] ) )
m2["v"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, IECore.FloatVectorData( [ 4, 3, 2, 1] ) )
m3 = IECore.linearObjectInterpolation( m1, m2, 0.5 )
self.assertTrue( "v" in m3 )
self.assertEqual( m3["v"], m1["v"])
if __name__ == "__main__":
unittest.main()
| 47.532374
| 176
| 0.710005
|
acfe79c4ee768217fe8835f235f88e8656059100
| 6,569
|
py
|
Python
|
src/react/properties.py
|
tomgilbertson/script-server-v1
|
bbdf289d3d993a0c81f20c36bce5f3eb064b0261
|
[
"Apache-2.0",
"CC0-1.0"
] | 833
|
2016-09-08T13:27:36.000Z
|
2022-03-27T07:10:48.000Z
|
src/react/properties.py
|
tomgilbertson/script-server-v1
|
bbdf289d3d993a0c81f20c36bce5f3eb064b0261
|
[
"Apache-2.0",
"CC0-1.0"
] | 528
|
2016-05-23T09:17:04.000Z
|
2022-03-30T12:45:50.000Z
|
src/react/properties.py
|
tomgilbertson/script-server-v1
|
bbdf289d3d993a0c81f20c36bce5f3eb064b0261
|
[
"Apache-2.0",
"CC0-1.0"
] | 214
|
2016-09-08T14:46:41.000Z
|
2022-03-25T01:04:14.000Z
|
from collections import UserList, UserDict
from typing import Optional, Iterable as Iterable, Mapping as Mapping, TypeVar
_T = TypeVar('_T')
_KT = TypeVar('_KT')
_VT = TypeVar('_VT')
class Property:
def __init__(self, value=None):
self._value = value
self._observers = []
self.bound = False
def subscribe(self, observer):
self._observers.append(observer)
def unsubscribe(self, observer):
self._observers.remove(observer)
def set(self, new_value):
if self.bound:
raise Exception('Failed to set value to bound property')
self._set_internal(new_value)
def _set_internal(self, new_value):
old_value = self._value
if old_value == new_value:
return
self._value = new_value
for observer in self._observers:
observer(old_value, new_value)
def get(self):
return self._value
def bind(self, another_property, map_function=None):
def binder(old_value, new_value):
if map_function:
value = map_function(new_value)
else:
value = new_value
self._set_internal(value)
another_property.subscribe(binder)
binder(None, another_property.get())
class ObservableList(UserList):
def __init__(self, initlist: Optional[Iterable[_T]] = None) -> None:
super().__init__()
self._observers = []
if initlist:
self.extend(initlist)
def subscribe(self, observer):
self._observers.append(observer)
def unsubscribe(self, observer):
self._observers.remove(observer)
def append(self, item: _T) -> None:
super().append(item)
for observer in self._observers:
observer.on_add(item, len(self.data) - 1)
def insert(self, i: int, item: _T) -> None:
super().insert(i, item)
for observer in self._observers:
observer.on_add(item, i)
def pop(self, i: int = ...) -> _T:
item = super().pop(i)
for observer in self._observers:
observer.on_remove(item)
return item
def remove(self, item: _T) -> None:
super().remove(item)
for observer in self._observers:
observer.on_remove(item)
def clear(self) -> None:
copy = list(self.data)
super().clear()
for item in copy:
for observer in self._observers:
observer.on_remove(item)
def extend(self, other: Iterable[_T]) -> None:
first_index = len(self.data)
super().extend(other)
for i, item in enumerate(other):
for observer in self._observers:
observer.on_add(item, first_index + i)
class ObservableDict(UserDict):
def __init__(self, dict: Optional[Mapping[_KT, _VT]] = None, **kwargs: _VT) -> None:
super().__init__(**kwargs)
self._observers = []
if dict:
self.update(dict)
def subscribe(self, observer):
self._observers.append(observer)
def unsubscribe(self, observer):
if observer in self._observers:
self._observers.remove(observer)
def set(self, another_dict):
old_values = dict(self)
obsolete_keys = {key for key in self.keys() if key not in another_dict}
for key in obsolete_keys:
super().__delitem__(key)
for key, value in another_dict.items():
super().__setitem__(key, value)
if self._observers:
for obsolete_key in obsolete_keys:
old_value = old_values[obsolete_key]
for observer in self._observers:
observer(obsolete_key, old_value, None)
for key, value in self.items():
old_value = old_values.get(key)
if old_value != value:
for observer in self._observers:
observer(key, old_value, value)
def __setitem__(self, key: _KT, item: _VT) -> None:
old_value = self.get(key)
super().__setitem__(key, item)
if self._observers:
for observer in self._observers:
observer(key, old_value, item)
def __delitem__(self, key: _KT) -> None:
old_value = self.get(key)
super().__delitem__(key)
if old_value is None:
return
if self._observers:
for observer in self._observers:
observer(key, old_value, None)
def observable_fields(*fields):
def wrapper(cls):
def subscribe(self, listener):
if not hasattr(self, '_listeners'):
setattr(self, '_listeners', [])
self._listeners.append(listener)
setattr(cls, 'subscribe', subscribe)
for field_name in fields:
prop_name = field_name + '_prop'
class ObservableProperty:
def __init__(self, prop_name):
self._prop_name = prop_name
def __get__(self, instance, type=None):
if self._prop_name not in instance.__dict__:
p = Property()
setattr(instance, self._prop_name, p)
return p
return instance.__dict__[self._prop_name]
class ObservableValueProperty:
def __init__(self, prop_name, field_name):
self._prop_name = prop_name
self._field_name = field_name
def __get__(self, instance, type=None):
return getattr(instance, self._prop_name).get()
def __set__(self, instance, value, type=None):
property = getattr(instance, self._prop_name)
old_value = property.get()
property.set(value)
if old_value != value:
if hasattr(instance, '_listeners'):
for listener in instance._listeners:
listener(self._field_name, old_value, value)
setattr(cls, prop_name, ObservableProperty(prop_name))
setattr(cls, field_name, ObservableValueProperty(prop_name, field_name))
return cls
return wrapper
def mapped_property(property, map_function):
result = Property()
def updater(old_value, new_value):
new_mapped = map_function(new_value)
result.set(new_mapped)
property.subscribe(updater)
return result
| 28.314655
| 88
| 0.576648
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.