text stringlengths 0 1.05M | meta dict |
|---|---|
from functools import partial
from graphql.utilities import build_schema
from graphql.validation.rules.unique_type_names import UniqueTypeNamesRule
from .harness import assert_sdl_validation_errors
# assert_errors validates an SDL document with only UniqueTypeNamesRule
# enabled; assert_valid additionally expects an empty error list.
assert_errors = partial(assert_sdl_validation_errors, UniqueTypeNamesRule)
assert_valid = partial(assert_errors, errors=[])
def describe_validate_unique_type_names():
    """Tests for the UniqueTypeNamesRule SDL validation rule."""

    def no_types():
        # A document without any type definition cannot conflict.
        assert_valid(
            """
            directive @test on SCHEMA
            """
        )

    def one_type():
        assert_valid(
            """
            type Foo
            """
        )

    def many_types():
        assert_valid(
            """
            type Foo
            type Bar
            type Baz
            """
        )

    def type_and_non_type_definitions_named_the_same():
        # Operations, fragments and directives live in separate namespaces,
        # so they may share the name of a type.
        assert_valid(
            """
            query Foo { __typename }
            fragment Foo on Query { __typename }
            directive @Foo on SCHEMA
            type Foo
            """
        )

    def types_named_the_same():
        # Every duplicate is reported against the location of the first
        # definition (line 2), regardless of the kind of type definition.
        assert_errors(
            """
            type Foo

            scalar Foo
            type Foo
            interface Foo
            union Foo
            enum Foo
            input Foo
            """,
            [
                {
                    "message": "There can be only one type named 'Foo'.",
                    "locations": [(2, 18), (4, 20)],
                },
                {
                    "message": "There can be only one type named 'Foo'.",
                    "locations": [(2, 18), (5, 18)],
                },
                {
                    "message": "There can be only one type named 'Foo'.",
                    "locations": [(2, 18), (6, 23)],
                },
                {
                    "message": "There can be only one type named 'Foo'.",
                    "locations": [(2, 18), (7, 19)],
                },
                {
                    "message": "There can be only one type named 'Foo'.",
                    "locations": [(2, 18), (8, 18)],
                },
                {
                    "message": "There can be only one type named 'Foo'.",
                    "locations": [(2, 18), (9, 19)],
                },
            ],
        )

    def adding_new_type_to_existing_schema():
        schema = build_schema("type Foo")
        assert_valid("type Bar", schema=schema)

    def adding_new_type_to_existing_schema_with_same_named_directive():
        # Directive names do not clash with type names.
        schema = build_schema("directive @Foo on SCHEMA")
        assert_valid("type Foo", schema=schema)

    def adding_conflicting_types_to_existing_schema():
        # A type already present in the base schema may not be redefined
        # by any kind of type definition in the extension document.
        schema = build_schema("type Foo")
        sdl = """
            scalar Foo
            type Foo
            interface Foo
            union Foo
            enum Foo
            input Foo
            """
        assert_errors(
            sdl,
            [
                {
                    "message": "Type 'Foo' already exists in the schema."
                    " It cannot also be defined in this type definition.",
                    "locations": [(2, 20)],
                },
                {
                    "message": "Type 'Foo' already exists in the schema."
                    " It cannot also be defined in this type definition.",
                    "locations": [(3, 18)],
                },
                {
                    "message": "Type 'Foo' already exists in the schema."
                    " It cannot also be defined in this type definition.",
                    "locations": [(4, 23)],
                },
                {
                    "message": "Type 'Foo' already exists in the schema."
                    " It cannot also be defined in this type definition.",
                    "locations": [(5, 19)],
                },
                {
                    "message": "Type 'Foo' already exists in the schema."
                    " It cannot also be defined in this type definition.",
                    "locations": [(6, 18)],
                },
                {
                    "message": "Type 'Foo' already exists in the schema."
                    " It cannot also be defined in this type definition.",
                    "locations": [(7, 19)],
                },
            ],
            schema,
        )
| {
"repo_name": "graphql-python/graphql-core",
"path": "tests/validation/test_unique_type_names.py",
"copies": "1",
"size": "4349",
"license": "mit",
"hash": -4395797047899268600,
"line_mean": 29.2013888889,
"line_max": 74,
"alpha_frac": 0.4200965739,
"autogenerated": false,
"ratio": 5.068764568764569,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 144
} |
from functools import partial
from graphql.validation import VariablesInAllowedPositionRule
from .harness import assert_validation_errors
# assert_errors validates queries with only VariablesInAllowedPositionRule
# enabled; assert_valid additionally expects an empty error list.
assert_errors = partial(assert_validation_errors, VariablesInAllowedPositionRule)
assert_valid = partial(assert_errors, errors=[])
def describe_validate_variables_are_in_allowed_positions():
    """Tests for the VariablesInAllowedPositionRule validation rule."""

    def boolean_to_boolean():
        assert_valid(
            """
            query Query($booleanArg: Boolean)
            {
              complicatedArgs {
                booleanArgField(booleanArg: $booleanArg)
              }
            }
            """
        )

    def boolean_to_boolean_in_fragment():
        # Variable usages inside fragments are checked against the
        # operations that (transitively) spread them, in either order.
        assert_valid(
            """
            fragment booleanArgFrag on ComplicatedArgs {
              booleanArgField(booleanArg: $booleanArg)
            }

            query Query($booleanArg: Boolean)
            {
              complicatedArgs {
                ...booleanArgFrag
              }
            }
            """
        )
        assert_valid(
            """
            query Query($booleanArg: Boolean)
            {
              complicatedArgs {
                ...booleanArgFrag
              }
            }

            fragment booleanArgFrag on ComplicatedArgs {
              booleanArgField(booleanArg: $booleanArg)
            }
            """
        )

    def non_null_boolean_to_boolean():
        # A non-null variable may flow into a nullable position.
        assert_valid(
            """
            query Query($nonNullBooleanArg: Boolean!)
            {
              complicatedArgs {
                booleanArgField(booleanArg: $nonNullBooleanArg)
              }
            }
            """
        )

    def non_null_boolean_to_boolean_within_fragment():
        assert_valid(
            """
            fragment booleanArgFrag on ComplicatedArgs {
              booleanArgField(booleanArg: $nonNullBooleanArg)
            }

            query Query($nonNullBooleanArg: Boolean!)
            {
              complicatedArgs {
                ...booleanArgFrag
              }
            }
            """
        )

    def array_of_string_to_array_of_string():
        assert_valid(
            """
            query Query($stringListVar: [String])
            {
              complicatedArgs {
                stringListArgField(stringListArg: $stringListVar)
              }
            }
            """
        )

    def array_of_non_null_string_to_array_of_string():
        assert_valid(
            """
            query Query($stringListVar: [String!])
            {
              complicatedArgs {
                stringListArgField(stringListArg: $stringListVar)
              }
            }
            """
        )

    def string_to_array_of_string_in_item_position():
        # A variable used inside a list literal is checked against the
        # list's item type.
        assert_valid(
            """
            query Query($stringVar: String)
            {
              complicatedArgs {
                stringListArgField(stringListArg: [$stringVar])
              }
            }
            """
        )

    def non_null_string_to_array_of_string_in_item_position():
        assert_valid(
            """
            query Query($stringVar: String!)
            {
              complicatedArgs {
                stringListArgField(stringListArg: [$stringVar])
              }
            }
            """
        )

    def complex_input_to_complex_input():
        assert_valid(
            """
            query Query($complexVar: ComplexInput)
            {
              complicatedArgs {
                complexArgField(complexArg: $complexVar)
              }
            }
            """
        )

    def complex_input_to_complex_input_in_field_position():
        assert_valid(
            """
            query Query($boolVar: Boolean = false)
            {
              complicatedArgs {
                complexArgField(complexArg: {requiredArg: $boolVar})
              }
            }
            """
        )

    def non_null_boolean_to_non_null_boolean_in_directive():
        assert_valid(
            """
            query Query($boolVar: Boolean!)
            {
              dog @include(if: $boolVar)
            }
            """
        )

    def int_to_non_null_int():
        # Nullable variable in a non-null position is an error; the two
        # locations point at the variable definition and the usage.
        assert_errors(
            """
            query Query($intArg: Int) {
              complicatedArgs {
                nonNullIntArgField(nonNullIntArg: $intArg)
              }
            }
            """,
            [
                {
                    "message": "Variable '$intArg' of type 'Int'"
                    " used in position expecting type 'Int!'.",
                    "locations": [(2, 25), (4, 51)],
                }
            ],
        )

    def int_to_non_null_int_within_fragment():
        assert_errors(
            """
            fragment nonNullIntArgFieldFrag on ComplicatedArgs {
              nonNullIntArgField(nonNullIntArg: $intArg)
            }

            query Query($intArg: Int) {
              complicatedArgs {
                ...nonNullIntArgFieldFrag
              }
            }
            """,
            [
                {
                    "message": "Variable '$intArg' of type 'Int'"
                    " used in position expecting type 'Int!'.",
                    "locations": [(6, 25), (3, 49)],
                }
            ],
        )

    def int_to_non_null_int_within_nested_fragment():
        assert_errors(
            """
            fragment outerFrag on ComplicatedArgs {
              ...nonNullIntArgFieldFrag
            }

            fragment nonNullIntArgFieldFrag on ComplicatedArgs {
              nonNullIntArgField(nonNullIntArg: $intArg)
            }

            query Query($intArg: Int) {
              complicatedArgs {
                ...outerFrag
              }
            }
            """,
            [
                {
                    "message": "Variable '$intArg' of type 'Int'"
                    " used in position expecting type 'Int!'.",
                    "locations": [(10, 25), (7, 49)],
                }
            ],
        )

    def string_to_boolean():
        assert_errors(
            """
            query Query($stringVar: String) {
              complicatedArgs {
                booleanArgField(booleanArg: $stringVar)
              }
            }
            """,
            [
                {
                    "message": "Variable '$stringVar' of type 'String'"
                    " used in position expecting type 'Boolean'.",
                    "locations": [(2, 25), (4, 45)],
                }
            ],
        )

    def string_to_array_of_string():
        assert_errors(
            """
            query Query($stringVar: String) {
              complicatedArgs {
                stringListArgField(stringListArg: $stringVar)
              }
            }
            """,
            [
                {
                    "message": "Variable '$stringVar' of type 'String'"
                    " used in position expecting type '[String]'.",
                    "locations": [(2, 25), (4, 51)],
                }
            ],
        )

    def boolean_to_non_null_boolean_in_directive():
        # Built-in directive arguments are checked too ('if' is Boolean!).
        assert_errors(
            """
            query Query($boolVar: Boolean) {
              dog @include(if: $boolVar)
            }
            """,
            [
                {
                    "message": "Variable '$boolVar' of type 'Boolean'"
                    " used in position expecting type 'Boolean!'.",
                    "locations": [(2, 25), (3, 32)],
                }
            ],
        )

    def string_to_non_null_boolean_in_directive():
        assert_errors(
            """
            query Query($stringVar: String) {
              dog @include(if: $stringVar)
            }
            """,
            [
                {
                    "message": "Variable '$stringVar' of type 'String'"
                    " used in position expecting type 'Boolean!'.",
                    "locations": [(2, 25), (3, 32)],
                }
            ],
        )

    def array_of_string_to_array_of_non_null_string():
        assert_errors(
            """
            query Query($stringListVar: [String])
            {
              complicatedArgs {
                stringListNonNullArgField(stringListNonNullArg: $stringListVar)
              }
            }
            """,
            [
                {
                    "message": "Variable '$stringListVar' of type '[String]'"
                    " used in position expecting type '[String!]'.",
                    "locations": [(2, 25), (5, 65)],
                }
            ],
        )

    def describe_allows_optional_nullable_variables_with_default_values():
        def int_to_non_null_int_fails_when_var_provides_null_default_value():
            # An explicit null default does not satisfy a non-null position.
            assert_errors(
                """
                query Query($intVar: Int = null) {
                  complicatedArgs {
                    nonNullIntArgField(nonNullIntArg: $intVar)
                  }
                }
                """,
                [
                    {
                        "message": "Variable '$intVar' of type 'Int'"
                        " used in position expecting type 'Int!'.",
                        "locations": [(2, 29), (4, 55)],
                    }
                ],
            )

        def int_to_non_null_int_when_var_provides_non_null_default_value():
            assert_valid(
                """
                query Query($intVar: Int = 1) {
                  complicatedArgs {
                    nonNullIntArgField(nonNullIntArg: $intVar)
                  }
                }
                """
            )

        def int_to_non_null_int_when_optional_arg_provides_default_value():
            assert_valid(
                """
                query Query($intVar: Int) {
                  complicatedArgs {
                    nonNullFieldWithDefault(nonNullIntArg: $intVar)
                  }
                }
                """
            )

        def bool_to_non_null_bool_in_directive_with_default_value_with_option():
            assert_valid(
                """
                query Query($boolVar: Boolean = false) {
                  dog @include(if: $boolVar)
                }
                """
            )
| {
"repo_name": "graphql-python/graphql-core",
"path": "tests/validation/test_variables_in_allowed_position.py",
"copies": "1",
"size": "10190",
"license": "mit",
"hash": 7860963099599536000,
"line_mean": 26.9178082192,
"line_max": 81,
"alpha_frac": 0.4226692836,
"autogenerated": false,
"ratio": 4.899038461538462,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5821707745138462,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from greenlet import getcurrent, GreenletExit
from .core import gethub, Condition
from . import sleep, TimeoutError
class Pool(object):
    """Limit concurrent invocations of ``callback`` and abort slow calls.

    Each call runs alongside a companion "killer" greenlet that raises
    TimeoutError into the caller after ``timeout`` seconds.
    """

    def __init__(self, callback, *, limit, timeout):
        self.callback = callback    # callable executed for every pool call
        self.limit = limit          # max concurrent calls (see wait_slot)
        self.timeout = timeout      # seconds before a call is aborted
        self.current = 0            # number of calls currently in flight
        self._cond = Condition()    # notified whenever a call finishes

    def __call__(self, *args, **kw):
        # Run the callback while the killer greenlet counts down the
        # timeout; whichever finishes first cancels the other.
        self.current += 1
        cur = getcurrent()
        killer = gethub().do_spawn(partial(self._timeout, cur))
        try:
            return self.callback(*args, **kw)
        finally:
            if not killer.dead:
                # Reparent the timer onto us and kill it so it can never
                # fire after the call has completed.
                killer.detach().parent = cur
                killer.throw(GreenletExit())
            self.current -= 1
            self._cond.notify()

    def _timeout(self, task):
        # Runs in the killer greenlet: after `timeout` seconds, make
        # `task` our parent so the TimeoutError propagates into it.
        sleep(self.timeout)
        cur = getcurrent()
        cur.parent = task.detach()
        raise TimeoutError()

    def wait_slot(self):
        # Block until the number of running calls drops below the limit.
        # NOTE(review): callers appear to be expected to call this before
        # __call__; __call__ itself does not enforce the limit — confirm.
        while self.current >= self.limit:
            self._cond.wait()
| {
"repo_name": "tailhook/zorro",
"path": "zorro/pool.py",
"copies": "1",
"size": "1037",
"license": "mit",
"hash": 3117820805662120400,
"line_mean": 24.925,
"line_max": 63,
"alpha_frac": 0.5641272903,
"autogenerated": false,
"ratio": 4.164658634538153,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5228785924838153,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from homely._ui import allowinteractive, note, shellquote, warn
from homely._utils import haveexecutable, run
# Public helpers re-exported for use by scripts importing this module.
__all__ = ["haveexecutable", "execute"]
def execute(cmd, stdout=None, stderr=None, expectexit=0, **kwargs):
    """Execute `cmd` in a subprocess.

    Raises a SystemError if the exit code is different to `expectexit`
    (an int, or a collection of acceptable exit codes).

    The stdout and stderr arguments for the most part work just like
    homely._ui.run(), with the main difference being that when stdout=None or
    stderr=None, these two streams will be filtered through the homely's
    logging functions instead of being sent directly to the python process's
    stdout/stderr. Also, the stderr argument will default to "STDOUT" so that
    the timing of the two streams is recorded more accurately.

    If the process absolutely _must_ talk to a TTY, you can use stdout="TTY",
    and a SystemError will be raised if homely is being run in
    non-interactive mode. When using stdout="TTY", you should omit the stderr
    argument.

    Returns a (returncode, out, err) tuple on success.
    """
    def outputhandler(data, isend, prefix):
        # Stream filter: forward complete lines to the homely log, keep any
        # trailing partial line for the next chunk.
        # FIXME: if we only get part of a stream, then we have a potential bug
        # where we only get part of a multi-byte utf-8 character.
        while len(data):
            pos = data.find(b"\n")
            if pos < 0:
                break
            # write out the line
            note(data[0:pos].decode('utf-8'), dash=prefix)
            data = data[pos+1:]
        if isend:
            if len(data):
                note(data.decode('utf-8'), dash=prefix)
        else:
            # return any remaining data so it can be included at the start of
            # the next run
            return data

    if stdout == "TTY":
        if not allowinteractive():
            raise SystemError("cmd wants interactive mode")
        assert stderr is None
        stdout = None
    else:
        if stdout is None:
            prefix = "1> " if stderr is False else "&> "
            stdout = partial(outputhandler, prefix=prefix)
        if stderr is None:
            if stdout in (False, True):
                stderr = partial(outputhandler, prefix="2> ")
            else:
                # merge stderr into stdout so the relative timing of the two
                # streams is preserved in the log
                stderr = "STDOUT"

    # shell-style redirection hints shown in the logged command line
    outredir = ' 1> /dev/null' if stdout is False else ''
    if stderr is None:
        errredir = ' 2>&1'
    else:
        errredir = ' 2> /dev/null' if stderr is False else ''

    with note('{}$ {}{}{}'.format(kwargs.get('cwd', ''),
                                  ' '.join(map(shellquote, cmd)),
                                  outredir,
                                  errredir)):
        returncode, out, err = run(cmd, stdout=stdout, stderr=stderr, **kwargs)
        if type(expectexit) is int:
            exitok = returncode == expectexit
        else:
            exitok = returncode in expectexit
        if exitok:
            return returncode, out, err

        # still need to dump the stdout/stderr if they were captured
        if out is not None:
            outputhandler(out, True, '1> ')
        if err is not None:
            # Bug fix: stderr was previously dumped with the '1> ' prefix,
            # mislabelling it as stdout output in the log.
            outputhandler(err, True, '2> ')
        message = "Unexpected exit code {}. Expected {}".format(
            returncode, expectexit)
        warn(message)
        raise SystemError(message)
| {
"repo_name": "toomuchphp/terraform",
"path": "homely/system.py",
"copies": "3",
"size": "3287",
"license": "mit",
"hash": 6376184155689341000,
"line_mean": 37.2209302326,
"line_max": 79,
"alpha_frac": 0.5792515972,
"autogenerated": false,
"ratio": 4.181933842239186,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6261185439439186,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from .digitalbitbox import DigitalBitboxPlugin
from electrum.i18n import _
from electrum.plugin import hook
from electrum.wallet import Standard_Wallet
class Plugin(DigitalBitboxPlugin, QtPluginBase):
    """Qt GUI glue for the Digital Bitbox hardware-wallet plugin."""

    icon_unpaired = ":icons/digitalbitbox_unpaired.png"  # shown before pairing
    icon_paired = ":icons/digitalbitbox.png"             # shown once paired

    def create_handler(self, window):
        """Return the Qt handler used for device interaction dialogs."""
        return DigitalBitbox_Handler(window)

    @hook
    def receive_menu(self, menu, addrs, wallet):
        """Add a 'Show on <device>' action to the receive-address menu.

        Only offered when exactly one address is selected, the wallet is a
        standard wallet whose keystore belongs to this plugin, the device
        is paired with the mobile app, and the keystore is p2pkh.
        """
        if type(wallet) is not Standard_Wallet:
            return
        keystore = wallet.get_keystore()
        if type(keystore) is not self.keystore_class:
            return
        if not self.is_mobile_paired():
            return
        if not keystore.is_p2pkh():
            return
        if len(addrs) == 1:
            def show_address():
                # run on the keystore thread so it can talk to the device
                keystore.thread.add(partial(self.show_address, wallet, addrs[0], keystore))
            menu.addAction(_("Show on {}").format(self.device), show_address)
class DigitalBitbox_Handler(QtHandlerBase):
    """Qt handler presenting Digital Bitbox prompts to the user."""

    def __init__(self, win):
        super(DigitalBitbox_Handler, self).__init__(win, 'Digital Bitbox')
| {
"repo_name": "asfin/electrum",
"path": "electrum/plugins/digitalbitbox/qt.py",
"copies": "1",
"size": "1216",
"license": "mit",
"hash": 4813961156856652000,
"line_mean": 27.2790697674,
"line_max": 91,
"alpha_frac": 0.6578947368,
"autogenerated": false,
"ratio": 3.8603174603174604,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002527805864509606,
"num_lines": 43
} |
from functools import partial
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from .digitalbitbox import DigitalBitboxPlugin
from electrum.i18n import _
from electrum.plugins import hook
from electrum.wallet import Standard_Wallet
class Plugin(DigitalBitboxPlugin, QtPluginBase):
    """Qt GUI glue for the Digital Bitbox hardware-wallet plugin."""

    icon_unpaired = ":icons/digitalbitbox_unpaired.png"  # shown before pairing
    icon_paired = ":icons/digitalbitbox.png"             # shown once paired

    def create_handler(self, window):
        """Return the Qt handler used for device interaction dialogs."""
        return DigitalBitbox_Handler(window)

    @hook
    def receive_menu(self, menu, addrs, wallet):
        """Add a 'Show on <device>' action to the receive-address menu.

        Only offered when exactly one address is selected, the wallet is a
        standard wallet whose keystore belongs to this plugin, the device
        is paired with the mobile app, and the keystore is p2pkh.
        """
        if type(wallet) is not Standard_Wallet:
            return
        keystore = wallet.get_keystore()
        if type(keystore) is not self.keystore_class:
            return
        if not self.is_mobile_paired():
            return
        if not keystore.is_p2pkh():
            return
        if len(addrs) == 1:
            def show_address():
                # run on the keystore thread so it can talk to the device
                keystore.thread.add(partial(self.show_address, wallet, addrs[0], keystore))
            menu.addAction(_("Show on {}").format(self.device), show_address)
class DigitalBitbox_Handler(QtHandlerBase):
    """Qt handler presenting Digital Bitbox prompts to the user."""

    def __init__(self, win):
        super(DigitalBitbox_Handler, self).__init__(win, 'Digital Bitbox')
| {
"repo_name": "kyuupichan/electrum",
"path": "plugins/digitalbitbox/qt.py",
"copies": "1",
"size": "1217",
"license": "mit",
"hash": -3788515959145935000,
"line_mean": 27.3023255814,
"line_max": 91,
"alpha_frac": 0.6581758422,
"autogenerated": false,
"ratio": 3.8634920634920635,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5021667905692063,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from . import converters, exceptions, filters, validators
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has
from ._make import (
NOTHING,
Attribute,
Factory,
attrib,
attrs,
fields,
fields_dict,
make_class,
validate,
)
from ._version_info import VersionInfo
__version__ = "19.3.0"
__version_info__ = VersionInfo._from_version_string(__version__)

# Package metadata.
__title__ = "attrs"
__description__ = "Classes Without Boilerplate"
__url__ = "https://www.attrs.org/"
__uri__ = __url__
__doc__ = __description__ + " <" + __uri__ + ">"

__author__ = "Hynek Schlawack"
__email__ = "hs@ox.cx"

__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Hynek Schlawack"

# Short, historical aliases for the main entry points.
s = attributes = attrs
ib = attr = attrib

dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)

# Explicit public API of the package.
__all__ = [
    "Attribute",
    "Factory",
    "NOTHING",
    "asdict",
    "assoc",
    "astuple",
    "attr",
    "attrib",
    "attributes",
    "attrs",
    "converters",
    "evolve",
    "exceptions",
    "fields",
    "fields_dict",
    "filters",
    "get_run_validators",
    "has",
    "ib",
    "make_class",
    "s",
    "set_run_validators",
    "validate",
    "validators",
]
| {
"repo_name": "pegasus-isi/pegasus",
"path": "packages/pegasus-common/src/Pegasus/vendor/attr/__init__.py",
"copies": "1",
"size": "1282",
"license": "apache-2.0",
"hash": -9021215339219988000,
"line_mean": 18.7230769231,
"line_max": 64,
"alpha_frac": 0.5904836193,
"autogenerated": false,
"ratio": 3.197007481296758,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4287491100596758,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from .. import query
from ..graph import Link, Nothing
from ..types import TypeRef, Sequence
from ..query import merge
from ..types import Any
from ..engine import Query
from ..expr.refs import RequirementsExtractor
from ..expr.core import to_expr, S, THIS
from ..expr.checker import check, fn_types
from ..expr.compiler import ExpressionCompiler
def _create_result_proc(engine_query, procs, options):
    """Return a deferred callable that materializes the sub-query result.

    For every `this` object in the executed query result, each proc is
    applied together with its pre-resolved option arguments.
    """
    def result_proc():
        result = engine_query.result()
        rows = []
        for this in result[THIS]:
            row = [fn(this, result, *extra)
                   for fn, extra in zip(procs, options)]
            rows.append(row)
        return rows
    return result_proc
def _yield_options(query_field, graph_field):
    """Yield the value of every option declared by `graph_field`.

    Values come from the query field's options, falling back to the
    declared default; a missing required option raises TypeError.
    """
    provided = query_field.options or {}
    for opt in graph_field.options:
        val = provided.get(opt.name, opt.default)
        if val is Nothing:
            msg = 'Required option "{}" for {!r} was not provided'.format(
                opt.name, graph_field)
            raise TypeError(msg)
        yield val
class BoundExpr:
    """An expression bound to a SubGraph but not yet type-checked.

    Created by SubGraph.compile(); replaced by a CheckedExpr on the field
    when __postprocess__ runs.
    """

    def __init__(self, sub_graph, expr):
        self.sub_graph = sub_graph
        self.expr = expr

    def __repr__(self):
        expr, _ = to_expr(self.expr)
        return ('<{}: sub_graph={!r}, expr={!r}>'
                .format(self.__class__.__name__, self.sub_graph, expr))

    def __postprocess__(self, field):
        """Type-check the expression and install a CheckedExpr as field.func."""
        expr, funcs = to_expr(self.expr)

        # Checker environment: helper function types, root field types,
        # field option types, plus the sub-graph node bound to THIS.
        env = fn_types(funcs)
        env.update(self.sub_graph.types['__root__'].__field_types__)
        env.update((opt.name, opt.type or Any) for opt in field.options)
        env[THIS] = TypeRef[self.sub_graph.node]

        expr = check(expr, self.sub_graph.types, env)

        # Requirements are everything the expression reads, excluding the
        # names that are supplied as field options.
        option_names_set = set(opt.name for opt in field.options)
        reqs = RequirementsExtractor.extract(self.sub_graph.types, expr)
        reqs = query.Node([f for f in reqs.fields
                           if f.name not in option_names_set])

        # Compile the checked expression into a callable; the eval'd lambda
        # is partially applied with the helper-function bodies.
        option_names = [opt.name for opt in field.options]
        code = ExpressionCompiler.compile_lambda_expr(expr, option_names)
        proc = partial(eval(compile(code, '<expr>', 'eval')),
                       {f.__def_name__: f.__def_body__ for f in funcs})

        field.func = CheckedExpr(self.sub_graph, expr, reqs, proc)

    def __call__(self, *args, **kwargs):
        # Guard: only the checked form is callable.
        raise TypeError('Expression is not checked: {!r}'.format(self.expr))
class CheckedExpr:
    """A type-checked expression ready for execution.

    Produced by BoundExpr.__postprocess__; carries the owning sub-graph,
    the checked expression, the query requirements it reads, and the
    compiled callable.
    """

    def __init__(self, sub_graph, expr, reqs, proc):
        self.sub_graph = sub_graph
        self.expr = expr
        self.reqs = reqs
        self.proc = proc

    def __repr__(self):
        template = '<{}: sub_graph={!r}, expr={!r}, reqs={!r}>'
        return template.format(type(self).__name__, self.sub_graph,
                               self.expr, self.reqs)

    @property
    def __subquery__(self):
        # Marks this object as a sub-query provider for the engine.
        return self.sub_graph
class SubGraph:
    """Expose a node of another graph as a sub-query data source.

    Calling an instance executes the collected field requirements against
    the wrapped graph and returns a result processor callable.
    """

    def __init__(self, graph, node):
        self.graph = graph
        self.node = node
        self.types = graph.__types__

    def __repr__(self):
        return '<{}: node={!r}>'.format(self.__class__.__name__, self.node)

    @property
    def __subquery__(self):
        return self

    def __postprocess__(self, field):
        # Default expression for a plain field: read the attribute of the
        # same name from `this`.
        BoundExpr(self, getattr(S.this, field.name)).__postprocess__(field)

    def __call__(self, fields, ids, queue, ctx, task_set):
        """Run the sub-query for `ids` and return a result processor.

        `fields` is a sequence of (graph_field, query_field) pairs whose
        graph fields carry CheckedExpr objects in `func`.
        """
        this_graph_link = Link(THIS, Sequence[TypeRef[self.node]], None,
                               requires=None)
        # Merge the requirements of all expressions, then split them into
        # the THIS link and the remaining root-level requirements.
        reqs = merge([gf.func.reqs for gf, _ in fields])
        procs = [gf.func.proc for gf, _ in fields]
        option_values = [[qf.options[opt.name] for opt in gf.options]
                         for gf, qf in fields]
        this_query_link = reqs.fields_map[THIS]
        other_reqs = query.Node([r for r in reqs.fields
                                 if r.name != THIS])
        q = Query(queue, task_set, self.graph, reqs, ctx)
        q.process_link(self.graph.root, this_graph_link, this_query_link,
                       None, ids)
        q.process_node(self.graph.root, other_reqs, None)
        return _create_result_proc(q, procs, option_values)

    def compile(self, expr):
        """Bind `expr` to this sub-graph; checking happens later."""
        return BoundExpr(self, expr)

    def c(self, expr):
        # Short alias for compile().
        return self.compile(expr)
| {
"repo_name": "vmagamedov/hiku",
"path": "hiku/sources/graph.py",
"copies": "1",
"size": "4291",
"license": "bsd-3-clause",
"hash": -6943962251393723000,
"line_mean": 32.2635658915,
"line_max": 76,
"alpha_frac": 0.5791190865,
"autogenerated": false,
"ratio": 3.7313043478260868,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48104234343260865,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from kazoo.exceptions import NoNodeError
from kazoo.protocol.states import EventType
from distconfig.backends.base import BaseBackend
class ZooKeeperBackend(BaseBackend):
    """ZooKeeper backend implementation.

    If you are using gevent, make sure that the kazoo client is setup to
    use gevent event handler e.g. ``KazooClient(..., handler=SequentialGeventHandler())``

    User must call ``KazooClient.start()`` before using the backend.

    :param client: Instance of :class:`kazoo.client.KazooClient`.
    """

    def __init__(self, client, **kwargs):
        super(ZooKeeperBackend, self).__init__(**kwargs)
        self._client = client

    def get_raw(self, path):
        """Return the raw data stored at `path`, installing a watch.

        If the node does not exist, an existence watch is set instead and
        None is returned (implicitly).
        """
        try:
            return self._get_and_watch_path(path)
        except NoNodeError:
            return self._get_and_watch_unexistant_path(path)

    def _get_and_watch_unexistant_path(self, path):
        # Watch for the node's creation; returns None.
        self._client.retry(self._client.exists, path, watch=partial(self._on_path_change, path))

    def _get_and_watch_path(self, path):
        # Fetch data and re-arm the data watch; raises NoNodeError if absent.
        data, _ = self._client.retry(self._client.get, path, watch=partial(self._on_path_change, path))
        return data

    def _on_path_change(self, path, event):
        # Watch callback: re-read (and re-watch) the node, then notify.
        # XXX: Before we could set the new watches the data may have changed already
        # in ZooKeeper backend, in that case we may have missed the notification.
        # AFAIK there is no easy way to work around this.
        # https://zookeeper.apache.org/doc/r3.1.2/zookeeperProgrammers.html#sc_WatchRememberThese
        if event.type == EventType.DELETED:
            data = self._get_and_watch_unexistant_path(path)
        else:
            data = self._get_and_watch_path(path)
        return super(ZooKeeperBackend, self)._notify_listeners(data)
| {
"repo_name": "deliveryhero/distconfig",
"path": "distconfig/backends/zookeeper.py",
"copies": "2",
"size": "1789",
"license": "apache-2.0",
"hash": 7422117309203301000,
"line_mean": 37.0638297872,
"line_max": 103,
"alpha_frac": 0.6746785914,
"autogenerated": false,
"ratio": 3.5708582834331337,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5245536874833133,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from keras import backend as K
def dice_coefficient(y_true, y_pred, smooth=1.):
    """Soerensen-Dice overlap between flattened truth and prediction.

    `smooth` stabilizes the ratio when both inputs are all zeros.
    """
    truth = K.flatten(y_true)
    prediction = K.flatten(y_pred)
    overlap = K.sum(truth * prediction)
    denominator = K.sum(truth) + K.sum(prediction) + smooth
    return (2. * overlap + smooth) / denominator
def dice_coefficient_loss(y_true, y_pred):
    """Negated Dice coefficient, usable as a loss (lower is better)."""
    score = dice_coefficient(y_true, y_pred)
    return -score
def weighted_dice_coefficient(y_true, y_pred, axis=(-3, -2, -1), smooth=0.00001):
    """
    Weighted dice coefficient. Default axis assumes a "channels first" data structure

    The per-channel Dice scores are averaged, so each channel (label)
    contributes equally regardless of its volume.

    :param smooth: stabilizer added to numerator and denominator
    :param y_true: ground-truth tensor
    :param y_pred: prediction tensor
    :param axis: spatial axes reduced per channel
    :return: mean Dice coefficient over the remaining (channel) axes
    """
    return K.mean(2. * (K.sum(y_true * y_pred,
                              axis=axis) + smooth/2)/(K.sum(y_true,
                                                            axis=axis) + K.sum(y_pred,
                                                                               axis=axis) + smooth))
def weighted_dice_coefficient_loss(y_true, y_pred):
    """Negated weighted Dice coefficient, usable as a loss."""
    score = weighted_dice_coefficient(y_true, y_pred)
    return -score
def label_wise_dice_coefficient(y_true, y_pred, label_index):
    """Dice coefficient restricted to the channel at `label_index`."""
    true_channel = y_true[:, label_index]
    pred_channel = y_pred[:, label_index]
    return dice_coefficient(true_channel, pred_channel)
def get_label_dice_coefficient_function(label_index):
    """Return a metric computing the Dice coefficient for one label.

    The callable is named 'label_<i>_dice_coef' so each label shows up
    under a distinct name in training logs.
    """
    f = partial(label_wise_dice_coefficient, label_index=label_index)
    # partial objects have no __name__; plain attribute assignment is the
    # idiomatic way to set it (rather than f.__setattr__('__name__', ...)).
    f.__name__ = 'label_{0}_dice_coef'.format(label_index)
    return f
# Backwards-compatible short aliases.
dice_coef = dice_coefficient
dice_coef_loss = dice_coefficient_loss
| {
"repo_name": "ellisdg/3DUnetCNN",
"path": "legacy/unet3dlegacy/metrics.py",
"copies": "1",
"size": "1508",
"license": "mit",
"hash": -2143257309783919600,
"line_mean": 31.085106383,
"line_max": 100,
"alpha_frac": 0.5915119363,
"autogenerated": false,
"ratio": 3.168067226890756,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4259579163190756,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from keras.layers import Input, LeakyReLU, Add, UpSampling3D, Activation, SpatialDropout3D, Conv3D
from keras.engine import Model
from keras.optimizers import Adam
from .unet import create_convolution_block, concatenate
from ..metrics import weighted_dice_coefficient_loss
# Every convolution block in this model uses LeakyReLU activation and
# instance normalization.
create_convolution_block = partial(create_convolution_block, activation=LeakyReLU, instance_normalization=True)
def isensee2017_model(input_shape=(4, 128, 128, 128), n_base_filters=16, depth=5, dropout_rate=0.3,
                      n_segmentation_levels=3, n_labels=4, optimizer=Adam, initial_learning_rate=5e-4,
                      loss_function=weighted_dice_coefficient_loss, activation_name="sigmoid"):
    """
    This function builds a model proposed by Isensee et al. for the BRATS 2017 competition:
    https://www.cbica.upenn.edu/sbia/Spyridon.Bakas/MICCAI_BraTS/MICCAI_BraTS_2017_proceedings_shortPapers.pdf

    This network is highly similar to the model proposed by Kayalibay et al. "CNN-based Segmentation of Medical
    Imaging Data", 2017: https://arxiv.org/pdf/1701.03056.pdf

    :param input_shape: channels-first input shape (channels, x, y, z)
    :param n_base_filters: filters at the first level; doubled per level
    :param depth: number of encoder levels
    :param dropout_rate: spatial dropout inside each context module
    :param n_segmentation_levels: decoder levels contributing to the deep-supervision sum
    :param n_labels: number of output label channels
    :param optimizer: Keras optimizer class
    :param initial_learning_rate: learning rate passed to the optimizer
    :param loss_function: loss used to compile the model
    :param activation_name: final activation applied to the summed segmentation maps
    :return: compiled Keras model
    """
    inputs = Input(input_shape)

    current_layer = inputs
    level_output_layers = list()
    level_filters = list()
    # Encoder: each level doubles the filters; levels after the first
    # downsample with a strided convolution, then a residual context module.
    for level_number in range(depth):
        n_level_filters = (2**level_number) * n_base_filters
        level_filters.append(n_level_filters)

        if current_layer is inputs:
            in_conv = create_convolution_block(current_layer, n_level_filters)
        else:
            in_conv = create_convolution_block(current_layer, n_level_filters, strides=(2, 2, 2))

        context_output_layer = create_context_module(in_conv, n_level_filters, dropout_rate=dropout_rate)

        summation_layer = Add()([in_conv, context_output_layer])
        level_output_layers.append(summation_layer)
        current_layer = summation_layer

    # Decoder: upsample, concatenate with the matching encoder output,
    # localize; the shallowest levels also emit segmentation maps.
    segmentation_layers = list()
    for level_number in range(depth - 2, -1, -1):
        up_sampling = create_up_sampling_module(current_layer, level_filters[level_number])
        concatenation_layer = concatenate([level_output_layers[level_number], up_sampling], axis=1)
        localization_output = create_localization_module(concatenation_layer, level_filters[level_number])
        current_layer = localization_output
        if level_number < n_segmentation_levels:
            segmentation_layers.insert(0, Conv3D(n_labels, (1, 1, 1))(current_layer))

    # Deep supervision: sum the segmentation maps from deepest to
    # shallowest, upsampling the running sum between levels.
    output_layer = None
    for level_number in reversed(range(n_segmentation_levels)):
        segmentation_layer = segmentation_layers[level_number]
        if output_layer is None:
            output_layer = segmentation_layer
        else:
            output_layer = Add()([output_layer, segmentation_layer])

        if level_number > 0:
            output_layer = UpSampling3D(size=(2, 2, 2))(output_layer)

    activation_block = Activation(activation_name)(output_layer)

    model = Model(inputs=inputs, outputs=activation_block)
    model.compile(optimizer=optimizer(lr=initial_learning_rate), loss=loss_function)
    return model
def create_localization_module(input_layer, n_filters):
    """Localization module: a full convolution block followed by a 1x1x1 block."""
    features = create_convolution_block(input_layer, n_filters)
    return create_convolution_block(features, n_filters, kernel=(1, 1, 1))
def create_up_sampling_module(input_layer, n_filters, size=(2, 2, 2)):
    """Upsampling module: nearest-neighbour upsampling then a convolution block."""
    upsampled = UpSampling3D(size=size)(input_layer)
    return create_convolution_block(upsampled, n_filters)
def create_context_module(input_layer, n_level_filters, dropout_rate=0.3, data_format="channels_first"):
    """Context module: two convolution blocks with spatial dropout in between."""
    first_block = create_convolution_block(input_layer=input_layer, n_filters=n_level_filters)
    dropped = SpatialDropout3D(rate=dropout_rate, data_format=data_format)(first_block)
    second_block = create_convolution_block(input_layer=dropped, n_filters=n_level_filters)
    return second_block
| {
"repo_name": "ellisdg/3DUnetCNN",
"path": "legacy/unet3dlegacy/model/isensee2017.py",
"copies": "1",
"size": "4212",
"license": "mit",
"hash": 8053541314996926000,
"line_mean": 39.8932038835,
"line_max": 111,
"alpha_frac": 0.7046533713,
"autogenerated": false,
"ratio": 3.566469093988146,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4771122465288146,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from kivy.animation import Animation
from kivy.core.window import Window
from kivy.clock import Clock
from kivy.uix.bubble import Bubble, BubbleButton
from kivy.properties import ListProperty
from kivy.uix.widget import Widget
from electroncash_gui.i18n import _
class ContextMenuItem(Widget):
    '''abstract class

    Marker base class: ContextMenu.add_widget only routes instances of
    this class into its menu content layout.
    '''
class ContextButton(ContextMenuItem, BubbleButton):
    # A tappable menu entry: BubbleButton appearance, ContextMenuItem role.
    pass
class ContextMenu(Bubble):
    """Floating bubble menu that shows a row of buttons and fades in/out.

    Dispatches `on_press`/`on_release` with the pressed button instance;
    tapping outside the bubble dismisses it.
    """

    buttons = ListProperty([_('ok'), _('cancel')])
    '''List of Buttons to be displayed at the bottom'''

    __events__ = ('on_press', 'on_release')

    def __init__(self, **kwargs):
        self._old_buttons = self.buttons
        super(ContextMenu, self).__init__(**kwargs)
        self.on_buttons(self, self.buttons)

    def on_touch_down(self, touch):
        # A touch outside the bubble dismisses the menu.
        if not self.collide_point(*touch.pos):
            self.hide()
            return
        return super(ContextMenu, self).on_touch_down(touch)

    def on_buttons(self, _menu, value):
        """Rebuild the button row whenever the `buttons` list changes."""
        if 'menu_content' not in self.ids.keys():
            return
        if value == self._old_buttons:
            return
        blayout = self.ids.menu_content
        blayout.clear_widgets()
        for btn in value:
            ib = ContextButton(text=btn)
            # Forward button events through this menu's own events.
            ib.bind(on_press=partial(self.dispatch, 'on_press'))
            ib.bind(on_release=partial(self.dispatch, 'on_release'))
            blayout.add_widget(ib)
        self._old_buttons = value

    def on_press(self, instance):
        pass

    def on_release(self, instance):
        pass

    def show(self, pos, duration=0):
        """Display the menu at `pos`; auto-hide after `duration` seconds if set."""
        Window.add_widget(self)
        # wait for the bubble to adjust it's size according to text then animate
        Clock.schedule_once(lambda dt: self._show(pos, duration))

    def _show(self, pos, duration):
        def on_stop(*l):
            if duration:
                Clock.schedule_once(self.hide, duration + .5)
        self.opacity = 0
        # Offset the target position so the bubble's arrow points at `pos`.
        arrow_pos = self.arrow_pos
        if arrow_pos[0] in ('l', 'r'):
            pos = pos[0], pos[1] - (self.height/2)
        else:
            pos = pos[0] - (self.width/2), pos[1]
        self.limit_to = Window

        anim = Animation(opacity=1, pos=pos, d=.32)
        anim.bind(on_complete=on_stop)
        anim.cancel_all(self)
        anim.start(self)

    def hide(self, *dt):
        """Fade the menu out and detach it from the window."""
        def on_stop(*l):
            Window.remove_widget(self)
        anim = Animation(opacity=0, d=.25)
        anim.bind(on_complete=on_stop)
        anim.cancel_all(self)
        anim.start(self)

    def add_widget(self, widget, index=0):
        """Route ContextMenuItem children into the menu content layout."""
        if not isinstance(widget, ContextMenuItem):
            super(ContextMenu, self).add_widget(widget, index)
            return
        # Bug fix: `menu_content` was referenced as a bare (undefined) name,
        # raising NameError; menu items belong in the ids.menu_content layout.
        self.ids.menu_content.add_widget(widget, index)
| {
"repo_name": "molecular/electrum",
"path": "gui/kivy/uix/menus.py",
"copies": "1",
"size": "2782",
"license": "mit",
"hash": -6169850710554922000,
"line_mean": 28.2842105263,
"line_max": 80,
"alpha_frac": 0.5995686556,
"autogenerated": false,
"ratio": 3.6847682119205296,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47843368675205294,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from kivy.core.image import Image as CoreImage
from kivy.properties import ObjectProperty, BooleanProperty, StringProperty
from kivy.uix.behaviors import ButtonBehavior, ToggleButtonBehavior
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.image import AsyncImage
from kivy.uix.label import Label
from kivy.uix.popup import Popup
from kivy.uix.screenmanager import Screen, ScreenManager
from kivy.clock import Clock
from kivy.uix.behaviors import DragBehavior
from kivy.uix.scrollview import ScrollView
from icon_get import core_img_from_url
# Auto-scroll trigger zones while dragging, as fractions of screen height
# (touch y in range (0, 1)).
LOWER_SCREEN_SCROLL_BOUNDARY = 0.10
UPPER_SCREEN_SCROLL_BOUNDARY = 0.85
# Per-tick scroll distance is clamped into [Y_MIN, Y_MAX] (pixels).
Y_MAX_SCROLL_SPEED = 25
Y_MIN_SCROLL_SPEED = 3
class MyScroll(ScrollView):
    """
    A ScrollView that ignores scrolls initiated by dragging the mouse;
    mouse-wheel scrolling still works normally.
    """
    def on_touch_down(self, touch):
        is_mouse_drag = (touch.device == "mouse"
                         and not touch.button.startswith("scroll"))
        if is_mouse_drag:
            # temporarily disable vertical scrolling while the touch is
            # delivered so a drag cannot start a scroll
            self.do_scroll_y = False
            super().on_touch_down(touch)
            self.do_scroll_y = True
        else:
            # mouse-wheel (or non-mouse) input passes straight through
            super().on_touch_down(touch)
class MainScreen(Screen):
    """ TODO: remove this?
    on_scroll_change and on_slider_change bind the slider and scrollview together.
    """
    def move_canvas_up(self, child):
        """Re-insert *child*'s canvas last so it is drawn over everything
        else. Useful when dragging an icon over other icons."""
        own_canvas = self.canvas
        own_canvas.remove(child.canvas)
        own_canvas.insert(len(own_canvas.get_group(None)), child.canvas)
class SettingsScreen(Screen):
    """Settings screen; tracks whether the main screen must be rebuilt."""
    # set True when a change invalidates the current main screen contents
    needs_rebuild = BooleanProperty(False)
    def rebuild_if_needed(self, app_instance):
        """Rebuild the main screen once if a rebuild was requested."""
        if not self.needs_rebuild:
            return
        app_instance.rebuild_main()
        self.needs_rebuild = False
class MainScreenManager(ScreenManager):
    """ScreenManager subclass with no added behavior."""
class ListEntry(DragBehavior, ButtonBehavior, BoxLayout):
    """
    The main icon list entry. Is a grid with an icon and the name, can be clicked.

    Entries can be dragged to reorder the list; dragging near the top or
    bottom of the screen auto-scrolls the surrounding ScrollView.
    """
    img = ObjectProperty(None)
    icon = ObjectProperty(None)
    main_parent = ObjectProperty(None)
    is_scrolling = BooleanProperty(False)
    def __init__(self, icon, **kwargs):
        super().__init__(**kwargs)
        self.icon = icon
        self.ids.icon_info.text = icon.name
        self.main_parent = self.parent
        self.img = AsyncImage(height=self.height, allow_stretch=True, size=(56, 56), size_hint=(None, None))
        self.add_widget(self.img, index=len(self.children))
        self.set_image()
    def on_touch_down(self, touch):
        super().on_touch_down(touch)
        if not self.collide_point(*touch.pos):
            return
        if touch.grab_list and touch.grab_list[0]() is self:  # if this widget was grabbed
            self.main_parent = self.parent  # remember your parent
    def on_touch_move(self, touch):
        super().on_touch_move(touch)
        if not self.collide_point(*touch.pos):
            return
        if touch.grab_list and touch.grab_current is self:  # if this widget was grabbed
            self.main_parent = self.parent  # remember your parent
            # make this widget be drawn on top
            self.parent.canvas.remove(self.canvas)
            self.parent.canvas.insert(len(self.parent.canvas.get_group(None)), self.canvas)
            x, y = touch.spos  # position in the 0-1 coordinate system
            # Fix: this used to set `self.scrolling` (a typo) so
            # `is_scrolling` never became True and move_slider bailed out
            # immediately -- auto-scroll never ran. Also use the module
            # boundary constants instead of hard-coded 0.85/0.15 so the
            # trigger zone matches the zone move_slider scrolls in.
            if (y > UPPER_SCREEN_SCROLL_BOUNDARY
                    or y < LOWER_SCREEN_SCROLL_BOUNDARY) and not self.is_scrolling:
                self.is_scrolling = True
                Clock.schedule_once(partial(self.move_slider, touch), 0.05)
    def move_slider(self, touch, dt=None):
        """Auto-scroll the parent ScrollView while the touch stays near a
        screen edge; reschedules itself until the drag leaves the zone."""
        if not touch.grab_list or not self.is_scrolling:
            self.is_scrolling = False
            return
        relative_touch_y = touch.spos[1]  # in range 0-1, the relative position on screen
        scrollview = self.main_parent.parent
        if relative_touch_y > UPPER_SCREEN_SCROLL_BOUNDARY:
            # quadratic ramp-up with distance past the boundary, clamped
            distance = ((relative_touch_y - UPPER_SCREEN_SCROLL_BOUNDARY) * 100) ** 2
            distance = Y_MAX_SCROLL_SPEED if distance > Y_MAX_SCROLL_SPEED \
                else Y_MIN_SCROLL_SPEED if distance < Y_MIN_SCROLL_SPEED \
                else distance  # in bounds
            scroll_distance = scrollview.convert_distance_to_scroll(0, distance)[1]
            if scrollview.scroll_y + scroll_distance <= 1:
                scrollview.scroll_y += scroll_distance
                self.center_y += distance
            else:
                # clamp at the top; move the entry by the remaining distance
                distance = (1 - scrollview.scroll_y) * scrollview.height
                scrollview.scroll_y = 1
                self.center_y += distance
            Clock.schedule_once(partial(self.move_slider, touch), 0.03)
        elif relative_touch_y < LOWER_SCREEN_SCROLL_BOUNDARY:
            distance = ((LOWER_SCREEN_SCROLL_BOUNDARY - relative_touch_y) * 100) ** 2
            distance = Y_MAX_SCROLL_SPEED if distance > Y_MAX_SCROLL_SPEED \
                else Y_MIN_SCROLL_SPEED if distance < Y_MIN_SCROLL_SPEED \
                else distance  # in bounds
            scroll_distance = scrollview.convert_distance_to_scroll(0, distance)[1]
            if scrollview.scroll_y - scroll_distance >= 0:
                scrollview.scroll_y -= scroll_distance
                self.center_y -= distance
            else:
                # clamp at the bottom
                distance = scrollview.scroll_y * scrollview.height
                scrollview.scroll_y = 0
                self.center_y -= distance
            Clock.schedule_once(partial(self.move_slider, touch), 0.03)
        else:
            self.is_scrolling = False
    def on_touch_up(self, touch):
        if self.collide_point(*touch.pos) and self._drag_touch:
            # drop: re-insert the entry at the position it was dragged to
            self.main_parent.remove_widget(self)
            new_index = self.get_new_index()
            self.main_parent.add_widget(self, index=new_index)
        super().on_touch_up(touch)
    def get_new_index(self):
        """Return the child index matching this entry's current y position."""
        new_x, new_y = self.center_x, self.center_y
        children = list(self.main_parent.children)
        is_between = lambda x, x1, x2: x1 <= x <= x2 or x1 >= x >= x2
        # For edge cases
        if len(children) >= 2:
            if children[0].center_y > new_y:
                return 0
            elif children[-1].center_y < new_y:
                return len(children)
        for index, (child1, child2) in enumerate(zip(children[:-1], children[1:])):
            if is_between(new_y, child1.center_y, child2.center_y):
                return index + 1
        # Robustness: with fewer than two remaining children nothing above
        # matches; returning 0 avoids add_widget(index=None) crashing.
        return 0
    def set_image(self):
        """Copy the icon's current texture onto the entry's image widget."""
        self.img.texture = self.icon.core_image.texture
    def open_entry_options_popup(self):
        """Open the popup that lets the user pick a different icon."""
        popup = ImageOptionsPopup(self)
        popup.open()
class ToggleImage(ToggleButtonBehavior, AsyncImage):
    """
    The toggle image. Used when selecting a preferred icon from the grid.
    """
    def __init__(self, main_parent, index, **kwargs):
        super().__init__(**kwargs)
        self.main_parent = main_parent
        self.index = index
    def on_touch_down(self, touch):
        # a double tap saves this selection immediately
        if self.collide_point(*touch.pos) and touch.is_double_tap:
            self.main_parent.save(self)
        super().on_touch_down(touch)
    def on_state(self, widget, value):
        # darken while selected, restore normal color otherwise
        self.color = [.8, .8, .8, .5] if value == "down" else [1, 1, 1, 1]
class ImageOptionsPopup(Popup):
    """
    Shows possible icons for an app/game. Lets user select one.
    """
    def __init__(self, entry, **kwargs):
        super().__init__(**kwargs)
        self.entry = entry
        # one toggleable image per candidate icon URL
        for i, icon_urlbytes in enumerate(entry.icon.url_bytes):
            aiw = ToggleImage(main_parent=self, index=i, source=icon_urlbytes.url)
            self.ids.image_grid.add_widget(aiw)
    def next_image(self):
        # NOTE(review): this class never defines `self.icon` or `self.img`;
        # calling this would raise AttributeError. Looks copied from
        # ListEntry -- confirm whether these three methods are dead code.
        self.icon.get_next_icon_url()
        self.img.texture = CoreImage(self.icon.current_icon_bytes(), ext="png").texture
    def previous_image(self):
        # NOTE(review): same missing-attribute concern as next_image.
        self.icon.get_previous_icon_url()
        self.img.texture = CoreImage(self.icon.current_icon_bytes(), ext="png").texture
    def set_image(self):
        # NOTE(review): same missing-attribute concern as next_image.
        self.img.texture = CoreImage(self.icon.current_icon_bytes(), ext="png").texture
    def save(self, selection):
        # Remember the chosen icon on the owning entry, update its
        # displayed texture and close the popup.
        self.entry.icon.index = selection.index
        self.entry.img.texture = selection.texture
        self.dismiss()
class WrappedLabel(ScrollView):
    """A label with wrapped text that scrolls when it overflows."""
    text = StringProperty()
    def __init__(self, text, **kwargs):
        super().__init__(**kwargs)
        self.text = text
| {
"repo_name": "EdvardasDlugauskas/Auto-Rain",
"path": "widgets.py",
"copies": "1",
"size": "7812",
"license": "mit",
"hash": -3439061246746005000,
"line_mean": 28.7034220532,
"line_max": 102,
"alpha_frac": 0.7025089606,
"autogenerated": false,
"ratio": 3.01854714064915,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9064488469218426,
"avg_score": 0.03131352640614489,
"num_lines": 263
} |
from functools import partial
from kivy.graphics.instructions import Callback
from kivy.uix.relativelayout import RelativeLayout
from kivy.clock import Clock
from kivy.graphics.fbo import Fbo
from kivy.graphics.opengl import glReadPixels, GL_RGBA, GL_UNSIGNED_BYTE
from kivy.graphics.texture import Texture
from mpfmc.core.bcp_config_player import BcpConfigPlayer
class McDisplayLightPlayer(BcpConfigPlayer):
    """Grabs pixels from a display and uses them as lights.

    For each configured element an FBO is set up; every tick the display is
    rendered into it, sampled at the configured light positions, and changed
    colors are sent to MPF via BCP.
    """

    config_file_section = 'display_light_player'
    show_section = 'display_lights'
    machine_collection_name = 'displays'

    def __init__(self, machine):
        super().__init__(machine)
        self._scheduled = False
        # last color sent per light name, so only changes are transmitted
        self._last_color = {}

    # pylint: disable-msg=too-many-arguments
    def play_element(self, settings, element, context, calling_context, priority=0, **kwargs):
        """Start or stop grabbing pixels for *element*."""
        context_dict = self._get_instance_dict(context)
        if settings['action'] == "play":
            if not self._scheduled:
                self._scheduled = True
                Clock.schedule_interval(self._tick, 0)
            if element not in context_dict:
                context_dict[element] = self._setup_fbo(element, settings, context)
            else:
                context_dict[element][5] = True
        elif settings['action'] == "stop":
            try:
                context_dict[element][5] = False
            except (KeyError, IndexError):
                # fix: a missing element raises KeyError (not IndexError);
                # stopping an unknown element stays a silent no-op
                pass
        else:
            raise AssertionError("Unknown action {}".format(settings['action']))

    def _setup_fbo(self, element, settings, context):
        """Setup FBO for a display."""
        if element not in self.machine.displays:
            raise AssertionError("Display {} not found. Please create it to use display_light_player.".format(element))
        source = self.machine.displays[element]
        # put the widget canvas on a Fbo
        texture = Texture.create(size=source.size, colorfmt='rgba')
        fbo = Fbo(size=source.size, texture=texture)
        effect_widget = RelativeLayout()
        effect_widget.size = source.size
        fbo.add(effect_widget.canvas)
        with source.canvas:
            callback = Callback(partial(self._trigger_render, context, element))
        # instance layout:
        # [0] fbo, [1] effect_widget, [2] source display, [3] settings,
        # [4] first-render flag, [5] enabled flag, [6] dirty flag, [7] callback
        return [fbo, effect_widget, source, settings, True, True, True, callback]

    def _trigger_render(self, context, element, *args):
        """Mark *element* dirty so it gets rendered on the next tick."""
        del args
        context_dict = self._get_instance_dict(context)
        if element not in context_dict:
            return
        context_dict[element][6] = True

    def _tick(self, dt) -> None:
        del dt
        # run this at the end of the tick to make sure all kivy bind callbacks have executed
        Clock.schedule_once(self._render_all, -1)

    def _render_all(self, dt):
        """Render every enabled and dirty instance."""
        del dt
        for context, instances in self.instances.items():
            for element, instance in instances.items():
                # fix: the enabled/dirty flags live on the instance list --
                # the old code indexed `element` (the dict key, a string)
                if not instance[5] or not instance[6]:
                    continue
                self._render(instance, element, context)

    # pylint: disable-msg=too-many-locals
    def _render(self, instance, element, context):
        """Render one display into its FBO and emit sampled light colors."""
        fbo, effect_widget, source, settings, first, _, _, _ = instance
        instance[4] = False
        instance[6] = False
        # detach the widget from the parent
        parent = source.parent
        if parent and hasattr(parent, "remove_display_source"):
            parent.remove_display_source(source)
        effect_widget.add_widget(source.container)
        fbo.draw()
        fbo.bind()
        data = glReadPixels(0, 0, source.native_size[0], source.native_size[1],
                            GL_RGBA, GL_UNSIGNED_BYTE)
        fbo.release()
        effect_widget.remove_widget(source.container)
        # reattach to the parent
        if parent and hasattr(parent, "add_display_source"):
            parent.add_display_source(source)
        if not first:
            # for some reasons we got garbage in the first buffer. we just skip it for now
            values = {}
            width = source.native_size[0]
            height = source.native_size[1]
            for x, y, name in settings['light_map']:
                x_pixel = int(x * width)
                y_pixel = height - int(y * height)
                if (data[width * y_pixel * 4 + x_pixel * 4 + 3]) == 0:
                    # pixel is transparent
                    value = -1
                else:
                    value = (
                        data[width * y_pixel * 4 + x_pixel * 4],
                        data[width * y_pixel * 4 + x_pixel * 4 + 1],
                        data[width * y_pixel * 4 + x_pixel * 4 + 2])
                if name not in self._last_color or self._last_color[name] != value:
                    self._last_color[name] = value
                    values[name] = value
            self.machine.bcp_processor.send("trigger", name="display_light_player_apply", context=context,
                                            values=values, element=element, _silent=True)
        # clear the fbo background
        fbo.bind()
        fbo.clear_buffer()
        fbo.release()

    def clear_context(self, context):
        """Remove render callbacks and forget all instances for *context*."""
        context_dict = self._get_instance_dict(context)
        for _, instance in context_dict.items():
            instance[2].canvas.remove(instance[7])
        self._reset_instance_dict(context)
mc_player_cls = McDisplayLightPlayer
| {
"repo_name": "missionpinball/mpf_mc",
"path": "mpfmc/config_players/display_light_player.py",
"copies": "1",
"size": "5441",
"license": "mit",
"hash": -6893278795816764000,
"line_mean": 35.7635135135,
"line_max": 119,
"alpha_frac": 0.5842675979,
"autogenerated": false,
"ratio": 4.1502669717772696,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5234534569677269,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from kivy.properties import ObjectProperty
from kivy.uix.actionbar import ActionButton, ContextualActionView
class EditContView(ContextualActionView):
    '''EditContView is a ContextualActionView, used to display Edit items:
    Copy, Cut, Paste, Undo, Redo, Select All. It has events:
        on_undo, emitted when Undo ActionButton is clicked.
        on_redo, emitted when Redo ActionButton is clicked.
        on_cut, emitted when Cut ActionButton is clicked.
        on_copy, emitted when Copy ActionButton is clicked.
        on_paste, emitted when Paste ActionButton is clicked.
        on_delete, emitted when Delete ActionButton is clicked.
        on_selectall, emitted when Select All ActionButton is clicked.
        on_next_screen, emitted when Next Screen ActionButton is clicked.
        on_prev_screen, emitted when Previous Screen ActionButton is clicked.
        on_find, emitted when Find ActionButton is clicked.
    '''

    __events__ = ('on_undo', 'on_redo', 'on_cut', 'on_copy',
                  'on_paste', 'on_delete', 'on_selectall',
                  'on_next_screen', 'on_prev_screen', 'on_find')

    # lazily created ActionButtons; None while not shown
    action_btn_next_screen = ObjectProperty(None, allownone=True)
    action_btn_prev_screen = ObjectProperty(None, allownone=True)
    action_btn_find = ObjectProperty(None, allownone=True)

    def show_action_btn_screen(self, show):
        '''Add action_btn_next_screen and action_btn_prev_screen
        if show is True; remove them otherwise.
        '''
        if self.action_btn_next_screen:
            self.remove_widget(self.action_btn_next_screen)
        if self.action_btn_prev_screen:
            self.remove_widget(self.action_btn_prev_screen)
        self.action_btn_next_screen = None
        self.action_btn_prev_screen = None
        if show:
            self.action_btn_next_screen = ActionButton(text="Next Screen")
            self.action_btn_next_screen.bind(
                on_press=partial(self.dispatch, 'on_next_screen'))
            self.action_btn_prev_screen = ActionButton(text="Previous Screen")
            self.action_btn_prev_screen.bind(
                on_press=partial(self.dispatch, 'on_prev_screen'))
            self.add_widget(self.action_btn_next_screen)
            self.add_widget(self.action_btn_prev_screen)

    def show_find(self, show):
        '''Show or hide the Find button, creating it on first use.
        '''
        if self.action_btn_find is None:
            find = ActionButton(text='Find')
            find.bind(on_release=partial(self.dispatch, 'on_find'))
            self.action_btn_find = find
        if show:
            # idiom fix: `not x in y` -> `x not in y`
            if self.action_btn_find not in self.children:
                self.add_widget(self.action_btn_find)
        else:
            if self.action_btn_find in self.children:
                self.remove_widget(self.action_btn_find)

    # Default (no-op) handlers required by __events__.
    def on_undo(self, *args):
        pass

    def on_redo(self, *args):
        pass

    def on_cut(self, *args):
        pass

    def on_copy(self, *args):
        pass

    def on_paste(self, *args):
        pass

    def on_delete(self, *args):
        pass

    def on_selectall(self, *args):
        pass

    def on_next_screen(self, *args):
        pass

    def on_prev_screen(self, *args):
        pass

    def on_find(self, *args):
        pass
| {
"repo_name": "kiok46/kivy-designer",
"path": "designer/components/edit_contextual_view.py",
"copies": "4",
"size": "3247",
"license": "mit",
"hash": -6543123781423037000,
"line_mean": 32.8229166667,
"line_max": 78,
"alpha_frac": 0.6199568833,
"autogenerated": false,
"ratio": 3.7364787111622553,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00017959770114942528,
"num_lines": 96
} |
from functools import partial
from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core import types, cgutils
from numba.np import numpy_support
class DataModel(object):
    """
    DataModel describes how a FE type is represented in the LLVM IR at
    different contexts.
    Contexts are:
    - value: representation inside function body. Maybe stored in stack.
    The representation here are flexible.
    - data: representation used when storing into containers (e.g. arrays).
    - argument: representation used for function argument. All composite
    types are unflattened into multiple primitive types.
    - return: representation used for return argument.
    Throughout the compiler pipeline, a LLVM value is usually passed around
    in the "value" representation. All "as_" prefix function converts from
    "value" representation. All "from_" prefix function converts to the
    "value" representation.
    """
    def __init__(self, dmm, fe_type):
        # dmm: the owning data-model manager, used to look up other models
        self._dmm = dmm
        self._fe_type = fe_type
    @property
    def fe_type(self):
        """The frontend type this model represents."""
        return self._fe_type
    def get_value_type(self):
        """Return the LLVM type of the "value" representation."""
        raise NotImplementedError(self)
    def get_data_type(self):
        # default: "data" representation equals the "value" representation
        return self.get_value_type()
    def get_argument_type(self):
        """Return a LLVM type or nested tuple of LLVM type
        """
        return self.get_value_type()
    def get_return_type(self):
        """Return the LLVM type of the "return" representation."""
        return self.get_value_type()
    def as_data(self, builder, value):
        """Convert a "value" representation to the "data" representation."""
        raise NotImplementedError(self)
    def as_argument(self, builder, value):
        """
        Takes one LLVM value
        Return a LLVM value or nested tuple of LLVM value
        """
        raise NotImplementedError(self)
    def as_return(self, builder, value):
        """Convert a "value" representation to the "return" representation."""
        raise NotImplementedError(self)
    def from_data(self, builder, value):
        """Convert a "data" representation back to a "value"."""
        raise NotImplementedError(self)
    def from_argument(self, builder, value):
        """
        Takes a LLVM value or nested tuple of LLVM value
        Returns one LLVM value
        """
        raise NotImplementedError(self)
    def from_return(self, builder, value):
        """Convert a "return" representation back to a "value"."""
        raise NotImplementedError(self)
    def load_from_data_pointer(self, builder, ptr, align=None):
        """
        Load value from a pointer to data.
        This is the default implementation, sufficient for most purposes.
        """
        return self.from_data(builder, builder.load(ptr, align=align))
    def traverse(self, builder):
        """
        Traverse contained members.
        Returns a iterable of contained (types, getters).
        Each getter is a one-argument function accepting a LLVM value.
        """
        return []
    def traverse_models(self):
        """
        Recursively list all models involved in this model.
        """
        return [self._dmm[t] for t in self.traverse_types()]
    def traverse_types(self):
        """
        Recursively list all frontend types involved in this model.
        """
        return [self._fe_type] + self.inner_types()
    def inner_types(self):
        """
        List all *inner* frontend types.
        """
        return []
    def get_nrt_meminfo(self, builder, value):
        """
        Returns the MemInfo object or None if it is not tracked.
        It is only defined for types.meminfo_pointer
        """
        return None
    def has_nrt_meminfo(self):
        # default: this model itself is not NRT-tracked
        return False
    def contains_nrt_meminfo(self):
        """
        Recursively check all contained types for need for NRT meminfo.
        """
        return any(model.has_nrt_meminfo() for model in self.traverse_models())
    def _compared_fields(self):
        # equality/hash key: concrete model class plus frontend type
        return (type(self), self._fe_type)
    def __hash__(self):
        return hash(tuple(self._compared_fields()))
    def __eq__(self, other):
        if type(self) is type(other):
            return self._compared_fields() == other._compared_fields()
        else:
            return False
    def __ne__(self, other):
        return not self.__eq__(other)
@register_default(types.Omitted)
class OmittedArgDataModel(DataModel):
    """
    Data model for omitted arguments. Such arguments contribute no LLVM
    function argument at all, so only the "argument" representation is
    implemented; every other representation raises NotImplementedError
    (inherited from DataModel).
    """
    def get_argument_type(self):
        # an omitted argument occupies zero slots in the signature
        return ()
    def as_argument(self, builder, val):
        # nothing to pass
        return ()
    def from_argument(self, builder, val):
        assert val == (), val
        return None
@register_default(types.Boolean)
@register_default(types.BooleanLiteral)
class BooleanModel(DataModel):
    """Booleans are an i1 in the "value" context, but a full i8 byte in the
    "data", "argument" and "return" contexts."""
    _bit_type = ir.IntType(1)
    _byte_type = ir.IntType(8)
    def get_value_type(self):
        return self._bit_type
    def get_data_type(self):
        return self._byte_type
    def get_return_type(self):
        return self.get_data_type()
    def get_argument_type(self):
        return self.get_data_type()
    def as_data(self, builder, value):
        # zero-extend the i1 into the i8 storage byte
        return builder.zext(value, self.get_data_type())
    def as_argument(self, builder, value):
        return self.as_data(builder, value)
    def as_return(self, builder, value):
        return self.as_data(builder, value)
    def from_data(self, builder, value):
        # Normalize the stored byte to a clean i1: zero stays 0, any
        # nonzero byte (not just 1) becomes 1.
        ty = self.get_value_type()
        resalloca = cgutils.alloca_once(builder, ty)
        cond = builder.icmp_unsigned('==', value, value.type(0))
        with builder.if_else(cond) as (then, otherwise):
            with then:
                builder.store(ty(0), resalloca)
            with otherwise:
                builder.store(ty(1), resalloca)
        return builder.load(resalloca)
    def from_argument(self, builder, value):
        return self.from_data(builder, value)
    def from_return(self, builder, value):
        return self.from_data(builder, value)
class PrimitiveModel(DataModel):
    """A primitive type can be represented natively in the target in all
    usage contexts.

    Consequently every conversion is the identity on the backend value.
    """
    def __init__(self, dmm, fe_type, be_type):
        super().__init__(dmm, fe_type)
        self.be_type = be_type
    def get_value_type(self):
        return self.be_type
    def as_data(self, builder, value):
        return value
    def as_argument(self, builder, value):
        return value
    def as_return(self, builder, value):
        return value
    def from_data(self, builder, value):
        return value
    def from_argument(self, builder, value):
        return value
    def from_return(self, builder, value):
        return value
class ProxyModel(DataModel):
    """
    Helper for models which forward every representation to another model.
    Subclasses must assign ``self._proxied_model``.
    """
    def get_value_type(self):
        return self._proxied_model.get_value_type()
    def get_data_type(self):
        return self._proxied_model.get_data_type()
    def get_return_type(self):
        return self._proxied_model.get_return_type()
    def get_argument_type(self):
        return self._proxied_model.get_argument_type()
    def as_data(self, builder, value):
        target = self._proxied_model
        return target.as_data(builder, value)
    def as_argument(self, builder, value):
        target = self._proxied_model
        return target.as_argument(builder, value)
    def as_return(self, builder, value):
        target = self._proxied_model
        return target.as_return(builder, value)
    def from_data(self, builder, value):
        target = self._proxied_model
        return target.from_data(builder, value)
    def from_argument(self, builder, value):
        target = self._proxied_model
        return target.from_argument(builder, value)
    def from_return(self, builder, value):
        target = self._proxied_model
        return target.from_return(builder, value)
@register_default(types.EnumMember)
@register_default(types.IntEnumMember)
class EnumModel(ProxyModel):
    """
    Enum members are represented exactly like their underlying values.
    """
    def __init__(self, dmm, fe_type):
        super().__init__(dmm, fe_type)
        # delegate all representations to the member value's model
        self._proxied_model = dmm.lookup(fe_type.dtype)
@register_default(types.Opaque)
@register_default(types.PyObject)
@register_default(types.RawPointer)
@register_default(types.NoneType)
@register_default(types.StringLiteral)
@register_default(types.EllipsisType)
@register_default(types.Function)
@register_default(types.Type)
@register_default(types.Object)
@register_default(types.Module)
@register_default(types.Phantom)
@register_default(types.ContextManager)
@register_default(types.Dispatcher)
@register_default(types.ObjModeDispatcher)
@register_default(types.ExceptionClass)
@register_default(types.Dummy)
@register_default(types.ExceptionInstance)
@register_default(types.ExternalFunction)
@register_default(types.EnumClass)
@register_default(types.IntEnumClass)
@register_default(types.NumberClass)
@register_default(types.TypeRef)
@register_default(types.NamedTupleClass)
@register_default(types.DType)
@register_default(types.RecursiveCall)
@register_default(types.MakeFunctionLiteral)
@register_default(types.Poison)
class OpaqueModel(PrimitiveModel):
    """
    Passed as opaque pointers
    """
    # every opaque value shares the same generic i8* type
    _ptr_type = ir.IntType(8).as_pointer()
    def __init__(self, dmm, fe_type):
        super().__init__(dmm, fe_type, self._ptr_type)
@register_default(types.MemInfoPointer)
class MemInfoModel(OpaqueModel):
    """Opaque pointer that is itself an NRT meminfo."""
    def inner_types(self):
        payload_model = self._dmm.lookup(self._fe_type.dtype)
        return payload_model.traverse_types()
    def has_nrt_meminfo(self):
        # this model *is* a meminfo, so it is always tracked
        return True
    def get_nrt_meminfo(self, builder, value):
        # the value itself is the meminfo pointer
        return value
@register_default(types.Integer)
@register_default(types.IntegerLiteral)
class IntegerModel(PrimitiveModel):
    """Integers map directly onto an LLVM integer of the same bit width."""
    def __init__(self, dmm, fe_type):
        super().__init__(dmm, fe_type, ir.IntType(fe_type.bitwidth))
@register_default(types.Float)
class FloatModel(PrimitiveModel):
    """Floats map onto the native LLVM ``float``/``double`` types."""
    def __init__(self, dmm, fe_type):
        if fe_type == types.float64:
            be_type = ir.DoubleType()
        elif fe_type == types.float32:
            be_type = ir.FloatType()
        else:
            # no other float widths are supported here
            raise NotImplementedError(fe_type)
        super().__init__(dmm, fe_type, be_type)
@register_default(types.CPointer)
class PointerModel(PrimitiveModel):
    """A C pointer, represented as a pointer to the pointee's data type."""
    def __init__(self, dmm, fe_type):
        self._pointee_model = dmm.lookup(fe_type.dtype)
        self._pointee_be_type = self._pointee_model.get_data_type()
        super().__init__(dmm, fe_type, self._pointee_be_type.as_pointer())
@register_default(types.EphemeralPointer)
class EphemeralPointerModel(PointerModel):
    """Pointer whose pointee is stored by value inside containers."""
    def get_data_type(self):
        # stored as the pointee itself, not as a pointer
        return self._pointee_be_type
    def as_data(self, builder, value):
        # dereference, then convert the pointee to its data representation
        loaded = builder.load(value)
        return self._pointee_model.as_data(builder, loaded)
    def from_data(self, builder, value):
        raise NotImplementedError("use load_from_data_pointer() instead")
    def load_from_data_pointer(self, builder, ptr, align=None):
        # the data pointer can simply be reinterpreted as the value
        return builder.bitcast(ptr, self.get_value_type())
@register_default(types.EphemeralArray)
class EphemeralArrayModel(PointerModel):
    """Fixed-count array stored by value inside containers."""
    def __init__(self, dmm, fe_type):
        super().__init__(dmm, fe_type)
        self._data_type = ir.ArrayType(self._pointee_be_type,
                                       self._fe_type.count)
    def get_data_type(self):
        return self._data_type
    def as_data(self, builder, value):
        elems = [builder.load(cgutils.gep_inbounds(builder, value, idx))
                 for idx in range(self._fe_type.count)]
        return cgutils.pack_array(builder, elems)
    def from_data(self, builder, value):
        raise NotImplementedError("use load_from_data_pointer() instead")
    def load_from_data_pointer(self, builder, ptr, align=None):
        # the data pointer can simply be reinterpreted as the value
        return builder.bitcast(ptr, self.get_value_type())
@register_default(types.ExternalFunctionPointer)
class ExternalFuncPointerModel(PrimitiveModel):
    """Pointer to a non-Numba (C ABI) function."""
    def __init__(self, dmm, fe_type):
        sig = fe_type.sig
        # Since the function is non-Numba, there is no adaptation
        # of arguments and return value, hence get_value_type().
        argtys = [dmm.lookup(t).get_value_type() for t in sig.args]
        retty = dmm.lookup(sig.return_type).get_value_type()
        super().__init__(dmm, fe_type,
                         ir.PointerType(ir.FunctionType(retty, argtys)))
@register_default(types.UniTuple)
@register_default(types.NamedUniTuple)
@register_default(types.StarArgUniTuple)
class UniTupleModel(DataModel):
    """Homogeneous tuple, represented as an LLVM array of the element
    model's type."""
    def __init__(self, dmm, fe_type):
        super().__init__(dmm, fe_type)
        self._elem_model = dmm.lookup(fe_type.dtype)
        self._count = len(fe_type)
        elem = self._elem_model
        self._value_type = ir.ArrayType(elem.get_value_type(), self._count)
        self._data_type = ir.ArrayType(elem.get_data_type(), self._count)
    def get_value_type(self):
        return self._value_type
    def get_data_type(self):
        return self._data_type
    def get_return_type(self):
        return self.get_value_type()
    def get_argument_type(self):
        # one argument slot per element
        return (self._elem_model.get_argument_type(),) * self._count
    def as_argument(self, builder, value):
        return [self._elem_model.as_argument(builder,
                                             builder.extract_value(value, [i]))
                for i in range(self._count)]
    def from_argument(self, builder, value):
        res = ir.Constant(self.get_value_type(), ir.Undefined)
        for pos, elem in enumerate(value):
            converted = self._elem_model.from_argument(builder, elem)
            res = builder.insert_value(res, converted, [pos])
        return res
    def as_data(self, builder, value):
        res = ir.Constant(self.get_data_type(), ir.Undefined)
        for pos in range(self._count):
            elem = builder.extract_value(value, [pos])
            res = builder.insert_value(
                res, self._elem_model.as_data(builder, elem), [pos])
        return res
    def from_data(self, builder, value):
        res = ir.Constant(self.get_value_type(), ir.Undefined)
        for pos in range(self._count):
            elem = builder.extract_value(value, [pos])
            res = builder.insert_value(
                res, self._elem_model.from_data(builder, elem), [pos])
        return res
    def as_return(self, builder, value):
        return value
    def from_return(self, builder, value):
        return value
    def traverse(self, builder):
        def getter(i, value):
            return builder.extract_value(value, i)
        return [(self._fe_type.dtype, partial(getter, i))
                for i in range(self._count)]
    def inner_types(self):
        return self._elem_model.traverse_types()
class CompositeModel(DataModel):
    """Base marker class for any model composed of multiple other models."""
class StructModel(CompositeModel):
_value_type = None
_data_type = None
def __init__(self, dmm, fe_type, members):
super(StructModel, self).__init__(dmm, fe_type)
if members:
self._fields, self._members = zip(*members)
else:
self._fields = self._members = ()
self._models = tuple([self._dmm.lookup(t) for t in self._members])
def get_member_fe_type(self, name):
"""
StructModel-specific: get the Numba type of the field named *name*.
"""
pos = self.get_field_position(name)
return self._members[pos]
def get_value_type(self):
if self._value_type is None:
self._value_type = ir.LiteralStructType([t.get_value_type()
for t in self._models])
return self._value_type
def get_data_type(self):
if self._data_type is None:
self._data_type = ir.LiteralStructType([t.get_data_type()
for t in self._models])
return self._data_type
def get_argument_type(self):
return tuple([t.get_argument_type() for t in self._models])
def get_return_type(self):
return self.get_data_type()
def _as(self, methname, builder, value):
extracted = []
for i, dm in enumerate(self._models):
extracted.append(getattr(dm, methname)(builder,
self.get(builder, value, i)))
return tuple(extracted)
def _from(self, methname, builder, value):
struct = ir.Constant(self.get_value_type(), ir.Undefined)
for i, (dm, val) in enumerate(zip(self._models, value)):
v = getattr(dm, methname)(builder, val)
struct = self.set(builder, struct, v, i)
return struct
def as_data(self, builder, value):
"""
Converts the LLVM struct in `value` into a representation suited for
storing into arrays.
Note
----
Current implementation rarely changes how types are represented for
"value" and "data". This is usually a pointless rebuild of the
immutable LLVM struct value. Luckily, LLVM optimization removes all
redundancy.
Sample usecase: Structures nested with pointers to other structures
that can be serialized into a flat representation when storing into
array.
"""
elems = self._as("as_data", builder, value)
struct = ir.Constant(self.get_data_type(), ir.Undefined)
for i, el in enumerate(elems):
struct = builder.insert_value(struct, el, [i])
return struct
def from_data(self, builder, value):
"""
Convert from "data" representation back into "value" representation.
Usually invoked when loading from array.
See notes in `as_data()`
"""
vals = [builder.extract_value(value, [i])
for i in range(len(self._members))]
return self._from("from_data", builder, vals)
def load_from_data_pointer(self, builder, ptr, align=None):
values = []
for i, model in enumerate(self._models):
elem_ptr = cgutils.gep_inbounds(builder, ptr, 0, i)
val = model.load_from_data_pointer(builder, elem_ptr, align)
values.append(val)
struct = ir.Constant(self.get_value_type(), ir.Undefined)
for i, val in enumerate(values):
struct = self.set(builder, struct, val, i)
return struct
    def as_argument(self, builder, value):
        # Arguments are passed as a flat tuple of per-field argument values.
        return self._as("as_argument", builder, value)
    def from_argument(self, builder, value):
        # Rebuild the struct value from the flat per-field argument tuple.
        return self._from("from_argument", builder, value)
def as_return(self, builder, value):
elems = self._as("as_data", builder, value)
struct = ir.Constant(self.get_data_type(), ir.Undefined)
for i, el in enumerate(elems):
struct = builder.insert_value(struct, el, [i])
return struct
def from_return(self, builder, value):
vals = [builder.extract_value(value, [i])
for i in range(len(self._members))]
return self._from("from_data", builder, vals)
    def get(self, builder, val, pos):
        """Extract the field at the given position or the fieldname.

        Args
        ----
        builder:
            LLVM IRBuilder
        val:
            LLVM struct value to extract the field from
        pos: int or str
            field index or field name

        Returns
        -------
        Extracted value
        """
        if isinstance(pos, str):
            pos = self.get_field_position(pos)
        return builder.extract_value(val, [pos],
                                     name="extracted." + self._fields[pos])
def set(self, builder, stval, val, pos):
"""Set a field at the given position or the fieldname
Args
----
builder:
LLVM IRBuilder
stval:
LLVM struct value
val:
value to be inserted
pos: int or str
field index or field name
Returns
-------
A new LLVM struct with the value inserted
"""
if isinstance(pos, str):
pos = self.get_field_position(pos)
return builder.insert_value(stval, val, [pos],
name="inserted." + self._fields[pos])
def get_field_position(self, field):
try:
return self._fields.index(field)
except ValueError:
raise KeyError("%s does not have a field named %r"
% (self.__class__.__name__, field))
    @property
    def field_count(self):
        # Number of fields in the modeled struct.
        return len(self._fields)
def get_type(self, pos):
"""Get the frontend type (numba type) of a field given the position
or the fieldname
Args
----
pos: int or str
field index or field name
"""
if isinstance(pos, str):
pos = self.get_field_position(pos)
return self._members[pos]
def get_model(self, pos):
"""
Get the datamodel of a field given the position or the fieldname.
Args
----
pos: int or str
field index or field name
"""
return self._models[pos]
    def traverse(self, builder):
        """Return a list of (frontend type, getter) pairs, one per field.

        Each getter takes a struct value and extracts one field; ``partial``
        binds the field name now, avoiding the late-binding-closure pitfall
        in the comprehension below.
        """
        def getter(k, value):
            # Guard against being handed a struct of the wrong type.
            if value.type != self.get_value_type():
                args = self.get_value_type(), value.type
                raise TypeError("expecting {0} but got {1}".format(*args))
            return self.get(builder, value, k)

        return [(self.get_type(k), partial(getter, k)) for k in self._fields]
def inner_types(self):
types = []
for dm in self._models:
types += dm.traverse_types()
return types
@register_default(types.Complex)
class ComplexModel(StructModel):
    """Model a complex number as a {real, imag} struct of the underlying
    float type."""
    _element_type = NotImplemented

    def __init__(self, dmm, fe_type):
        float_ty = fe_type.underlying_float
        fields = [
            ('real', float_ty),
            ('imag', float_ty),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.LiteralList)
@register_default(types.LiteralStrKeyDict)
@register_default(types.Tuple)
@register_default(types.NamedTuple)
@register_default(types.StarArgTuple)
class TupleModel(StructModel):
    """Model tuple-like types as a struct with one field per element,
    named f0, f1, ..."""

    def __init__(self, dmm, fe_type):
        fields = [('f%d' % idx, elem_ty)
                  for idx, elem_ty in enumerate(fe_type)]
        super().__init__(dmm, fe_type, fields)
@register_default(types.UnionType)
class UnionModel(StructModel):
    """Model a union as a type tag plus a tuple holding every variant."""

    def __init__(self, dmm, fe_type):
        fields = [
            ('tag', types.uintp),
            # XXX: it should really be a MemInfoPointer(types.voidptr)
            ('payload', types.Tuple.from_types(fe_type.types)),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.Pair)
class PairModel(StructModel):
    """Model a heterogeneous (first, second) pair as a two-field struct."""

    def __init__(self, dmm, fe_type):
        fields = [
            ('first', fe_type.first_type),
            ('second', fe_type.second_type),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.ListPayload)
class ListPayloadModel(StructModel):
    """Backing storage of a list.

    The fields are mutable but the payload is always manipulated by
    reference, so mutations of a list are seen by its iterators.
    """

    def __init__(self, dmm, fe_type):
        fields = [
            ('size', types.intp),
            ('allocated', types.intp),
            # Only meaningful for reflected lists
            ('dirty', types.boolean),
            # Header of an inlined var-sized array of items
            ('data', fe_type.container.dtype),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.List)
class ListModel(StructModel):
    """A list value: a meminfo pointing at the ListPayload, plus the
    parent Python object (used for reflected lists only)."""

    def __init__(self, dmm, fe_type):
        payload = types.ListPayload(fe_type)
        fields = [
            # The meminfo data points to a ListPayload
            ('meminfo', types.MemInfoPointer(payload)),
            # Only meaningful for reflected lists
            ('parent', types.pyobject),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.ListIter)
class ListIterModel(StructModel):
    """Iterator over a list: shares the list's payload meminfo and keeps
    its own mutable index."""

    def __init__(self, dmm, fe_type):
        payload = types.ListPayload(fe_type.container)
        fields = [
            # Shared with the original list object
            ('meminfo', types.MemInfoPointer(payload)),
            ('index', types.EphemeralPointer(types.intp)),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.SetEntry)
class SetEntryModel(StructModel):
    """One hash-table slot of a set: the cached hash plus the key."""

    def __init__(self, dmm, fe_type):
        key_type = fe_type.set_type.dtype
        fields = [
            # Sentinel hashes: -1 = empty, -2 = deleted
            ('hash', types.intp),
            ('key', key_type),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.SetPayload)
class SetPayloadModel(StructModel):
    """Backing storage of a set: open-addressing hash table bookkeeping
    followed by the inlined entries array."""

    def __init__(self, dmm, fe_type):
        entry = types.SetEntry(fe_type.container)
        fields = [
            # Number of active + deleted entries
            ('fill', types.intp),
            # Number of active entries
            ('used', types.intp),
            # Allocated size - 1 (size being a power of 2)
            ('mask', types.intp),
            # Search finger
            ('finger', types.intp),
            # Only meaningful for reflected sets
            ('dirty', types.boolean),
            # Header of an inlined var-sized array of entries
            ('entries', entry),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.Set)
class SetModel(StructModel):
    """A set value: a meminfo pointing at the SetPayload, plus the parent
    Python object (used for reflected sets only)."""

    def __init__(self, dmm, fe_type):
        payload = types.SetPayload(fe_type)
        fields = [
            # The meminfo data points to a SetPayload
            ('meminfo', types.MemInfoPointer(payload)),
            # Only meaningful for reflected sets
            ('parent', types.pyobject),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.SetIter)
class SetIterModel(StructModel):
    """Iterator over a set: shares the set's payload meminfo and keeps its
    own mutable index into the entries table."""

    def __init__(self, dmm, fe_type):
        payload = types.SetPayload(fe_type.container)
        fields = [
            # Shared with the original set object
            ('meminfo', types.MemInfoPointer(payload)),
            # The index into the entries table
            ('index', types.EphemeralPointer(types.intp)),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.Array)
@register_default(types.Buffer)
@register_default(types.ByteArray)
@register_default(types.Bytes)
@register_default(types.MemoryView)
@register_default(types.PyArray)
class ArrayModel(StructModel):
    """Model buffer-like objects as the usual ndarray struct: ownership
    info, item accounting, data pointer, shape and strides."""

    def __init__(self, dmm, fe_type):
        intp_tuple = types.UniTuple(types.intp, fe_type.ndim)
        fields = [
            ('meminfo', types.MemInfoPointer(fe_type.dtype)),
            ('parent', types.pyobject),
            ('nitems', types.intp),
            ('itemsize', types.intp),
            ('data', types.CPointer(fe_type.dtype)),
            ('shape', intp_tuple),
            ('strides', intp_tuple),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.ArrayFlags)
class ArrayFlagsModel(StructModel):
    """Model array flags as a struct holding only the owning array."""

    def __init__(self, dmm, fe_type):
        fields = [
            ('parent', fe_type.array_type),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.NestedArray)
class NestedArrayModel(ArrayModel):
    """Array nested inside a record: same "value" struct as ArrayModel,
    but the "data" representation is the inlined fixed-size array of the
    item's data type."""
    def __init__(self, dmm, fe_type):
        # Backend data type of a single item, resolved through the data
        # model manager.
        self._be_type = dmm.lookup(fe_type.dtype).get_data_type()
        super(NestedArrayModel, self).__init__(dmm, fe_type)
    def get_data_type(self):
        # Stored inline: a fixed-length LLVM array of the item data type.
        ret = ir.ArrayType(self._be_type, self._fe_type.nitems)
        return ret
@register_default(types.Optional)
class OptionalModel(StructModel):
    """Model Optional[T] as a {data: T, valid: bool} struct."""
    def __init__(self, dmm, fe_type):
        members = [
            ('data', fe_type.type),
            ('valid', types.boolean),
        ]
        # Model of the wrapped (non-optional) type; return conversions are
        # delegated to it.
        self._value_model = dmm.lookup(fe_type.type)
        super(OptionalModel, self).__init__(dmm, fe_type, members)
    def get_return_type(self):
        # Returned as the wrapped type's return representation.
        return self._value_model.get_return_type()
    def as_return(self, builder, value):
        # Converting an Optional *to* a return value is not supported.
        raise NotImplementedError
    def from_return(self, builder, value):
        return self._value_model.from_return(builder, value)
    def traverse(self, builder):
        def get_data(value):
            # When not valid, present a zero/None constant of the payload
            # type instead of whatever garbage is in the data field.
            valid = get_valid(value)
            data = self.get(builder, value, "data")
            return builder.select(valid, data, ir.Constant(data.type, None))
        def get_valid(value):
            return self.get(builder, value, "valid")
        return [(self.get_type("data"), get_data),
                (self.get_type("valid"), get_valid)]
@register_default(types.Record)
class RecordModel(CompositeModel):
    """Model a NumPy record as a raw byte array, passed by pointer."""

    def __init__(self, dmm, fe_type):
        super().__init__(dmm, fe_type)
        self._models = [self._dmm.lookup(t) for _, t in fe_type.members]
        # An [N x i8] blob sized to the record, and a pointer to it.
        self._be_type = ir.ArrayType(ir.IntType(8), fe_type.size)
        self._be_ptr_type = self._be_type.as_pointer()

    def get_value_type(self):
        """Passed around as reference to underlying data
        """
        return self._be_ptr_type

    def get_argument_type(self):
        return self._be_ptr_type

    def get_return_type(self):
        return self._be_ptr_type

    def get_data_type(self):
        return self._be_type

    def as_data(self, builder, value):
        # Dereference the byte-array pointer.
        return builder.load(value)

    def from_data(self, builder, value):
        raise NotImplementedError("use load_from_data_pointer() instead")

    # Arguments and returns carry the pointer unchanged.
    def as_argument(self, builder, value):
        return value

    def from_argument(self, builder, value):
        return value

    def as_return(self, builder, value):
        return value

    def from_return(self, builder, value):
        return value

    def load_from_data_pointer(self, builder, ptr, align=None):
        # No copy needed: reinterpret the data pointer as the value type.
        return builder.bitcast(ptr, self.get_value_type())
@register_default(types.UnicodeCharSeq)
class UnicodeCharSeq(DataModel):
    """Model a fixed-length unicode character sequence as an LLVM array
    of unicode-sized integers.  Value and data layouts are identical, so
    every conversion below is the identity.
    """

    def __init__(self, dmm, fe_type):
        super().__init__(dmm, fe_type)
        char_bits = numpy_support.sizeof_unicode_char * 8
        self._be_type = ir.ArrayType(ir.IntType(char_bits), fe_type.count)

    def get_value_type(self):
        return self._be_type

    def get_data_type(self):
        return self._be_type

    def as_data(self, builder, value):
        return value

    def from_data(self, builder, value):
        return value

    def as_return(self, builder, value):
        return value

    def from_return(self, builder, value):
        return value

    def as_argument(self, builder, value):
        return value

    def from_argument(self, builder, value):
        return value
@register_default(types.CharSeq)
class CharSeq(DataModel):
    """Model a fixed-length byte character sequence as an [N x i8] array.
    Value and data layouts are identical, so every conversion below is
    the identity.
    """

    def __init__(self, dmm, fe_type):
        super().__init__(dmm, fe_type)
        self._be_type = ir.ArrayType(ir.IntType(8), fe_type.count)

    def get_value_type(self):
        return self._be_type

    def get_data_type(self):
        return self._be_type

    def as_data(self, builder, value):
        return value

    def from_data(self, builder, value):
        return value

    def as_return(self, builder, value):
        return value

    def from_return(self, builder, value):
        return value

    def as_argument(self, builder, value):
        return value

    def from_argument(self, builder, value):
        return value
class CContiguousFlatIter(StructModel):
    """Flat iterator over a C-contiguous array: a single linear index is
    enough.  With *need_indices* (for ndenumerate) an extra per-dimension
    index array is carried.
    """

    def __init__(self, dmm, fe_type, need_indices):
        assert fe_type.array_type.layout == 'C'
        array_type = fe_type.array_type
        ndim = array_type.ndim
        # Fix: removed the unused local ``dtype = array_type.dtype``.
        members = [('array', array_type),
                   ('stride', types.intp),
                   ('index', types.EphemeralPointer(types.intp)),
                   ]
        if need_indices:
            # For ndenumerate()
            members.append(('indices', types.EphemeralArray(types.intp, ndim)))
        super(CContiguousFlatIter, self).__init__(dmm, fe_type, members)
class FlatIter(StructModel):
    """Flat iterator for arrays of arbitrary layout: keeps per-dimension
    pointers and indices plus an exhaustion flag."""

    def __init__(self, dmm, fe_type):
        array_type = fe_type.array_type
        dtype = array_type.dtype
        ndim = array_type.ndim
        fields = [
            ('array', array_type),
            ('pointers', types.EphemeralArray(types.CPointer(dtype), ndim)),
            ('indices', types.EphemeralArray(types.intp, ndim)),
            ('exhausted', types.EphemeralPointer(types.boolean)),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.UniTupleIter)
class UniTupleIter(StructModel):
    """Iterator over a homogeneous tuple: mutable index plus the tuple."""

    def __init__(self, dmm, fe_type):
        fields = [
            ('index', types.EphemeralPointer(types.intp)),
            ('tuple', fe_type.container,),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.SliceType)
class SliceModel(StructModel):
    """Model a slice as the {start, stop, step} integer triple."""

    def __init__(self, dmm, fe_type):
        fields = [
            ('start', types.intp),
            ('stop', types.intp),
            ('step', types.intp),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.NPDatetime)
@register_default(types.NPTimedelta)
class NPDatetimeModel(PrimitiveModel):
    """Represent NumPy datetime64/timedelta64 values as raw 64-bit ints."""

    def __init__(self, dmm, fe_type):
        super().__init__(dmm, fe_type, ir.IntType(64))
@register_default(types.ArrayIterator)
class ArrayIterator(StructModel):
    """Iterator over the leading dimension of an array."""

    def __init__(self, dmm, fe_type):
        # An unsigned index avoids the cost of negative index tests.
        fields = [
            ('index', types.EphemeralPointer(types.uintp)),
            ('array', fe_type.array_type),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.EnumerateType)
class EnumerateType(StructModel):
    """enumerate() state: a mutable counter plus the wrapped iterator."""

    def __init__(self, dmm, fe_type):
        fields = [
            ('count', types.EphemeralPointer(types.intp)),
            ('iter', fe_type.source_type),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.ZipType)
class ZipType(StructModel):
    """zip() state: one field per source iterator (iter0, iter1, ...)."""

    def __init__(self, dmm, fe_type):
        fields = [('iter%d' % idx, src.iterator_type)
                  for idx, src in enumerate(fe_type.source_types)]
        super().__init__(dmm, fe_type, fields)
@register_default(types.RangeIteratorType)
class RangeIteratorType(StructModel):
    """range() iterator state: mutable cursor and remaining count, plus
    the immutable stop/step."""

    def __init__(self, dmm, fe_type):
        int_type = fe_type.yield_type
        fields = [
            ('iter', types.EphemeralPointer(int_type)),
            ('stop', int_type),
            ('step', int_type),
            ('count', types.EphemeralPointer(int_type)),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.Generator)
class GeneratorModel(CompositeModel):
    """Model a generator as a closure struct {i32 header, args, state},
    passed around by pointer.

    NOTE(review): the leading int32 member presumably holds the resume
    index — confirm against the generator lowering code.
    """
    def __init__(self, dmm, fe_type):
        super(GeneratorModel, self).__init__(dmm, fe_type)
        # XXX Fold this in DataPacker?
        # Omitted arguments carry no runtime state.
        self._arg_models = [self._dmm.lookup(t) for t in fe_type.arg_types
                            if not isinstance(t, types.Omitted)]
        self._state_models = [self._dmm.lookup(t) for t in fe_type.state_types]
        self._args_be_type = ir.LiteralStructType(
            [t.get_data_type() for t in self._arg_models])
        self._state_be_type = ir.LiteralStructType(
            [t.get_data_type() for t in self._state_models])
        # The whole generator closure
        self._be_type = ir.LiteralStructType(
            [self._dmm.lookup(types.int32).get_value_type(),
             self._args_be_type, self._state_be_type])
        self._be_ptr_type = self._be_type.as_pointer()
    def get_value_type(self):
        """
        The generator closure is passed around as a reference.
        """
        return self._be_ptr_type
    def get_argument_type(self):
        return self._be_ptr_type
    def get_return_type(self):
        # Returned by value: the full closure struct.
        return self._be_type
    def get_data_type(self):
        return self._be_type
    # Arguments carry the closure pointer unchanged.
    def as_argument(self, builder, value):
        return value
    def from_argument(self, builder, value):
        return value
    # Returns use the by-value "data" form.
    def as_return(self, builder, value):
        return self.as_data(builder, value)
    def from_return(self, builder, value):
        return self.from_data(builder, value)
    def as_data(self, builder, value):
        # Dereference the closure pointer.
        return builder.load(value)
    def from_data(self, builder, value):
        # Spill the struct to an alloca so it can be handled by reference.
        stack = cgutils.alloca_once(builder, value.type)
        builder.store(value, stack)
        return stack
@register_default(types.ArrayCTypes)
class ArrayCTypesModel(StructModel):
    """Model the .ctypes attribute of an array: the data pointer and the
    meminfo keeping it alive."""

    def __init__(self, dmm, fe_type):
        fields = [
            ('data', types.CPointer(fe_type.dtype)),
            ('meminfo', types.MemInfoPointer(fe_type.dtype)),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.RangeType)
class RangeModel(StructModel):
    """Model a range object as its {start, stop, step} triple."""

    def __init__(self, dmm, fe_type):
        int_type = fe_type.iterator_type.yield_type
        fields = [
            ('start', int_type),
            ('stop', int_type),
            ('step', int_type),
        ]
        super().__init__(dmm, fe_type, fields)
# =============================================================================
@register_default(types.NumpyNdIndexType)
class NdIndexModel(StructModel):
    """np.ndindex iterator state: the shape, the current per-dimension
    indices and an exhaustion flag."""

    def __init__(self, dmm, fe_type):
        ndim = fe_type.ndim
        fields = [
            ('shape', types.UniTuple(types.intp, ndim)),
            ('indices', types.EphemeralArray(types.intp, ndim)),
            ('exhausted', types.EphemeralPointer(types.boolean)),
        ]
        super().__init__(dmm, fe_type, fields)
@register_default(types.NumpyFlatType)
def handle_numpy_flat_type(dmm, ty):
    """Pick the cheap C-contiguous flat iterator when the layout allows,
    otherwise the generic strided one."""
    if ty.array_type.layout != 'C':
        return FlatIter(dmm, ty)
    return CContiguousFlatIter(dmm, ty, need_indices=False)
@register_default(types.NumpyNdEnumerateType)
def handle_numpy_ndenumerate_type(dmm, ty):
    """Same dispatch as handle_numpy_flat_type, but the C-contiguous
    iterator must also track per-dimension indices for ndenumerate."""
    if ty.array_type.layout != 'C':
        return FlatIter(dmm, ty)
    return CContiguousFlatIter(dmm, ty, need_indices=True)
@register_default(types.BoundFunction)
def handle_bound_function(dmm, ty):
    # A bound function is modeled exactly like its receiver ("this") type.
    return dmm[ty.this]
@register_default(types.NumpyNdIterType)
class NdIter(StructModel):
    """np.nditer state: the iterated arrays, the broadcast shape/indices,
    plus per-sub-iterator indexing state and slots for scalar arguments.
    """

    def __init__(self, dmm, fe_type):
        array_types = fe_type.arrays
        ndim = fe_type.ndim
        shape_len = ndim if fe_type.need_shaped_indexing else 1
        members = [('exhausted', types.EphemeralPointer(types.boolean)),
                   ('arrays', types.Tuple(array_types)),
                   # The iterator's main shape and indices
                   ('shape', types.UniTuple(types.intp, shape_len)),
                   ('indices', types.EphemeralArray(types.intp, shape_len)),
                   ]
        # Indexing state for the various sub-iterators
        # XXX use a tuple instead?
        for i, sub in enumerate(fe_type.indexers):
            kind, start_dim, end_dim, _ = sub
            member_name = 'index%d' % i
            if kind == 'flat':
                # A single index into the flattened array
                members.append((member_name, types.EphemeralPointer(types.intp)))
            elif kind in ('scalar', 'indexed', '0d'):
                # Nothing required
                pass
            else:
                # Fix: was a bare ``assert 0`` — stripped under -O and
                # uninformative when hit.
                raise AssertionError("unexpected indexer kind: %r" % (kind,))
        # Slots holding values of the scalar args
        # XXX use a tuple instead?
        for i, ty in enumerate(fe_type.arrays):
            if not isinstance(ty, types.Array):
                member_name = 'scalar%d' % i
                members.append((member_name, types.EphemeralPointer(ty)))
        super(NdIter, self).__init__(dmm, fe_type, members)
@register_default(types.DeferredType)
class DeferredStructModel(CompositeModel):
    """Model a deferred (recursively defined) type via LLVM identified
    struct types.  The opaque named types break the recursion; their
    bodies are filled in lazily by ``_define`` once the actual model is
    resolvable.
    """
    def __init__(self, dmm, fe_type):
        super(DeferredStructModel, self).__init__(dmm, fe_type)
        # Unique LLVM type name per deferred type instance.
        self.typename = "deferred.{0}".format(id(fe_type))
        self.actual_fe_type = fe_type.get()
    def get_value_type(self):
        # Identified (possibly still opaque) struct type for the value repr.
        return ir.global_context.get_identified_type(self.typename + '.value')
    def get_data_type(self):
        # Identified (possibly still opaque) struct type for the data repr.
        return ir.global_context.get_identified_type(self.typename + '.data')
    def get_argument_type(self):
        return self._actual_model.get_argument_type()
    def as_argument(self, builder, value):
        # Unwrap the single field, then delegate to the actual model.
        inner = self.get(builder, value)
        return self._actual_model.as_argument(builder, inner)
    def from_argument(self, builder, value):
        res = self._actual_model.from_argument(builder, value)
        return self.set(builder, self.make_uninitialized(), res)
    def from_data(self, builder, value):
        self._define()
        elem = self.get(builder, value)
        value = self._actual_model.from_data(builder, elem)
        out = self.make_uninitialized()
        return self.set(builder, out, value)
    def as_data(self, builder, value):
        self._define()
        elem = self.get(builder, value)
        value = self._actual_model.as_data(builder, elem)
        out = self.make_uninitialized(kind='data')
        return self.set(builder, out, value)
    def from_return(self, builder, value):
        return value
    def as_return(self, builder, value):
        return value
    def get(self, builder, value):
        # The wrapper struct has exactly one field: the actual value.
        return builder.extract_value(value, [0])
    def set(self, builder, value, content):
        return builder.insert_value(value, content, [0])
    def make_uninitialized(self, kind='value'):
        """Return an undef constant of the value or data struct type."""
        self._define()
        if kind == 'value':
            ty = self.get_value_type()
        else:
            ty = self.get_data_type()
        return ir.Constant(ty, ir.Undefined)
    def _define(self):
        # Fill in the bodies of both identified types (idempotent).
        valty = self.get_value_type()
        self._define_value_type(valty)
        datty = self.get_data_type()
        self._define_data_type(datty)
    def _define_value_type(self, value_type):
        if value_type.is_opaque:
            value_type.set_body(self._actual_model.get_value_type())
    def _define_data_type(self, data_type):
        if data_type.is_opaque:
            data_type.set_body(self._actual_model.get_data_type())
    @property
    def _actual_model(self):
        # Looked up lazily: the actual type may not be modelable earlier.
        return self._dmm.lookup(self.actual_fe_type)
    def traverse(self, builder):
        return [(self.actual_fe_type,
                 lambda value: builder.extract_value(value, [0]))]
@register_default(types.StructRefPayload)
class StructPayloadModel(StructModel):
    """Model for the payload of a mutable struct."""

    def __init__(self, dmm, fe_typ):
        fields = tuple(fe_typ.field_dict.items())
        super().__init__(dmm, fe_typ, fields)
class StructRefModel(StructModel):
    """Model for a mutable struct: just a meminfo reference to its
    payload."""

    def __init__(self, dmm, fe_typ):
        payload_dtype = fe_typ.get_data_type()
        fields = [
            ("meminfo", types.MemInfoPointer(payload_dtype)),
        ]
        super().__init__(dmm, fe_typ, fields)
| {
"repo_name": "sklam/numba",
"path": "numba/core/datamodel/models.py",
"copies": "1",
"size": "44008",
"license": "bsd-2-clause",
"hash": -8488065218421940000,
"line_mean": 31.146092038,
"line_max": 83,
"alpha_frac": 0.6064806399,
"autogenerated": false,
"ratio": 3.774271012006861,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9875947860181022,
"avg_score": 0.0009607583451676568,
"num_lines": 1369
} |
from functools import partial
from .matchers import MatchCriteria
try:
import asyncio
except ImportError:
asyncio = None
class Behaviour(object):
    """Records the match criteria and the queue of results for one stubbed
    call.  Results are consumed in order; the final one repeats forever.
    """

    def __init__(self, name=None, match_criteria=None, result=None):
        self.name = name
        self.match_criteria = match_criteria
        self.results = [result] if result else []
        self.result_index = 0

    def get_result(self, args=None, kwargs=None):
        current = self.results[self.result_index]
        # Advance until the last result, then keep replaying it.
        if self.result_index < len(self.results) - 1:
            self.result_index += 1
        return current.get_result(args or [], kwargs or {})

    def add_result(self, result):
        self.results.append(result)

    def set_match_criteria(self, match_criteria):
        self.match_criteria = match_criteria

    def matches(self, args, kwargs):
        return self.match_criteria.matches(args, kwargs)
class Result(object):
    """Base class for stubbed call outcomes.

    Subclasses implement ``_get_result``; ``get_result`` either returns
    the outcome directly or, when ``future=True``, wraps it in an
    awaitable coroutine.
    """

    def __init__(self, future=False):
        self.future = future
        if future and not asyncio:
            raise RuntimeError('Can only use async feature with Python 3.5+')

    def _get_result(self, args, kwargs):
        raise NotImplementedError()

    def get_result(self, args, kwargs):
        if not self.future:
            return self._get_result(args, kwargs)
        # Fix: asyncio.coroutine was deprecated in 3.8 and removed in 3.11,
        # so the old asyncio.coroutine(partial(...))() crashed on modern
        # Python.  A native coroutine has the same semantics: the wrapped
        # _get_result runs when the coroutine is awaited.
        async def _coro():
            return self._get_result(args, kwargs)
        return _coro()
class ValueResult(Result):
    """A stubbed outcome that resolves to a fixed value."""

    def __init__(self, value, future=False):
        super(ValueResult, self).__init__(future)
        self.value = value

    def __eq__(self, other):
        # Equal iff the other is also a ValueResult with an equal value.
        if not isinstance(other, ValueResult):
            return False
        return self.value == other.value

    def _get_result(self, args, kwargs):
        return self.value
class ErrorResult(Result):
    """A stubbed outcome that raises a stored exception."""

    def __init__(self, exception, future=False):
        super(ErrorResult, self).__init__(future)
        self.exception = exception

    def __eq__(self, other):
        # NOTE: exception instances compare by identity unless they
        # define __eq__ themselves.
        if not isinstance(other, ErrorResult):
            return False
        return self.exception == other.exception

    def _get_result(self, args, kwargs):
        raise self.exception
class ComputationResult(Result):
    """A stubbed outcome computed by calling a stored function with the
    invocation's args/kwargs."""

    def __init__(self, function, future=False):
        super(ComputationResult, self).__init__(future)
        self.function = function

    def __eq__(self, other):
        if not isinstance(other, ComputationResult):
            return False
        return self.function == other.function

    def _get_result(self, args, kwargs):
        return self.function(*args, **kwargs)
class BehaviourBuilder(object):
    """Fluent builder that captures a stubbed call and its results.

    The *first* attribute access, call, or indexing on the builder records
    the behaviour's name / match criteria; later accesses fall through to
    the real methods (``then_return`` etc.).

    NOTE(review): ``__getattribute__`` is overridden, so internal attribute
    reads below deliberately go through ``super().__getattribute__`` to
    bypass the interception.
    """
    def __init__(self, mock, behaviour=None):
        self.mock = mock
        behaviour = behaviour or Behaviour()
        self.behaviour = behaviour
        # Track which parts of the behaviour have been captured so far.
        self.name_defined = behaviour.name is not None
        self.match_criteria_defined = behaviour.match_criteria is not None
    def __getattribute__(self, name):
        # First attribute access names the behaviour and returns the
        # builder itself so the subsequent call can be intercepted.
        name_has_been_defined = super(BehaviourBuilder, self).__getattribute__('name_defined')
        if not name_has_been_defined:
            self.name_defined = True
            self.define_behaviour_name(name)
            return self
        return super(BehaviourBuilder, self).__getattribute__(name)
    def __call__(self, *args, **kwargs):
        # Calling the builder captures '__call__' as the name (if still
        # unnamed) and the call signature as the match criteria.
        name_has_been_defined = super(BehaviourBuilder, self).__getattribute__('name_defined')
        if not name_has_been_defined:
            self.name_defined = True
            self.define_behaviour_name('__call__')
        match_criteria_has_been_defined = super(BehaviourBuilder, self).__getattribute__('match_criteria_defined')
        if not match_criteria_has_been_defined:
            self.match_criteria_defined = True
            self.define_match_criteria(args, kwargs)
            return self
        raise TypeError("'{name}' object is not callable".format(name=self.__class__.__name__))
    def __getitem__(self, item):
        # Indexing captures '__getitem__' plus the key in one step.
        name_has_been_defined = super(BehaviourBuilder, self).__getattribute__('name_defined')
        if not name_has_been_defined:
            self.name_defined = True
            self.define_behaviour_name('__getitem__')
            self.match_criteria_defined = True
            self.define_match_criteria((item,), {})
            return self
        raise TypeError("'{name}' object has no attribute __getitem__".format(name=self.__class__.__name__))
    def define_behaviour_name(self, name):
        self.behaviour.name = name
    def define_match_criteria(self, args, kwargs):
        """Freeze the criteria and register the behaviour on the mock."""
        match_criteria = MatchCriteria(args, kwargs)
        self.behaviour.set_match_criteria(match_criteria)
        if self.behaviour.name == '__getitem__':
            self.mock._add_item_behaviour(self.behaviour)
        else:
            self.mock._add_method_behaviour(self.behaviour)
    # then_* methods append results; each returns self for chaining.
    def then_return(self, value):
        self._add_result(ValueResult(value))
        return self
    def then_raise(self, exception):
        self._add_result(ErrorResult(exception))
        return self
    def then_compute(self, function):
        self._add_result(ComputationResult(function))
        return self
    def then_return_future(self, value):
        self._add_result(ValueResult(value, future=True))
        return self
    def then_raise_future(self, exception):
        self._add_result(ErrorResult(exception, future=True))
        return self
    def then_compute_future(self, function):
        self._add_result(ComputationResult(function, future=True))
        return self
    def _add_result(self, result):
        # A result without prior criteria means a property behaviour.
        if not self.match_criteria_defined:
            self.match_criteria_defined = True
            self.mock._add_property_behaviour(self.behaviour)
        self.behaviour.add_result(result)
| {
"repo_name": "atbentley/pock",
"path": "pock/behaviour.py",
"copies": "1",
"size": "5619",
"license": "mit",
"hash": -1188952334464354600,
"line_mean": 32.4464285714,
"line_max": 114,
"alpha_frac": 0.6328528208,
"autogenerated": false,
"ratio": 3.987934705464869,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5120787526264869,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from menpo.feature import no_op
from menpofit.math import (IIRLRegression, IRLRegression, PCRRegression,
OptimalLinearRegression, OPPRegression)
from menpofit.modelinstance import OrthoPDM
from menpofit.error import euclidean_bb_normalised_error
from menpofit.result import MultiScaleParametricIterativeResult
from .base import (BaseSupervisedDescentAlgorithm,
compute_parametric_delta_x, features_per_image,
features_per_patch, update_parametric_estimates,
print_parametric_info, fit_parametric_shape)
class ParametricShapeSDAlgorithm(BaseSupervisedDescentAlgorithm):
    r"""
    Abstract class for training a cascaded-regression Supervised Descent
    algorithm that employs a parametric shape model.

    Parameters
    ----------
    shape_model_cls : `subclass` of :map:`PDM`, optional
        The class to be used for building the shape model. The most common
        choice is :map:`OrthoPDM`.
    """
    def __init__(self, shape_model_cls=OrthoPDM):
        super(ParametricShapeSDAlgorithm, self).__init__()
        self.regressors = []
        self.shape_model_cls = shape_model_cls
        # Built lazily on the first call to _compute_delta_x.
        self.shape_model = None
    @property
    def _multi_scale_fitter_result(self):
        # The result class to be used by a multi-scale fitter
        return MultiScaleParametricIterativeResult
    def _compute_delta_x(self, gt_shapes, current_shapes):
        """Compute parametric shape increments (ground truth vs current)."""
        # This is called first - so train shape model here
        if self.shape_model is None:
            self.shape_model = self.shape_model_cls(gt_shapes)
        return compute_parametric_delta_x(gt_shapes, current_shapes,
                                          self.shape_model)
    def _update_estimates(self, estimated_delta_x, delta_x, gt_x,
                          current_shapes):
        """Apply the regressor's estimated increments to current_shapes."""
        update_parametric_estimates(estimated_delta_x, delta_x, gt_x,
                                    current_shapes, self.shape_model)
    def _compute_training_features(self, images, gt_shapes, current_shapes,
                                   prefix='', verbose=False):
        """Extract patch features for every image/shape pair."""
        # initialize sample counter
        return features_per_image(images, current_shapes, self.patch_shape,
                                  self.patch_features, prefix=prefix,
                                  verbose=verbose)
    def _compute_test_features(self, image, current_shape):
        """Extract patch features for a single image/shape pair."""
        return features_per_patch(image, current_shape,
                                  self.patch_shape, self.patch_features)
    def run(self, image, initial_shape, gt_shape=None, return_costs=False,
            **kwargs):
        r"""
        Run the algorithm to an image given an initial shape.

        Parameters
        ----------
        image : `menpo.image.Image` or subclass
            The image to be fitted.
        initial_shape : `menpo.shape.PointCloud`
            The initial shape from which the fitting procedure will start.
        gt_shape : `menpo.shape.PointCloud` or ``None``, optional
            The ground truth shape associated to the image.
        return_costs : `bool`, optional
            If ``True``, then the cost function values will be computed
            during the fitting procedure. Then these cost values will be
            assigned to the returned `fitting_result`. *Note that this
            argument currently has no effect and will raise a warning if set
            to ``True``. This is because it is not possible to evaluate the
            cost function of this algorithm.*

        Returns
        -------
        fitting_result: :map:`ParametricIterativeResult`
            The result of the fitting procedure.
        """
        return fit_parametric_shape(image, initial_shape, self,
                                    gt_shape=gt_shape,
                                    return_costs=return_costs)
    def _print_regression_info(self, _, gt_shapes, n_perturbations,
                               delta_x, estimated_delta_x, level_index,
                               prefix=''):
        """Print per-cascade training diagnostics."""
        print_parametric_info(self.shape_model, gt_shapes, n_perturbations,
                              delta_x, estimated_delta_x, level_index,
                              self._compute_error, prefix=prefix)
class ParametricShapeNewton(ParametricShapeSDAlgorithm):
    r"""
    Cascaded-regression algorithm with a parametric shape model, using
    Incremental Regularized Linear Regression (:map:`IRLRegression`) as the
    regressor of each cascade.

    Parameters
    ----------
    patch_features : `callable`, optional
        The features to be extracted from the patches of an image.
    patch_shape : `(int, int)`, optional
        The shape of the extracted patches.
    n_iterations : `int`, optional
        The number of iterations (cascades).
    shape_model_cls : `subclass` of :map:`PDM`, optional
        The class to be used for building the shape model. The most common
        choice is :map:`OrthoPDM`.
    compute_error : `callable`, optional
        The function to be used for computing the fitting error when training
        each cascade.
    alpha : `float`, optional
        The regularization parameter.
    bias : `bool`, optional
        Flag that controls whether to use a bias term.
    """

    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, shape_model_cls=OrthoPDM,
                 compute_error=euclidean_bb_normalised_error,
                 alpha=0, bias=True):
        super(ParametricShapeNewton, self).__init__(
            shape_model_cls=shape_model_cls)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
        # One IRLRegression per cascade, with regularization pre-bound.
        self._regressor_cls = partial(IRLRegression, alpha=alpha, bias=bias)
class ParametricShapeGaussNewton(ParametricShapeSDAlgorithm):
    r"""
    Cascaded-regression algorithm with a parametric shape model, using
    Indirect Incremental Regularized Linear Regression
    (:map:`IIRLRegression`) as the regressor of each cascade.

    Parameters
    ----------
    patch_features : `callable`, optional
        The features to be extracted from the patches of an image.
    patch_shape : `(int, int)`, optional
        The shape of the extracted patches.
    n_iterations : `int`, optional
        The number of iterations (cascades).
    shape_model_cls : `subclass` of :map:`PDM`, optional
        The class to be used for building the shape model. The most common
        choice is :map:`OrthoPDM`.
    compute_error : `callable`, optional
        The function to be used for computing the fitting error when training
        each cascade.
    alpha : `float`, optional
        The regularization parameter.
    bias : `bool`, optional
        Flag that controls whether to use a bias term.
    alpha2 : `float`, optional
        The regularization parameter of the Hessian matrix.
    """

    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, shape_model_cls=OrthoPDM,
                 compute_error=euclidean_bb_normalised_error,
                 alpha=0, bias=True, alpha2=0):
        super(ParametricShapeGaussNewton, self).__init__(
            shape_model_cls=shape_model_cls)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
        # One IIRLRegression per cascade, with both regularizers pre-bound.
        self._regressor_cls = partial(IIRLRegression, alpha=alpha, bias=bias,
                                      alpha2=alpha2)
class ParametricShapeOptimalRegression(ParametricShapeSDAlgorithm):
    r"""
    Cascaded-regression training algorithm that uses a parametric shape
    model, where each cascade is learnt with Multivariate Linear Regression
    with optimal reconstructions (:map:`OptimalLinearRegression`).

    Parameters
    ----------
    patch_features : `callable`, optional
        The features extracted from each image patch.
    patch_shape : `(int, int)`, optional
        The size of the extracted patches.
    n_iterations : `int`, optional
        The number of cascades.
    shape_model_cls : `subclass` of :map:`PDM`, optional
        The class used to build the shape model; :map:`OrthoPDM` is the
        most common choice.
    compute_error : `callable`, optional
        The fitting-error function used while training each cascade.
    variance : `float` or ``None``, optional
        The SVD variance.
    bias : `bool`, optional
        Whether a bias term is used.
    """
    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, shape_model_cls=OrthoPDM,
                 compute_error=euclidean_bb_normalised_error,
                 variance=None, bias=True):
        super(ParametricShapeOptimalRegression, self).__init__(
            shape_model_cls=shape_model_cls)
        # Factory for the per-cascade regressor, pre-configured with the
        # SVD variance and bias settings.
        self._regressor_cls = partial(OptimalLinearRegression,
                                      variance=variance, bias=bias)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
class ParametricShapePCRRegression(ParametricShapeSDAlgorithm):
    r"""
    Cascaded-regression training algorithm that uses a parametric shape
    model, where each cascade is learnt with Principal Component Regression
    (:map:`PCRRegression`).

    Parameters
    ----------
    patch_features : `callable`, optional
        The features extracted from each image patch.
    patch_shape : `(int, int)`, optional
        The size of the extracted patches.
    n_iterations : `int`, optional
        The number of cascades.
    shape_model_cls : `subclass` of :map:`PDM`, optional
        The class used to build the shape model; :map:`OrthoPDM` is the
        most common choice.
    compute_error : `callable`, optional
        The fitting-error function used while training each cascade.
    variance : `float` or ``None``, optional
        The SVD variance.
    bias : `bool`, optional
        Whether a bias term is used.

    Raises
    ------
    ValueError
        variance must be set to a number between 0 and 1
    """
    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, shape_model_cls=OrthoPDM,
                 compute_error=euclidean_bb_normalised_error,
                 variance=None, bias=True):
        super(ParametricShapePCRRegression, self).__init__(
            shape_model_cls=shape_model_cls)
        # Factory for the per-cascade regressor, pre-configured with the
        # SVD variance and bias settings.
        self._regressor_cls = partial(PCRRegression,
                                      variance=variance, bias=bias)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
class ParametricShapeOPPRegression(ParametricShapeSDAlgorithm):
    r"""
    Cascaded-regression training algorithm that uses a parametric shape
    model, where each cascade is learnt with Multivariate Linear Regression
    with Orthogonal Procrustes Problem reconstructions
    (:map:`OPPRegression`).

    Parameters
    ----------
    patch_features : `callable`, optional
        The features extracted from each image patch.
    patch_shape : `(int, int)`, optional
        The size of the extracted patches.
    n_iterations : `int`, optional
        The number of cascades.
    shape_model_cls : `subclass` of :map:`PDM`, optional
        The class used to build the shape model; :map:`OrthoPDM` is the
        most common choice.
    compute_error : `callable`, optional
        The fitting-error function used while training each cascade.
    whiten : `bool`, optional
        Whether a whitened PCA model is used.
    bias : `bool`, optional
        Whether a bias term is used.
    """
    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, shape_model_cls=OrthoPDM,
                 compute_error=euclidean_bb_normalised_error,
                 whiten=False, bias=True):
        super(ParametricShapeOPPRegression, self).__init__(
            shape_model_cls=shape_model_cls)
        # Factory for the per-cascade regressor, pre-configured with the
        # whitening and bias settings.
        self._regressor_cls = partial(OPPRegression,
                                      whiten=whiten, bias=bias)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
| {
"repo_name": "yuxiang-zhou/menpofit",
"path": "menpofit/sdm/algorithm/parametricshape.py",
"copies": "6",
"size": "12770",
"license": "bsd-3-clause",
"hash": -2097104099410488800,
"line_mean": 41.1452145215,
"line_max": 80,
"alpha_frac": 0.6312451057,
"autogenerated": false,
"ratio": 4.337635869565218,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00012223444566678888,
"num_lines": 303
} |
from functools import partial
from menpo.feature import no_op
from menpofit.result import MultiScaleNonParametricIterativeResult
from menpofit.error import euclidean_bb_normalised_error
from menpofit.math import (IIRLRegression, IRLRegression, PCRRegression,
OptimalLinearRegression, OPPRegression)
from .base import (BaseSupervisedDescentAlgorithm,
compute_non_parametric_delta_x, features_per_image,
features_per_patch, update_non_parametric_estimates,
print_non_parametric_info, fit_non_parametric_shape)
class NonParametricSDAlgorithm(BaseSupervisedDescentAlgorithm):
    r"""
    Abstract class for training a non-parametric cascaded-regression Supervised
    Descent algorithm.
    """
    def __init__(self):
        super(NonParametricSDAlgorithm, self).__init__()
        # Container for the trained regressors; populated elsewhere
        # (presumably one per cascade by the base-class training loop).
        self.regressors = []
    @property
    def _multi_scale_fitter_result(self):
        # The result class to be used by a multi-scale fitter
        return MultiScaleNonParametricIterativeResult
    def _compute_delta_x(self, gt_shapes, current_shapes):
        """Delegate computation of the shape increments to the
        non-parametric helper."""
        return compute_non_parametric_delta_x(gt_shapes, current_shapes)
    def _update_estimates(self, estimated_delta_x, delta_x, gt_x,
                          current_shapes):
        """Apply the estimated increments to ``current_shapes`` in place via
        the non-parametric helper."""
        update_non_parametric_estimates(estimated_delta_x, delta_x, gt_x,
                                        current_shapes)
    def _compute_training_features(self, images, gt_shapes, current_shapes,
                                   prefix='', verbose=False):
        """Extract ``self.patch_features`` from ``self.patch_shape`` patches
        around every current shape of every training image."""
        return features_per_image(images, current_shapes, self.patch_shape,
                                  self.patch_features, prefix=prefix,
                                  verbose=verbose)
    def _compute_test_features(self, image, current_shape):
        """Extract the same patch features for a single image at fit time."""
        return features_per_patch(image, current_shape,
                                  self.patch_shape, self.patch_features)
    def run(self, image, initial_shape, gt_shape=None, return_costs=False,
            **kwargs):
        r"""
        Run the algorithm to an image given an initial shape.
        Parameters
        ----------
        image : `menpo.image.Image` or subclass
            The image to be fitted.
        initial_shape : `menpo.shape.PointCloud`
            The initial shape from which the fitting procedure will start.
        gt_shape : class : `menpo.shape.PointCloud` or ``None``, optional
            The ground truth shape associated to the image.
        return_costs : `bool`, optional
            If ``True``, then the cost function values will be computed
            during the fitting procedure. Then these cost values will be
            assigned to the returned `fitting_result`. *Note that this
            argument currently has no effect and will raise a warning if set
            to ``True``. This is because it is not possible to evaluate the
            cost function of this algorithm.*
        Returns
        -------
        fitting_result: :map:`NonParametricIterativeResult`
            The result of the fitting procedure.
        """
        # All fitting logic lives in the shared helper; this object supplies
        # the regressors and feature extraction.
        return fit_non_parametric_shape(image, initial_shape, self,
                                        gt_shape=gt_shape,
                                        return_costs=return_costs)
    def _print_regression_info(self, template_shape, gt_shapes, n_perturbations,
                               delta_x, estimated_delta_x, level_index,
                               prefix=''):
        """Print per-cascade training statistics using ``self._compute_error``
        as the error metric."""
        print_non_parametric_info(template_shape, gt_shapes, n_perturbations,
                                  delta_x, estimated_delta_x, level_index,
                                  self._compute_error, prefix=prefix)
class NonParametricNewton(NonParametricSDAlgorithm):
    r"""
    Non-parametric cascaded-regression training algorithm whose cascades are
    learnt with Incremental Regularized Linear Regression
    (:map:`IRLRegression`).

    Parameters
    ----------
    patch_features : `callable`, optional
        The features extracted from each image patch.
    patch_shape : `(int, int)`, optional
        The size of the extracted patches.
    n_iterations : `int`, optional
        The number of cascades.
    compute_error : `callable`, optional
        The fitting-error function used while training each cascade.
    alpha : `float`, optional
        The regularization parameter.
    bias : `bool`, optional
        Whether a bias term is used.
    """
    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, compute_error=euclidean_bb_normalised_error,
                 alpha=0, bias=True):
        super(NonParametricNewton, self).__init__()
        # Factory for the per-cascade regressor, pre-configured with the
        # regularization settings.
        self._regressor_cls = partial(IRLRegression, alpha=alpha, bias=bias)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
class NonParametricGaussNewton(NonParametricSDAlgorithm):
    r"""
    Non-parametric cascaded-regression training algorithm whose cascades are
    learnt with Indirect Incremental Regularized Linear Regression
    (:map:`IIRLRegression`).

    Parameters
    ----------
    patch_features : `callable`, optional
        The features extracted from each image patch.
    patch_shape : `(int, int)`, optional
        The size of the extracted patches.
    n_iterations : `int`, optional
        The number of cascades.
    compute_error : `callable`, optional
        The fitting-error function used while training each cascade.
    alpha : `float`, optional
        The regularization parameter.
    bias : `bool`, optional
        Whether a bias term is used.
    alpha2 : `float`, optional
        The regularization parameter of the Hessian matrix.
    """
    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, compute_error=euclidean_bb_normalised_error,
                 alpha=0, bias=True, alpha2=0):
        super(NonParametricGaussNewton, self).__init__()
        # Factory for the per-cascade regressor, pre-configured with the
        # regularization settings.
        self._regressor_cls = partial(IIRLRegression, alpha=alpha,
                                      bias=bias, alpha2=alpha2)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
class NonParametricPCRRegression(NonParametricSDAlgorithm):
    r"""
    Non-parametric cascaded-regression training algorithm whose cascades are
    learnt with Principal Component Regression (:map:`PCRRegression`).

    Parameters
    ----------
    patch_features : `callable`, optional
        The features extracted from each image patch.
    patch_shape : `(int, int)`, optional
        The size of the extracted patches.
    n_iterations : `int`, optional
        The number of cascades.
    compute_error : `callable`, optional
        The fitting-error function used while training each cascade.
    variance : `float` or ``None``, optional
        The SVD variance.
    bias : `bool`, optional
        Whether a bias term is used.
    """
    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, compute_error=euclidean_bb_normalised_error,
                 variance=None, bias=True):
        super(NonParametricPCRRegression, self).__init__()
        # Factory for the per-cascade regressor, pre-configured with the
        # SVD variance and bias settings.
        self._regressor_cls = partial(PCRRegression, variance=variance,
                                      bias=bias)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
class NonParametricOptimalRegression(NonParametricSDAlgorithm):
    r"""
    Non-parametric cascaded-regression training algorithm whose cascades are
    learnt with Multivariate Linear Regression with optimal reconstructions
    (:map:`OptimalLinearRegression`).

    Parameters
    ----------
    patch_features : `callable`, optional
        The features extracted from each image patch.
    patch_shape : `(int, int)`, optional
        The size of the extracted patches.
    n_iterations : `int`, optional
        The number of cascades.
    compute_error : `callable`, optional
        The fitting-error function used while training each cascade.
    variance : `float` or ``None``, optional
        The SVD variance.
    bias : `bool`, optional
        Whether a bias term is used.
    """
    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, compute_error=euclidean_bb_normalised_error,
                 variance=None, bias=True):
        super(NonParametricOptimalRegression, self).__init__()
        # Factory for the per-cascade regressor, pre-configured with the
        # SVD variance and bias settings.
        self._regressor_cls = partial(OptimalLinearRegression,
                                      variance=variance, bias=bias)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
class NonParametricOPPRegression(NonParametricSDAlgorithm):
    r"""
    Non-parametric cascaded-regression training algorithm whose cascades are
    learnt with Multivariate Linear Regression with Orthogonal Procrustes
    Problem reconstructions (:map:`OPPRegression`).

    Parameters
    ----------
    patch_features : `callable`, optional
        The features extracted from each image patch.
    patch_shape : `(int, int)`, optional
        The size of the extracted patches.
    n_iterations : `int`, optional
        The number of cascades.
    compute_error : `callable`, optional
        The fitting-error function used while training each cascade.
    bias : `bool`, optional
        Whether a bias term is used.
    """
    def __init__(self, patch_features=no_op, patch_shape=(17, 17),
                 n_iterations=3, compute_error=euclidean_bb_normalised_error,
                 bias=True):
        super(NonParametricOPPRegression, self).__init__()
        # Factory for the per-cascade regressor, pre-configured with the
        # bias setting.
        self._regressor_cls = partial(OPPRegression, bias=bias)
        self.patch_features = patch_features
        self.patch_shape = patch_shape
        self.n_iterations = n_iterations
        self._compute_error = compute_error
| {
"repo_name": "grigorisg9gr/menpofit",
"path": "menpofit/sdm/algorithm/nonparametric.py",
"copies": "6",
"size": "10542",
"license": "bsd-3-clause",
"hash": -5568107345077107000,
"line_mean": 40.6679841897,
"line_max": 80,
"alpha_frac": 0.6338455701,
"autogenerated": false,
"ratio": 4.4537389100126745,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8087584480112675,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from mission.framework.helpers import call_if_function, within_deadband
from mission.framework.task import Task
from shm import kalman
from shm import navigation_desires as desires
from shm.navigation_settings import position_controls
class PositionalControlNeedy(Task):
    """Base task that toggles the positional-control mode on first run when a
    ``positional_controls`` keyword argument is supplied."""
    def on_first_run(self, *args, **kwargs):
        # Missing key and an explicit None are both treated as "leave the
        # current mode untouched".
        positional_controls = kwargs.get('positional_controls')
        if positional_controls is not None:
            position_controls.set(positional_controls)
def clamp_target_to_range(target, min_target=None, max_target=None):
    """Clamp *target* into ``[min_target, max_target]``.

    Each argument may be a value or a zero-argument callable returning one.
    A bound of ``None`` means that side is unconstrained.

    :raises Exception: if both bounds are given and ``min_target > max_target``.
    """
    target = call_if_function(target)
    min_target = call_if_function(min_target)
    max_target = call_if_function(max_target)
    if min_target is not None and max_target is not None and min_target > max_target:
        raise Exception("min_target is greater than max_target")
    if min_target is not None:
        target = max(target, min_target)
    if max_target is not None:
        target = min(target, max_target)
    return target
class Setter(PositionalControlNeedy):
    """Generic setter which also checks the end condition"""
    def on_run(self, target, desire_setter, current, default_error, error=None, modulo_error=False, min_target=None, max_target=None, *args, **kwargs):
        """
        Drive *desire_setter* toward *target*; finish once *current* is
        within *error* of the (clamped) target.  Note: this does not zero
        the desire when completed.

        :param target: A Number or zero-argument function returning the value
            to be targeted.
        :param desire_setter: A SHM variable (object with a set method) that
            will be called with a single argument to target.
        :param current: A Number or zero-argument function returning the
            current value.
        :param error: Allowed error before the wrapper is finished; falls back
            to *default_error* when ``None``.
        :param modulo_error: True if the error should be computed modulo 360.
        """
        if error is None:
            error = default_error
        target = call_if_function(target)
        current = call_if_function(current)
        target = clamp_target_to_range(target=target, min_target=min_target,
                                       max_target=max_target)
        desire_setter(target)
        if within_deadband(target, current, error, use_mod_error=modulo_error):
            self.finish()
class RelativeToInitialSetter(PositionalControlNeedy):
    """Generic setter relative to initial value"""
    def on_first_run(self, *args, **kwargs):
        super().on_first_run(*args, **kwargs)
        # Snapshot the value once at start; all offsets are applied to it.
        self.initial_value = call_if_function(kwargs['current'])
    def on_run(self, offset, desire_setter, current, default_error, error=None, modulo_error=False, min_target=None, max_target=None, *args, **kwargs):
        """
        Drive *desire_setter* toward ``initial_value + offset``.  Note: this
        does not zero the desire when completed.

        :param offset: A Number or zero-argument function returning the offset
            added to the value recorded on the first run.
        :param desire_setter: A SHM variable (object with a set method) that
            will be called with a single argument to target.
        :param current: A Number or zero-argument function returning the
            current value.
        :param error: Allowed error before the wrapper is finished; falls back
            to *default_error* when ``None``.
        :param modulo_error: True if the error should be computed modulo 360.
        """
        if error is None:
            error = default_error
        offset = call_if_function(offset)
        current = call_if_function(current)
        raw_target = self.initial_value + offset
        clamped_target = clamp_target_to_range(
            target=raw_target, min_target=min_target, max_target=max_target)
        desire_setter(clamped_target)
        # Completion is judged against the unclamped target.
        if within_deadband(raw_target, current, error,
                           use_mod_error=modulo_error):
            self.finish()
class RelativeToCurrentSetter(PositionalControlNeedy):
    """Generic setter relative to current value"""
    def on_run(self, offset, desire_setter, current, default_error, error=None, modulo_error=False, min_target=None, max_target=None, *args, **kwargs):
        """
        Drive *desire_setter* toward ``current + offset``, re-evaluated on
        every run.  Note: this does not zero the desire when completed.

        :param offset: A Number or zero-argument function returning the offset
            added to the current value.
        :param desire_setter: A SHM variable (object with a set method) that
            will be called with a single argument to target.
        :param current: A Number or zero-argument function returning the
            current value.
        :param error: Allowed error before the wrapper is finished; falls back
            to *default_error* when ``None``.
        :param modulo_error: True if the error should be computed modulo 360.
        """
        if error is None:
            error = default_error
        offset = call_if_function(offset)
        current = call_if_function(current)
        raw_target = current + offset
        clamped_target = clamp_target_to_range(
            target=raw_target, min_target=min_target, max_target=max_target)
        desire_setter(clamped_target)
        # Completion is judged against the unclamped target.
        if within_deadband(raw_target, current, error,
                           use_mod_error=modulo_error):
            self.finish()
class VelocitySetter(PositionalControlNeedy):
    """Generic setter that simulates velocity controller using a positional controller"""
    def on_first_run(self, *args, **kwargs):
        super().on_first_run(*args, **kwargs)
        # Delegate per-tick position bumps to a relative-to-current setter.
        self.relative_to_current_setter = RelativeToCurrentSetter()
    def on_run(self, velocity, desire_setter, current, default_error, target=None, error=None, modulo_error=False, min_target=None, max_target=None, *args, **kwargs):
        """
        Note: This does not 0 the desire when completed.
        :param velocity: A Number or function that when called with no arguments returns a Number that represents the
        value to be targeted. This target will be multiplied with the time in seconds from the last call and be added to
        the current value.
        :param desire_setter: A SHM variable (object with a set method) that will be called with a single argument to
        target.
        :param current: A Number or function that when called with no arguments returns the current value as a Number.
        :param target: A Number (or function) that represents the target velocity (units/second).
        :param error: A Number representing the allowed error before a wrapper is finished.
        :param modulo_error: a Boolean that is true only if the error calculated should be with respect to modulo 360.
        """
        if error is None:
            error = default_error
        velocity, current, target = call_if_function(velocity), call_if_function(current), call_if_function(target)
        # Integrate the requested velocity over the elapsed time since the
        # last run (this_run_time/last_run_time presumably supplied by Task)
        # to obtain this tick's positional offset.
        target_for_velocity = velocity * (self.this_run_time - self.last_run_time)
        self.relative_to_current_setter.on_run(offset=target_for_velocity, desire_setter=desire_setter, current=current,
                                               error=error, modulo_error=modulo_error, min_target=min_target, max_target=max_target)
        # NOTE(review): when `target` is None (the default), the left operand
        # is False and `within_deadband(None, current, ...)` is evaluated --
        # this looks like it was meant to be `and` rather than `or`; confirm.
        # NOTE(review): `desire_setter()` is invoked with no argument in both
        # branches below, unlike the other setters which pass a value; verify
        # this is intentional.
        if target is not None or within_deadband(target, current, error, use_mod_error=modulo_error):
            if target is not None:
                desire_setter()
            self.finish()
        else:
            desire_setter()
def generate_setters(**kwargs):
    """Yield the four setter variants (absolute, relative-to-initial,
    relative-to-current, velocity) as partials pre-bound with *kwargs*."""
    setter_classes = (Setter, RelativeToInitialSetter,
                      RelativeToCurrentSetter, VelocitySetter)
    return (partial(setter_cls, **kwargs) for setter_cls in setter_classes)
# Concrete setter families: each controlled quantity gets the four variants
# (absolute, relative-to-initial, relative-to-current, velocity).
# Orientation setters use modulo-360 error.
Heading, RelativeToInitialHeading, RelativeToCurrentHeading, VelocityHeading = \
    generate_setters(desire_setter=desires.heading.set, current=kalman.heading.get, modulo_error=True, default_error=3)
Pitch, RelativeToInitialPitch, RelativeToCurrentPitch, VelocityPitch = \
    generate_setters(desire_setter=desires.pitch.set, current=kalman.pitch.get, modulo_error=True, default_error=10)
Roll, RelativeToInitialRoll, RelativeToCurrentRoll, VelocityRoll = \
    generate_setters(desire_setter=desires.roll.set, current=kalman.roll.get, modulo_error=True, default_error=10)
Depth, RelativeToInitialDepth, RelativeToCurrentDepth, VelocityDepth = \
    generate_setters(desire_setter=desires.depth.set, current=kalman.depth.get, modulo_error=False, default_error=0.07)
# Speed setters explicitly disable positional-control mode.
VelocityX, RelativeToInitialVelocityX, RelativeToCurrentVelocityX, VelocityVelocityX = \
    generate_setters(desire_setter=desires.speed.set, current=kalman.velx.get, modulo_error=False, default_error=0.05, positional_controls=False)
VelocityY, RelativeToInitialVelocityY, RelativeToCurrentVelocityY, VelocityVelocityY = \
    generate_setters(desire_setter=desires.sway_speed.set, current=kalman.vely.get, modulo_error=False, default_error=0.05, positional_controls=False)
# Position setters explicitly enable positional-control mode.
PositionN, RelativeToInitialPositionN, RelativeToCurrentPositionN, VelocityPositionN = \
    generate_setters(desire_setter=desires.north.set, current=kalman.north.get, modulo_error=False, default_error=0.05, positional_controls=True)
PositionE, RelativeToInitialPositionE, RelativeToCurrentPositionE, VelocityPositionE = \
    generate_setters(desire_setter=desires.east.set, current=kalman.east.get, modulo_error=False, default_error=0.05, positional_controls=True)
| {
"repo_name": "cuauv/software",
"path": "mission/framework/movement.py",
"copies": "1",
"size": "9411",
"license": "bsd-3-clause",
"hash": 6241681077874518000,
"line_mean": 52.1694915254,
"line_max": 166,
"alpha_frac": 0.7137392413,
"autogenerated": false,
"ratio": 3.8904505994212486,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5104189840721248,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from .modifiers import DimensionModifier
class NumericInterval(DimensionModifier):
    """Dimension modifier that buckets a numeric dimension into intervals of
    width ``size`` starting at ``offset``."""
    def __init__(self, dimension, size=1, offset=0):
        # Interval width and starting offset for the buckets.
        self.size = size
        self.offset = offset
        super().__init__(dimension)
    def __eq__(self, other):
        # Short-circuit on the type check first.  The original implementation
        # used all([...]) which built the whole list eagerly, so comparing
        # against a non-NumericInterval raised AttributeError on `other.size`
        # instead of returning False.
        return (isinstance(other, NumericInterval)
                and self.size == other.size
                and self.offset == other.offset)
    def __hash__(self):
        # NOTE(review): the hash is derived from repr() (which presumably
        # includes the wrapped dimension) while __eq__ ignores the dimension;
        # equal instances wrapping different dimensions would hash unequally,
        # violating the eq/hash contract -- confirm intent.
        return hash(repr(self))
# Supported datetime interval granularities; used to build the factories below.
DATETIME_INTERVALS = ('hour', 'day', 'week', 'month', 'quarter', 'year')
class DatetimeInterval(DimensionModifier):
    """Dimension modifier that tags a datetime dimension with an interval
    granularity (``interval_key``, one of ``DATETIME_INTERVALS``)."""
    def __init__(self, dimension, interval_key):
        super().__init__(dimension)
        # One of DATETIME_INTERVALS, e.g. 'hour' or 'quarter'.
        self.interval_key = interval_key
    def __eq__(self, other):
        # NOTE(review): equality compares `alias` (presumably provided by
        # DimensionModifier) while __hash__ below is based on repr(); if two
        # instances can share an alias but differ in repr, the eq/hash
        # contract is violated -- confirm.
        return isinstance(other, DatetimeInterval) and self.alias == other.alias
    def __repr__(self):
        # Reach the wrapped dimension via object attribute access directly,
        # presumably to bypass attribute proxying in DimensionModifier.
        wrapped_key = super().__getattribute__('wrapped_key')
        wrapped = super().__getattribute__(wrapped_key)
        return '{}({})'.format(self.interval_key, repr(wrapped))
    def __hash__(self):
        return hash(repr(self))
# Convenience factories -- e.g. hour(dim) == DatetimeInterval(dim, interval_key='hour').
hour, day, week, month, quarter, year = [partial(DatetimeInterval, interval_key=key) for key in DATETIME_INTERVALS]
| {
"repo_name": "kayak/fireant",
"path": "fireant/dataset/intervals.py",
"copies": "2",
"size": "1197",
"license": "apache-2.0",
"hash": -7794699138981150000,
"line_mean": 29.6923076923,
"line_max": 115,
"alpha_frac": 0.6390977444,
"autogenerated": false,
"ratio": 4.016778523489933,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5655876267889933,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from mongoengine.queryset.queryset import QuerySet
__all__ = ("queryset_manager", "QuerySetManager")
class QuerySetManager:
    """
    The default QuerySet Manager.

    Custom QuerySet Manager functions can extend this class and users can
    add extra queryset functionality.  Any custom manager method must accept
    a :class:`~mongoengine.Document` class as its first argument and a
    :class:`~mongoengine.queryset.QuerySet` as its second argument, and
    should return a :class:`~mongoengine.queryset.QuerySet` -- usually the
    one passed in, modified in some way.
    """

    get_queryset = None
    default = QuerySet

    def __init__(self, queryset_func=None):
        if queryset_func:
            self.get_queryset = queryset_func

    def __get__(self, instance, owner):
        """Descriptor protocol: build a fresh QuerySet when
        ``Document.objects`` is accessed on the class.
        """
        if instance is not None:
            # Accessed through a document instance rather than the class --
            # return the manager itself.
            return self
        # `owner` is the Document class that holds this manager.
        queryset_cls = owner._meta.get("queryset_class", self.default)
        queryset = queryset_cls(owner, owner._get_collection())
        func = self.get_queryset
        if not func:
            return queryset
        # Dispatch on the custom function's arity: (queryset), (owner,
        # queryset), or more -- in which case a partial is returned for the
        # caller to complete.
        arg_count = func.__code__.co_argcount
        if arg_count == 1:
            return func(queryset)
        if arg_count == 2:
            return func(owner, queryset)
        return partial(func, owner, queryset)
def queryset_manager(func):
    """Decorator that installs *func* as a custom QuerySet manager on a
    :class:`~mongoengine.Document` class.

    *func* must accept the Document class as its first argument and a
    :class:`~mongoengine.queryset.QuerySet` as its second, and should return
    a QuerySet -- usually the one passed in, modified in some way.
    """
    manager = QuerySetManager(func)
    return manager
| {
"repo_name": "MongoEngine/mongoengine",
"path": "mongoengine/queryset/manager.py",
"copies": "1",
"size": "2222",
"license": "mit",
"hash": -6966146728268480000,
"line_mean": 37.3103448276,
"line_max": 79,
"alpha_frac": 0.6710171017,
"autogenerated": false,
"ratio": 4.6582809224318655,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 58
} |
from functools import partial
from multipledispatch import Dispatcher
import ibis.common.exceptions as com
import ibis.expr.operations as ops
from ibis.backends.base import Client
from ibis.expr.scope import Scope
from .trace import TraceTwoLevelDispatcher
# Individual operation execution
# Dispatcher used to execute a single operation node; concrete kernels are
# registered against it elsewhere in the backend.
execute_node = TraceTwoLevelDispatcher(
    'execute_node',
    doc=(
        'Execute an individual operation given the operation and its computed '
        'arguments'
    ),
)
@execute_node.register(ops.Node)
def execute_node_without_scope(node, **kwargs):
    """Fallback registration: any node reaching here has no data bound to
    it, so executing it is an error."""
    message = (
        'Node of type {!r} has no data bound to it. '
        'You probably tried to execute an expression without a data '
        'source.'
    ).format(type(node).__name__)
    raise com.UnboundExpressionError(message)
# Dispatcher for computing a (possibly partial) scope before standard
# execution; registrations may special-case node/client combinations.
pre_execute = Dispatcher(
    'pre_execute',
    doc="""\
Given a node, compute a (possibly partial) scope prior to standard execution.
Notes
-----
This function is useful if parts of the tree structure need to be executed at
the same time or if there are other reasons to need to interrupt the regular
depth-first traversal of the tree.
""",
)
# Default returns an empty scope
@pre_execute.register(ops.Node)
@pre_execute.register(ops.Node, Client)
def pre_execute_default(node, *clients, **kwargs):
    # No special pre-execution behavior: contribute nothing to the scope.
    return Scope()
# Merge the results of all client pre-execution with scope
@pre_execute.register(ops.Node, [Client])
def pre_execute_multiple_clients(node, *clients, scope=None, **kwargs):
    """Pre-execute *node* once per client and merge the resulting scopes."""
    client_scopes = [
        pre_execute(node, client, scope=scope, **kwargs) for client in clients
    ]
    return scope.merge_scopes(client_scopes)
# Dedicated dispatcher for literals, bypassing execute_node's overhead.
execute_literal = Dispatcher(
    'execute_literal',
    doc="""\
Special case literal execution to avoid the dispatching overhead of
``execute_node``.
Parameters
----------
op : ibis.expr.operations.Node
value : object
    The literal value of the object, e.g., int, float.
datatype : ibis.expr.datatypes.DataType
    Used to specialize on expressions whose underlying value is of a different
    type than its would-be type. For example, interval values are represented
    by an integer.
""",
)
# Dispatcher for post-processing a node's computed result.
post_execute = Dispatcher(
    'post_execute',
    doc="""\
Execute code on the result of a computation.
Parameters
----------
op : ibis.expr.operations.Node
    The operation that was just executed
data : object
    The result of the computation
""",
)
@post_execute.register(ops.Node, object)
def post_execute_default(op, data, **kwargs):
    # Identity post-processing: pass the computed data through unchanged.
    return data
# Top-level execution entry-point dispatcher; implementations are registered
# elsewhere.
execute = Dispatcher("execute")
| {
"repo_name": "ibis-project/ibis",
"path": "ibis/backends/pandas/dispatch.py",
"copies": "1",
"size": "2532",
"license": "apache-2.0",
"hash": -7841277293009019000,
"line_mean": 23.8235294118,
"line_max": 79,
"alpha_frac": 0.7045813586,
"autogenerated": false,
"ratio": 3.871559633027523,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 102
} |
from functools import partial
from nose.tools import assert_is_none
from nose.tools import eq_
from rest_framework.compat import RequestFactory
from rest_framework.relations import PKOnlyObject
from rest_framework.request import Request
from rest_framework.reverse import reverse
from drf_nested_resources.fields import HyperlinkedNestedIdentityField
from drf_nested_resources.fields import HyperlinkedNestedRelatedField
from drf_nested_resources.routers import NestedResource
from drf_nested_resources.routers import Resource
from drf_nested_resources.routers import make_urlpatterns_from_resources
from tests._testcases import FixtureTestCase
from tests.django_project.app.models import ProgrammingLanguageVersion
from tests.django_project.app.views import DeveloperViewSet
from tests.django_project.app.views import ProgrammingLanguageVersionViewSet
from tests.django_project.app.views import ProgrammingLanguageViewSet
class TestIdentityField(object):
    """Tests for URL generation via ``HyperlinkedNestedIdentityField``."""

    _SOURCE_VIEW_NAME = 'children'
    _DESTINATION_VIEW_NAME = 'child'
    # View name -> URL variables it requires, outermost first.
    _URLVARS_BY_VIEW_NAME = {
        'children': ('parent', ),
        'child': ('parent', 'child'),
    }

    def setup(self):
        # Fresh source-view request before each test (nose calls `setup`).
        self._django_request = _make_django_request(
            self._SOURCE_VIEW_NAME, {'parent': 'foo'})

    def test_url_generation(self):
        eq_(
            self._make_url_with_kwargs('foo'),
            self._make_url_with_kwargs_via_field('foo'),
        )

    def test_url_generation_with_explicit_format(self):
        eq_(
            self._make_url_with_kwargs('foo', 'xml'),
            self._make_url_with_kwargs_via_field('foo', 'xml'),
        )

    def test_unsaved_resource(self):
        # A PK of None denotes an unsaved object; no URL can be built for it.
        assert_is_none(self._make_url_with_kwargs_via_field(None))

    def _make_url_with_kwargs_via_field(self, pk, format_=None):
        # Exercise the field under test directly.
        field = HyperlinkedNestedIdentityField(
            self._SOURCE_VIEW_NAME,
            self._URLVARS_BY_VIEW_NAME,
        )
        return field.get_url(
            PKOnlyObject(pk),
            self._DESTINATION_VIEW_NAME,
            _make_drf_request(self._django_request),
            format_,
        )

    def _make_url_with_kwargs(self, pk, format_=None):
        # Build the expected URL by reversing the destination view with the
        # source request's kwargs plus the child's PK.
        view_kwargs = dict(self._django_request.resolver_match[2], child=pk)
        url_path = reverse(
            self._DESTINATION_VIEW_NAME,
            kwargs=view_kwargs,
            format=format_,
        )
        return self._django_request.build_absolute_uri(url_path)
class TestRelatedLinkedField(FixtureTestCase):
    """Tests for HyperlinkedNestedRelatedField across the nested-resource tree."""

    _URLVARS_BY_VIEW_NAME = {
        'developer-list': (),
        'developer-detail': ('developer',),
        'language-list': ('developer',),
        'language-detail': ('language', 'developer'),
        'version-list': ('language', 'developer'),
        'version-detail': ('version', 'language', 'developer'),
    }

    def setUp(self):
        super(TestRelatedLinkedField, self).setUp()
        # developer -> language -> version nested-resource tree.
        version_resource = NestedResource(
            'version',
            'versions',
            ProgrammingLanguageVersionViewSet,
            parent_field_lookup='language',
        )
        language_resource = NestedResource(
            'language',
            'languages',
            ProgrammingLanguageViewSet,
            [version_resource],
            parent_field_lookup='author',
        )
        self.resources = [
            Resource(
                'developer',
                'developers',
                DeveloperViewSet,
                [language_resource],
            ),
        ]
        self.urlpatterns = make_urlpatterns_from_resources(self.resources)

    def _check_url_via_field(
        self,
        source_view_name,
        destination_view_name,
        source_view_kwargs,
        destination_object,
        expected_view_kwargs,
    ):
        """Assert the field-generated URL matches the directly-reversed one."""
        django_request = _make_django_request(
            source_view_name,
            source_view_kwargs,
            self.urlpatterns,
        )
        url = self._make_url_via_field(
            django_request,
            source_view_name,
            destination_view_name,
            destination_object,
        )
        expected_url = self._make_url_with_kwargs(
            django_request,
            destination_view_name,
            expected_view_kwargs,
        )
        eq_(expected_url, url)

    def test_parent_detail(self):
        self._check_url_via_field(
            'language-detail',
            'developer-detail',
            {
                'developer': self.developer1.pk,
                'language': self.programming_language1.pk,
            },
            self.developer1,
            {'developer': self.developer1.pk},
        )

    def test_parent_list(self):
        self._check_url_via_field(
            'language-detail',
            'developer-list',
            {
                'developer': self.developer1.pk,
                'language': self.programming_language1.pk,
            },
            self.developer1,
            {},
        )

    def test_grandparent_detail(self):
        self._check_url_via_field(
            'version-detail',
            'developer-detail',
            {
                'developer': self.developer1.pk,
                'language': self.programming_language1.pk,
                'version': self.programming_language_version.pk,
            },
            self.developer1,
            {'developer': self.developer1.pk},
        )

    def test_grandparent_list(self):
        self._check_url_via_field(
            'version-detail',
            'developer-list',
            {
                'developer': self.developer1.pk,
                'language': self.programming_language1.pk,
                'version': self.programming_language_version.pk,
            },
            self.developer1,
            {},
        )

    def test_child_detail(self):
        self._check_url_via_field(
            'language-detail',
            'version-detail',
            {
                'developer': self.developer1.pk,
                'language': self.programming_language1.pk,
            },
            self.programming_language_version,
            {
                'developer': self.developer1.pk,
                'language': self.programming_language1.pk,
                'version': self.programming_language_version.pk,
            },
        )

    def test_child_list(self):
        self._check_url_via_field(
            'language-detail',
            'version-list',
            {
                'developer': self.developer1.pk,
                'language': self.programming_language1.pk,
            },
            self.programming_language_version,
            {
                'developer': self.developer1.pk,
                'language': self.programming_language1.pk,
            },
        )

    def test_unsaved_child(self):
        # An unsaved (PK-less) related object yields no URL.
        django_request = _make_django_request(
            'language-detail',
            {
                'developer': self.developer1.pk,
                'language': self.programming_language1.pk,
            },
            self.urlpatterns,
        )
        url = self._make_url_via_field(
            django_request,
            'language-detail',
            'version-detail',
            ProgrammingLanguageVersion(language=self.programming_language1),
        )
        assert_is_none(url)

    def _make_url_via_field(
        self,
        django_request,
        source_view_name,
        destination_view_name,
        destination_view_object,
    ):
        """Resolve the destination URL through the related field itself."""
        field = HyperlinkedNestedRelatedField(
            source_view_name,
            self._URLVARS_BY_VIEW_NAME,
            read_only=True,
        )
        # Resolve against the test URLconf rather than the project default.
        field.reverse = partial(reverse, urlconf=self.urlpatterns)
        return field.get_url(
            destination_view_object,
            destination_view_name,
            _make_drf_request(django_request),
            None,
        )

    def _make_url_with_kwargs(self, django_request, view_name, view_kwargs):
        """Build the expected URL directly via reverse() for comparison."""
        url_path = \
            reverse(view_name, kwargs=view_kwargs, urlconf=self.urlpatterns)
        return django_request.build_absolute_uri(url_path)
def _make_django_request(view_name, view_kwargs, urlconf=None):
    """Build a Django GET request resolved to *view_name* with *view_kwargs*."""
    url_path = reverse(view_name, kwargs=view_kwargs, urlconf=urlconf)
    request = RequestFactory(SERVER_NAME='example.org').get(url_path)
    # Mimic what the URL resolver would attach on a real dispatch.
    request.resolver_match = (view_name, (), view_kwargs)
    return request
def _make_drf_request(django_request):
    """Wrap *django_request* in a DRF Request, propagating the view kwargs."""
    kwargs = django_request.resolver_match[2]
    return Request(django_request, parser_context={'kwargs': kwargs})
| {
"repo_name": "pombredanne/drf-nested-resources",
"path": "tests/test_serialization.py",
"copies": "1",
"size": "11104",
"license": "bsd-3-clause",
"hash": 1246936983127690500,
"line_mean": 31.6588235294,
"line_max": 76,
"alpha_frac": 0.5486311239,
"autogenerated": false,
"ratio": 4.290571870170015,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5339202994070015,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from nose.tools import eq_
from bleach import clean
# Restrict bleach.clean to <p> tags carrying a style attribute for these tests.
clean = partial(clean, tags=['p'], attributes=['style'])
def test_allowed_css():
    """Only whitelisted CSS declarations survive cleaning (nose generator)."""
    # (dirty style, expected cleaned style, allowed style properties)
    cases = (
        ('font-family: Arial; color: red; float: left; '
         'background-color: red;', 'color: red;', ['color']),
        ('border: 1px solid blue; color: red; float: left;', 'color: red;',
         ['color']),
        ('border: 1px solid blue; color: red; float: left;',
         'color: red; float: left;', ['color', 'float']),
        ('color: red; float: left; padding: 1em;', 'color: red; float: left;',
         ['color', 'float']),
        ('color: red; float: left; padding: 1em;', 'color: red;', ['color']),
        ('cursor: -moz-grab;', 'cursor: -moz-grab;', ['cursor']),
        ('color: hsl(30,100%,50%);', 'color: hsl(30,100%,50%);', ['color']),
        ('color: rgba(255,0,0,0.4);', 'color: rgba(255,0,0,0.4);', ['color']),
        ("text-overflow: ',' ellipsis;", "text-overflow: ',' ellipsis;",
         ['text-overflow']),
        ('text-overflow: "," ellipsis;', 'text-overflow: "," ellipsis;',
         ['text-overflow']),
        ('font-family: "Arial";', 'font-family: "Arial";', ['font-family']),
    )

    attr_double_quoted = '<p style="{0!s}">bar</p>'
    attr_single_quoted = "<p style='{0!s}'>bar</p>"

    def check(dirty, expected, allowed):
        # Styles containing a double quote need the single-quoted attribute form.
        if '"' in dirty:
            template = attr_single_quoted
        else:
            template = attr_double_quoted
        eq_(template.format(expected),
            clean(template.format(dirty), styles=allowed))

    for dirty, expected, allowed in cases:
        yield check, dirty, expected, allowed
def test_valid_css():
    """The sanitizer should fix missing CSS values."""
    allowed = ['color', 'float']
    # A declaration with an empty value is dropped from the style.
    eq_('<p style="float: left;">foo</p>',
        clean('<p style="float: left; color: ">foo</p>', styles=allowed))
    # A malformed declaration yields an empty style attribute.
    eq_('<p style="">foo</p>',
        clean('<p style="color: float: left;">foo</p>', styles=allowed))
def test_style_hang():
    """The sanitizer should not hang on any inline styles"""
    # TODO: Neaten this up. It's copypasta from MDN/Kuma to repro the bug
    # A large real-world style attribute that previously triggered the hang.
    style = ("""margin-top: 0px; margin-right: 0px; margin-bottom: 1.286em; """
             """margin-left: 0px; padding-top: 15px; padding-right: 15px; """
             """padding-bottom: 15px; padding-left: 15px; border-top-width: """
             """1px; border-right-width: 1px; border-bottom-width: 1px; """
             """border-left-width: 1px; border-top-style: dotted; """
             """border-right-style: dotted; border-bottom-style: dotted; """
             """border-left-style: dotted; border-top-color: rgb(203, 200, """
             """185); border-right-color: rgb(203, 200, 185); """
             """border-bottom-color: rgb(203, 200, 185); border-left-color: """
             """rgb(203, 200, 185); background-image: initial; """
             """background-attachment: initial; background-origin: initial; """
             """background-clip: initial; background-color: """
             """rgb(246, 246, 242); overflow-x: auto; overflow-y: auto; """
             """font: normal normal normal 100%/normal 'Courier New', """
             """'Andale Mono', monospace; background-position: initial """
             """initial; background-repeat: initial initial;""")
    html = '<p style="{0!s}">Hello world</p>'.format(style)
    # Whitelist of style properties allowed through the sanitizer.
    styles = [
        'border', 'float', 'overflow', 'min-height', 'vertical-align',
        'white-space',
        'margin', 'margin-left', 'margin-top', 'margin-bottom', 'margin-right',
        'padding', 'padding-left', 'padding-top', 'padding-bottom', 'padding-right',
        'background',
        'background-color',
        'font', 'font-size', 'font-weight', 'text-align', 'text-transform',
    ]
    # Only the whitelisted declarations should remain, and cleaning must finish.
    expected = ("""<p style="margin-top: 0px; margin-right: 0px; """
                """margin-bottom: 1.286em; margin-left: 0px; padding-top: """
                """15px; padding-right: 15px; padding-bottom: 15px; """
                """padding-left: 15px; background-color: """
                """rgb(246, 246, 242); font: normal normal normal """
                """100%/normal 'Courier New', 'Andale Mono', monospace;">"""
                """Hello world</p>""")
    result = clean(html, styles=styles)
    eq_(expected, result)
| {
"repo_name": "marcdm/bleach",
"path": "bleach/tests/test_css.py",
"copies": "1",
"size": "4238",
"license": "bsd-3-clause",
"hash": 9200600908157013000,
"line_mean": 44.5698924731,
"line_max": 84,
"alpha_frac": 0.5427088249,
"autogenerated": false,
"ratio": 3.4371451743714516,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44798539992714514,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from openapi_core.deserializing.parameters.deserializers import (
CallableParameterDeserializer,
)
from openapi_core.deserializing.parameters.deserializers import (
UnsupportedStyleDeserializer,
)
from openapi_core.deserializing.parameters.util import split
from openapi_core.schema.parameters import get_style
class ParameterDeserializersFactory:
    """Creates deserializers matching an OpenAPI parameter's serialization style."""

    # Maps each supported style to the callable that splits its raw value.
    PARAMETER_STYLE_DESERIALIZERS = {
        "form": partial(split, separator=","),
        "simple": partial(split, separator=","),
        "spaceDelimited": partial(split, separator=" "),
        "pipeDelimited": partial(split, separator="|"),
    }

    def create(self, param_or_header):
        """Return a deserializer for the given parameter or header object."""
        style = get_style(param_or_header)
        deserialize_callable = self.PARAMETER_STYLE_DESERIALIZERS.get(style)
        if deserialize_callable is None:
            # Unknown style: return a deserializer that signals no support.
            return UnsupportedStyleDeserializer(param_or_header, style)
        return CallableParameterDeserializer(
            param_or_header, style, deserialize_callable
        )
| {
"repo_name": "p1c2u/openapi-core",
"path": "openapi_core/deserializing/parameters/factories.py",
"copies": "1",
"size": "1046",
"license": "bsd-3-clause",
"hash": 4494886968866590000,
"line_mean": 32.7419354839,
"line_max": 72,
"alpha_frac": 0.7179732314,
"autogenerated": false,
"ratio": 4.101960784313725,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5319934015713725,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from OpenGL.GL import *
from PyEngine3D.Utilities import GetClassName, Singleton
from PyEngine3D.Common import logger
from PyEngine3D.OpenGLContext import OpenGLContext
class FrameBuffer:
    """Wrapper around an OpenGL framebuffer object (FBO).

    Holds up to GL_MAX_DRAW_BUFFERS color attachments plus an optional depth
    attachment. The glFramebuffer* calls needed to bind the attachments are
    recorded as a deferred command list (build_command) and replayed on bind
    (run_bind_framebuffer).
    """
    # Framebuffer-completeness error codes recognized by get_error().
    errors = (
        GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT,
        GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER,
        GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT,
        GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE,
        GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER,
        GL_FRAMEBUFFER_UNDEFINED,
        GL_FRAMEBUFFER_UNSUPPORTED
    )
    def __init__(self, name=''):
        """Create the GL framebuffer object and empty attachment slots."""
        logger.info("Create %s framebuffer" % name)
        self.name = name
        self.buffer = glGenFramebuffers(1)
        self.max_draw_buffers = glGetInteger(GL_MAX_DRAW_BUFFERS)
        self.color_textures = [None, ] * self.max_draw_buffers
        self.attach_count = 0
        self.attachments = [0, ] * self.max_draw_buffers
        self.depth_texture = None
        self.width = 0
        self.height = 0
        self.viewport_width = 0
        self.viewport_height = 0
        self.viewport_scale = 1.0
        self.commands = []
        self.target_face = GL_TEXTURE_CUBE_MAP_POSITIVE_X  # cubemap face
        self.target_layer = 0  # 3d texture layer
        self.target_level = 0  # mipmap level
    def __del__(self):
        # Detach textures so their attachment flags are cleared; the GL object
        # itself is only released by an explicit delete().
        self.set_color_textures()
        self.set_depth_texture(None)
    def get_error(self, error_code):
        """Return the matching error constant as a string (None if unknown)."""
        for error in self.errors:
            if error == error_code:
                return str(error)
    def delete(self):
        """Detach all textures and delete the GL framebuffer object."""
        logger.info("Delete %s framebuffer" % self.name)
        self.set_color_textures()
        self.set_depth_texture(None)
        glDeleteFramebuffers(1, [self.buffer, ])
    def set_color_textures(self, *textures):
        """Assign color attachment textures; call with no args to detach all."""
        texture_count = len(textures)
        self.attach_count = 0
        for i, color_texture in enumerate(self.color_textures):
            if color_texture:
                # Clear the old texture's attachment flag before replacing it.
                color_texture.set_attachment(False)
            texture = textures[i] if i < texture_count else None
            if texture is None:
                self.attachments[i] = 0
                self.color_textures[i] = None
            else:
                self.attach_count += 1
                self.attachments[i] = GL_COLOR_ATTACHMENT0 + i
                self.color_textures[i] = texture
                texture.set_attachment(True)
    def set_depth_texture(self, texture):
        """Assign (or, with None, clear) the depth attachment texture."""
        if self.depth_texture:
            self.depth_texture.set_attachment(False)
        if texture:
            texture.set_attachment(True)
        self.depth_texture = texture
    def set_viewport(self, x, y, width, height, scale):
        """Store the render size and apply the scaled GL viewport."""
        self.width = width
        self.height = height
        # Clamp to 1 so tiny mip levels never yield a zero-sized viewport.
        self.viewport_width = max(1, int(width * scale))
        self.viewport_height = max(1, int(height * scale))
        self.viewport_scale = scale
        glViewport(x, y, self.viewport_width, self.viewport_height)
    def func_bind_framebuffer(self, attachment, target, texture_buffer, offset=0):
        """Attach *texture_buffer* to *attachment*, dispatching on target type."""
        if GL_RENDERBUFFER == target:
            glFramebufferRenderbuffer(GL_FRAMEBUFFER, attachment, GL_RENDERBUFFER, texture_buffer)
        elif GL_TEXTURE_2D == target:
            glFramebufferTexture2D(GL_FRAMEBUFFER, attachment, GL_TEXTURE_2D, texture_buffer, self.target_level)
        elif GL_TEXTURE_2D_ARRAY == target:
            glFramebufferTextureLayer(GL_FRAMEBUFFER, attachment, texture_buffer, 0, self.target_layer + offset)
        elif GL_TEXTURE_3D == target:
            glFramebufferTexture3D(GL_FRAMEBUFFER, attachment, GL_TEXTURE_3D, texture_buffer, self.target_level, self.target_layer + offset)
        elif GL_TEXTURE_CUBE_MAP == target:
            glFramebufferTexture2D(GL_FRAMEBUFFER, attachment, self.target_face, texture_buffer, self.target_level)
    def add_command(self, *args):
        """Append a deferred GL call (callable plus its args) to the list."""
        self.commands.append(partial(*args))
    def build_command(self):
        """Rebuild the deferred command list that binds this framebuffer."""
        self.commands.clear()
        self.add_command(glBindFramebuffer, GL_FRAMEBUFFER, self.buffer)
        # bind color textures
        layer_offset = 0
        last_texture = None
        for i, color_texture in enumerate(self.color_textures):
            # Consecutive slots using the same texture bind successive layers.
            if last_texture != color_texture:
                layer_offset = 0
                last_texture = color_texture
            else:
                layer_offset += 1
            attachment = GL_COLOR_ATTACHMENT0 + i
            if color_texture is not None:
                self.add_command(self.func_bind_framebuffer, attachment, color_texture.target, color_texture.buffer, layer_offset)
        if self.attach_count > 0:
            self.add_command(glDrawBuffers, self.attach_count, self.attachments)
        else:
            # No color attachments (depth-only): disable color draw/read buffers.
            self.add_command(glDrawBuffer, GL_NONE)
            self.add_command(glReadBuffer, GL_NONE)
        # bind depth texture
        if self.depth_texture is not None:
            attachment = OpenGLContext.get_depth_attachment(self.depth_texture.internal_format)
            self.add_command(self.func_bind_framebuffer, attachment, self.depth_texture.target, self.depth_texture.buffer)
        else:
            self.add_command(glFramebufferTexture, GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, 0, 0)
    def run_bind_framebuffer(self, target_face=GL_TEXTURE_CUBE_MAP_POSITIVE_X, target_layer=0, target_level=0):
        """Replay the command list to bind this FBO and set the viewport.

        Raises BaseException when the framebuffer is incomplete.
        """
        self.target_face = target_face
        self.target_layer = target_layer
        self.target_level = target_level
        # update viewport
        viewport_scale = 1.0 / (2.0 ** target_level)  # each mip level halves the size
        if self.attach_count > 0:
            self.set_viewport(0, 0, self.color_textures[0].width, self.color_textures[0].height, viewport_scale)
        elif self.depth_texture is not None:
            self.set_viewport(0, 0, self.depth_texture.width, self.depth_texture.height, viewport_scale)
        # run command
        for cmd in self.commands:
            cmd()
        gl_error = glCheckFramebufferStatus(GL_FRAMEBUFFER)
        if gl_error != GL_FRAMEBUFFER_COMPLETE:
            error_message = "glCheckFramebufferStatus error %s." % self.get_error(gl_error)
            logger.error(error_message)
            raise BaseException(error_message)
    def unbind_framebuffer(self):
        """Detach all textures and bind the default framebuffer (0)."""
        self.set_color_textures()
        self.set_depth_texture(None)
        glBindFramebuffer(GL_FRAMEBUFFER, 0)
    def copy_framebuffer(self, src, src_x=0, src_y=0, src_w=0, src_h=0, dst_x=0, dst_y=0, dst_w=0, dst_h=0, target=GL_COLOR_BUFFER_BIT, filter_type=GL_LINEAR):
        """Blit a region from *src* into this framebuffer.

        Zero extents default to the respective framebuffer's viewport size.
        """
        glBindFramebuffer(GL_READ_FRAMEBUFFER, src.buffer)
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, self.buffer)
        if GL_COLOR_BUFFER_BIT == target:
            glDrawBuffers(1, (GL_COLOR_ATTACHMENT0,))
            glReadBuffer(GL_COLOR_ATTACHMENT0)
        elif GL_DEPTH_BUFFER_BIT == target and src.depth_texture is not None:
            attachment = OpenGLContext.get_depth_attachment(src.depth_texture.internal_format)
            glDrawBuffers(1, (attachment, ))
            glReadBuffer(attachment)
        glBlitFramebuffer(src_x, src_y, src_w or src.viewport_width, src_h or src.viewport_height,
                          dst_x, dst_y, dst_w or self.viewport_width, dst_h or self.viewport_height,
                          target, filter_type)
    def mirror_framebuffer(self, src, src_x=0, src_y=0, src_w=0, src_h=0, dst_x=0, dst_y=0, dst_w=0, dst_h=0, target=GL_COLOR_BUFFER_BIT, filter_type=GL_LINEAR):
        """Blit from *src* with the source X extent reversed (horizontal flip)."""
        glBindFramebuffer(GL_READ_FRAMEBUFFER, src.buffer)
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, self.buffer)
        if GL_COLOR_BUFFER_BIT == target:
            glDrawBuffers(1, (GL_COLOR_ATTACHMENT0,))
            glReadBuffer(GL_COLOR_ATTACHMENT0)
        elif GL_DEPTH_BUFFER_BIT == target and src.depth_texture is not None:
            attachment = OpenGLContext.get_depth_attachment(src.depth_texture.internal_format)
            glDrawBuffers(1, (attachment, ))
            glReadBuffer(attachment)
        # Source X coordinates are swapped relative to copy_framebuffer to flip.
        glBlitFramebuffer(src_w or src.viewport_width, src_y, src_x, src_h or src.viewport_height,
                          dst_x, dst_y, dst_w or self.viewport_width, dst_h or self.viewport_height,
                          target, filter_type)
    def blit_framebuffer(self, src_x=0, src_y=0, src_w=0, src_h=0, dst_x=0, dst_y=0, dst_w=0, dst_h=0, filter_type=GL_LINEAR):
        """Blit this framebuffer's color content to the default framebuffer."""
        # active default frame buffer
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0)
        glBlitFramebuffer(src_x, src_y, src_w or self.viewport_width, src_h or self.viewport_height,
                          dst_x, dst_y, dst_w or self.viewport_width, dst_h or self.viewport_height,
                          GL_COLOR_BUFFER_BIT, filter_type)
class FrameBufferManager(Singleton):
    """Caches FrameBuffer objects keyed by their attachment textures and
    tracks the currently bound framebuffer."""

    def __init__(self):
        # Cache of FrameBuffer instances keyed by (color textures, depth texture).
        self.framebuffers = {}
        self.current_framebuffer = None

    def clear_framebuffer(self):
        """Delete every cached framebuffer and reset the cache."""
        for framebuffer in self.framebuffers.values():
            framebuffer.delete()
        self.framebuffers = {}
        self.current_framebuffer = None

    def rebuild_command(self):
        """Rebuild the deferred bind-command lists of all cached framebuffers."""
        for framebuffer in self.framebuffers.values():
            framebuffer.build_command()

    def delete_framebuffer(self, *textures, depth_texture=None):
        """Remove and delete the framebuffer cached for this texture set.

        ``depth_texture`` defaults to None for consistency with
        get_framebuffer/bind_framebuffer (backward compatible: existing
        callers that pass the keyword are unaffected).
        """
        key = (textures, depth_texture)
        if key in self.framebuffers:
            framebuffer = self.framebuffers.pop(key)
            framebuffer.delete()

    def get_framebuffer(self, *textures, depth_texture=None):
        """Return the cached framebuffer for this texture set, creating and
        validating it on first use.

        Raises BaseException when the render targets differ in size
        (kept as BaseException to preserve the existing error contract).
        """
        key = (textures, depth_texture)
        if key in self.framebuffers:
            framebuffer = self.framebuffers[key]
        else:
            name = ''
            if 0 < len(textures):
                name = textures[0].name
                # All color targets and the depth target must share one size.
                error = False
                width = textures[0].width
                height = textures[0].height
                for texture in textures[1:]:
                    if texture is not None and (width != texture.width or height != texture.height):
                        error = True
                        break
                if depth_texture is not None and (width != depth_texture.width or height != depth_texture.height):
                    error = True
                if error:
                    error_message = "Render targets must be the same size."
                    logger.error(error_message)
                    raise BaseException(error_message)
            framebuffer = FrameBuffer(name)
            self.framebuffers[key] = framebuffer
            framebuffer.set_color_textures(*textures)
            framebuffer.set_depth_texture(depth_texture)
            framebuffer.build_command()
        return framebuffer

    def bind_framebuffer(self, *textures, depth_texture=None,
                         target_face=GL_TEXTURE_CUBE_MAP_POSITIVE_X, target_layer=0, target_level=0):
        """Bind (creating if needed) the framebuffer for this texture set."""
        glBindFramebuffer(GL_FRAMEBUFFER, 0)
        self.current_framebuffer = self.get_framebuffer(*textures, depth_texture=depth_texture)
        self.current_framebuffer.run_bind_framebuffer(target_face=target_face,
                                                      target_layer=target_layer,
                                                      target_level=target_level)
        return self.current_framebuffer

    def unbind_framebuffer(self):
        """Bind the default framebuffer (0)."""
        glBindFramebuffer(GL_FRAMEBUFFER, 0)

    def copy_rendertarget(self, src_render_target, dst_render_target,
                          src_x=0, src_y=0, src_w=0, src_h=0,
                          dst_x=0, dst_y=0, dst_w=0, dst_h=0, target=GL_COLOR_BUFFER_BIT, filter_type=GL_LINEAR):
        """Copy one render target into another via their framebuffers."""
        src_framebuffer = self.bind_framebuffer(src_render_target)
        self.bind_framebuffer(dst_render_target)
        glClear(GL_COLOR_BUFFER_BIT)
        self.current_framebuffer.copy_framebuffer(src_framebuffer, src_x, src_y, src_w, src_h, dst_x, dst_y, dst_w, dst_h, target, filter_type)

    def copy_framebuffer(self, src, src_x=0, src_y=0, src_w=0, src_h=0, dst_x=0, dst_y=0, dst_w=0, dst_h=0, target=GL_COLOR_BUFFER_BIT, filter_type=GL_LINEAR):
        """Blit *src* into the currently bound framebuffer."""
        self.current_framebuffer.copy_framebuffer(src, src_x, src_y, src_w, src_h, dst_x, dst_y, dst_w, dst_h, target, filter_type)

    def mirror_framebuffer(self, src, src_x=0, src_y=0, src_w=0, src_h=0, dst_x=0, dst_y=0, dst_w=0, dst_h=0, target=GL_COLOR_BUFFER_BIT, filter_type=GL_LINEAR):
        """Blit *src* horizontally flipped into the currently bound framebuffer."""
        self.current_framebuffer.mirror_framebuffer(src, src_x, src_y, src_w, src_h, dst_x, dst_y, dst_w, dst_h, target, filter_type)

    def blit_framebuffer(self, src_x=0, src_y=0, src_w=0, src_h=0, dst_x=0, dst_y=0, dst_w=0, dst_h=0, filter_type=GL_LINEAR):
        """Blit the currently bound framebuffer to the default framebuffer."""
        self.current_framebuffer.blit_framebuffer(src_x, src_y, src_w, src_h, dst_x, dst_y, dst_w, dst_h, filter_type)
| {
"repo_name": "ubuntunux/GuineaPig",
"path": "PyEngine3D/OpenGLContext/FrameBuffer.py",
"copies": "1",
"size": "12670",
"license": "bsd-2-clause",
"hash": -631755463148294400,
"line_mean": 43.7703180212,
"line_max": 161,
"alpha_frac": 0.6187056038,
"autogenerated": false,
"ratio": 3.5660005629045877,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46847061667045875,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from ..params.encodings import EncodingParam
from ..params.values import URLParam
from .base import StringProp
# Public API of this module.
__all__ = ["PhotoProp"]
class PhotoProp(StringProp):
    """The PHOTO vCard property: an image stored inline or referenced by URL."""

    authorized_params = ["ENCODING", "TYPE", "VALUE"]
    name = "PHOTO"

    def export(self, path):
        """Export value to a file.
        """
        pass

    def import_(self, path):
        """Import a file to the value.

        Renamed from ``import`` — that is a reserved keyword, so the
        original definition was a SyntaxError and the module could not load.

        Raises ValueError when no (or more than one) encoding is declared,
        and FileNotFoundError when *path* does not exist.
        """
        # NOTE(review): this tests for the URLParam class itself in params —
        # confirm that params holds classes here, not instances.
        if URLParam in self.params:
            ## Value is a URL reference, not inline data: do not import.
            return
        ## Check and get encodings.
        encoders = {p for p in self.params if isinstance(p, EncodingParam)}
        if len(encoders) == 0:
            raise ValueError("No encoding found.")
        elif len(encoders) > 1:
            raise ValueError("More than one encoding, ambigous.")
        # A set is not an iterator; next() needs iter() to fetch the element.
        encoder = next(iter(encoders))
        ## Check path.
        if not path.exists():
            raise FileNotFoundError(str(path))
        ## TODO: Check file extension.
        ## Do import.
        with path.open("rb") as file:
            self.value = encoder.encode(file.read())
| {
"repo_name": "Jorispilot/pycard",
"path": "pycard/prop/photo.py",
"copies": "1",
"size": "1111",
"license": "mit",
"hash": -7556329312409859000,
"line_mean": 24.8372093023,
"line_max": 75,
"alpha_frac": 0.5733573357,
"autogenerated": false,
"ratio": 4.224334600760456,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005739634881625348,
"num_lines": 43
} |
from functools import partial
from parser.libpicohttpparser import ffi
from request import HttpRequest
class TracingProtocol:
    """Records parser callbacks (headers/body/error) for later inspection."""

    def __init__(self, on_headers_adapter: callable,
                 on_body_adapter: callable):
        self.requests = []
        self.error = None

        # Adapters convert raw callback arguments into request/body objects.
        self.on_headers_adapter = on_headers_adapter
        self.on_body_adapter = on_body_adapter

        self.on_headers_call_count = 0
        self.on_body_call_count = 0
        self.on_error_call_count = 0

    def on_headers(self, *args):
        # Adapt the raw arguments into a request object and record it.
        request = self.on_headers_adapter(*args)
        self.request = request
        self.requests.append(request)
        self.on_headers_call_count += 1

    def on_body(self, body):
        # Attach the adapted body to the most recently seen request.
        self.request.body = self.on_body_adapter(body)
        self.on_body_call_count += 1

    def on_error(self, error: str):
        self.error = error
        self.on_error_call_count += 1
def _request_from_cprotocol(method: memoryview, path: memoryview, version: int,
                            headers: memoryview):
    """Build an HttpRequest from the C protocol's memoryview arguments."""
    decoded_method = method.tobytes().decode('ascii')
    decoded_path = path.tobytes().decode('ascii')
    http_version = "1.0" if version == 0 else "1.1"

    # Reinterpret the raw header buffer as an array of phr_header structs.
    header_count = headers.nbytes // ffi.sizeof("struct phr_header")
    headers_cdata = ffi.cast(
        'struct phr_header[{}]'.format(header_count), ffi.from_buffer(headers))

    return HttpRequest(
        decoded_method, decoded_path, http_version,
        _extract_headers(headers_cdata))
def _body_from_cprotocol(body: memoryview):
return None if body is None else body.tobytes()
def _request_from_cffiprotocol(method: "char[]", path: "char[]", version: int,
                               headers: "struct phr_header[]"):
    """Build an HttpRequest from the cffi protocol's cdata arguments."""
    http_version = "1.0" if version == 0 else "1.1"
    return HttpRequest(
        ffi.buffer(method)[:].decode('ascii'),
        ffi.buffer(path)[:].decode('ascii'),
        http_version,
        _extract_headers(headers),
    )
def _body_from_cffiprotocol(body: "char[]"):
return None if body is None else ffi.buffer(body)[:]
def _extract_headers(headers_cdata: "struct phr_header[]"):
    """Decode a phr_header array into a {Title-Cased-Name: value} dict."""
    # Later duplicates overwrite earlier ones, matching the original loop.
    return {
        ffi.string(header.name, header.name_len).decode('ascii').title():
            ffi.string(header.value, header.value_len).decode('latin1')
        for header in headers_cdata
    }
# Protocol variants pre-wired with the matching adapter pair for each backend.
CTracingProtocol = partial(
    TracingProtocol, on_headers_adapter=_request_from_cprotocol,
    on_body_adapter=_body_from_cprotocol)
CffiTracingProtocol = partial(
    TracingProtocol, on_headers_adapter=_request_from_cffiprotocol,
    on_body_adapter=_body_from_cffiprotocol)
| {
"repo_name": "squeaky-pl/japronto",
"path": "src/japronto/protocol/tracing.py",
"copies": "1",
"size": "2664",
"license": "mit",
"hash": 8364239998551004000,
"line_mean": 28.9325842697,
"line_max": 79,
"alpha_frac": 0.6486486486,
"autogenerated": false,
"ratio": 3.6343792633015006,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9783027911901501,
"avg_score": 0,
"num_lines": 89
} |
from functools import partial
from plenum.common.messages.node_messages import PrePrepare
from plenum.common.types import OPERATION, f
from plenum.common.constants import DOMAIN_LEDGER_ID, POOL_LEDGER_ID, AUDIT_LEDGER_ID
from plenum.common.util import getMaxFailures, get_utc_epoch
from plenum.server.consensus.ordering_service import OrderingService
from plenum.server.consensus.utils import get_original_viewno
from plenum.server.node import Node
from plenum.server.quorums import Quorums
from plenum.test import waits
from plenum.test.helper import chk_all_funcs, init_discarded
from plenum.test.spy_helpers import getAllArgs
from plenum.test.test_node import TestNode, getNonPrimaryReplicas, \
getAllReplicas, getPrimaryReplica
# This code is unclear, refactor
def checkPropagated(looper, txnPoolNodeSet, request, faultyNodes=0):
    """Check every node received the expected number of PROPAGATEs for *request*."""
    node_count = len(list(txnPoolNodeSet))

    # noinspection PyIncorrectDocstring
    def check_node(node: TestNode):
        """
        1. no of propagate received by node must be n -1 with zero
        faulty nodes in system; where n = num of nodes
        2. no of propagate received by node must be greater than
        or equal to f + 1
        """
        matching_calls = [
            call for call in getAllArgs(node, Node.processPropagate)
            if call['msg'].request[f.REQ_ID.nm] == request.reqId
            and call['msg'].request[f.IDENTIFIER.nm] == request.identifier
            and call['msg'].request[OPERATION] == request.operation
        ]
        expected_without_faults = node_count - 1
        expected_with_faults = faultyNodes + 1
        assert msgCountOK(node_count,
                          faultyNodes,
                          len(matching_calls),
                          expected_without_faults,
                          expected_with_faults)

    timeout = waits.expectedPropagateTime(len(txnPoolNodeSet))
    funcs = [partial(check_node, node) for node in txnPoolNodeSet]
    chk_all_funcs(looper, funcs, faultyNodes, timeout)
def checkPrePrepared(looper,
                     txnPoolNodeSet,
                     propagated1,
                     instIds,
                     faultyNodes=0,
                     timeout=30):
    """Verify PRE-PREPARE traffic for *propagated1* on every instance in
    *instIds*: the primary sends exactly one PRE-PREPARE and processes none,
    while each non-primary sees and stores one (tolerances relax with
    *faultyNodes*)."""
    nodesSize = len(list(txnPoolNodeSet))
    def g(instId):
        primary = getPrimaryReplica(txnPoolNodeSet, instId)
        nonPrimaryReplicas = getNonPrimaryReplicas(txnPoolNodeSet, instId)
        def primarySeesCorrectNumberOfPREPREPAREs():
            """
            no of PRE-PREPARE as seen by processPrePrepare
            method for primary must be 0 with or without faults in system
            """
            l1 = len([param for param in
                      getAllArgs(primary._ordering_service,
                                 primary._ordering_service.process_preprepare)])
            assert l1 == 0, 'Primary {} sees no pre-prepare'.format(primary)
        def nonPrimarySeesCorrectNumberOfPREPREPAREs():
            """
            1. no of PRE-PREPARE as seen by processPrePrepare method for
            non-primaries must be 1; whn zero faulty nodes in system.
            2. no of PRE-PREPARE as seen by processPrePrepare method for
            non-primaries must be greater than or equal to 0;
            with faults in system.
            """
            tm = get_utc_epoch()
            # Reference PRE-PREPARE built from the primary's current state;
            # received messages are compared against it field-by-field below.
            expectedPrePrepareRequest = PrePrepare(
                instId,
                primary.viewNo,
                primary.lastPrePrepareSeqNo,
                tm,
                [propagated1.digest],
                init_discarded(),
                primary._ordering_service.generate_pp_digest([propagated1.digest], primary.viewNo, tm),
                DOMAIN_LEDGER_ID,
                primary._ordering_service.get_state_root_hash(DOMAIN_LEDGER_ID),
                primary._ordering_service.get_txn_root_hash(DOMAIN_LEDGER_ID),
                0,
                True,
                primary._ordering_service.get_state_root_hash(POOL_LEDGER_ID),
                primary._ordering_service.get_txn_root_hash(AUDIT_LEDGER_ID),
            )
            passes = 0
            for npr in nonPrimaryReplicas:
                # Fields 3 (timestamp) and 6 (digest) are excluded from the
                # comparison; the tail (index 7+) is taken from the received
                # message on both sides, so it always matches.
                actualMsgs = len([param for param in
                                  getAllArgs(npr._ordering_service,
                                             npr._ordering_service.process_preprepare)
                                  if (param['pre_prepare'][0:3] +
                                      param['pre_prepare'][4:6] +
                                      param['pre_prepare'][7:],
                                      param['sender']) == (
                                      expectedPrePrepareRequest[0:3] +
                                      expectedPrePrepareRequest[4:6] +
                                      param['pre_prepare'][7:],
                                      primary.name)])
                numOfMsgsWithZFN = 1
                numOfMsgsWithFaults = 0
                passes += int(msgCountOK(nodesSize,
                                         faultyNodes,
                                         actualMsgs,
                                         numOfMsgsWithZFN,
                                         numOfMsgsWithFaults))
            assert passes >= len(nonPrimaryReplicas) - faultyNodes, \
                '1Non-primary sees correct number pre-prepares - {}'.format(passes)
        def primarySentsCorrectNumberOfPREPREPAREs():
            """
            1. no of PRE-PREPARE sent by primary is 1 with or without
            fault in system but, when primary is faulty no of sent PRE_PREPARE
            will be zero and primary must be marked as malicious.
            """
            actualMsgs = len([param for param in
                              getAllArgs(primary._ordering_service,
                                         primary._ordering_service.send_pre_prepare)
                              if param['ppReq'].reqIdr[0] == propagated1.digest
                              and param['ppReq'].digest ==
                              primary._ordering_service.generate_pp_digest([propagated1.digest],
                                                                           get_original_viewno(param['ppReq']),
                                                                           param['ppReq'].ppTime)])
            numOfMsgsWithZFN = 1
            # TODO: Considering, Primary is not faulty and will always send
            # PRE-PREPARE. Write separate test for testing when Primary
            # is faulty
            assert msgCountOK(nodesSize,
                              faultyNodes,
                              actualMsgs,
                              numOfMsgsWithZFN,
                              numOfMsgsWithZFN), 'Primary sends correct number of per-prepare'
        def nonPrimaryReceivesCorrectNumberOfPREPREPAREs():
            """
            1. no of PRE-PREPARE received by non-primaries must be 1
            with zero faults in system, and 0 faults in system.
            """
            passes = 0
            for npr in nonPrimaryReplicas:
                # Counts PRE-PREPAREs actually stored by the replica
                # (_add_to_pre_prepares), not just processed.
                l4 = len([param for param in
                          getAllArgs(npr._ordering_service,
                                     npr._ordering_service._add_to_pre_prepares)
                          if param['pp'].reqIdr[0] == propagated1.digest
                          and param['pp'].digest ==
                          OrderingService.generate_pp_digest([propagated1.digest, ],
                                                             get_original_viewno(param['pp']),
                                                             param['pp'].ppTime)])
                numOfMsgsWithZFN = 1
                numOfMsgsWithFaults = 0
                passes += msgCountOK(nodesSize,
                                     faultyNodes,
                                     l4,
                                     numOfMsgsWithZFN,
                                     numOfMsgsWithFaults)
            assert passes >= len(nonPrimaryReplicas) - faultyNodes, \
                '2Non-primary receives correct number of pre-prepare -- {}'.format(passes)
        primarySeesCorrectNumberOfPREPREPAREs()
        nonPrimarySeesCorrectNumberOfPREPREPAREs()
        primarySentsCorrectNumberOfPREPREPAREs()
        nonPrimaryReceivesCorrectNumberOfPREPREPAREs()
    funcs = [partial(g, instId) for instId in instIds]
    # TODO Select or create the timeout from 'waits'. Don't use constant.
    # looper.run(eventuallyAll(*coros, retryWait=1, totalTimeout=timeout))
    chk_all_funcs(looper, funcs, faultyNodes, timeout)
def checkPrepared(looper, txnPoolNodeSet, preprepared1, instIds, faultyNodes=0,
                  timeout=30):
    """Verify the PREPARE phase completed correctly on every protocol
    instance in ``instIds``, tolerating up to ``faultyNodes`` faulty nodes.

    :param looper: event loop driving the eventual checks
    :param txnPoolNodeSet: all pool nodes
    :param preprepared1: unused here; kept for call-site symmetry with the
        other check* helpers -- TODO confirm against callers
    :param instIds: protocol instance ids to check
    :param faultyNodes: number of tolerated faulty nodes
    :param timeout: seconds to keep retrying the checks
    """
    nodeCount = len(list(txnPoolNodeSet))
    quorums = Quorums(nodeCount)

    def g(instId):
        # Per-instance assertions; retried by chk_all_funcs below.
        allReplicas = getAllReplicas(txnPoolNodeSet, instId)
        primary = getPrimaryReplica(txnPoolNodeSet, instId)
        nonPrimaryReplicas = getNonPrimaryReplicas(txnPoolNodeSet, instId)

        def primaryDontSendAnyPREPAREs():
            """
            1. no of PREPARE sent by primary should be 0
            """
            # process_prepare records the *sender* of each PREPARE a
            # replica received; none may originate from the primary.
            for r in allReplicas:
                for param in getAllArgs(r._ordering_service,
                                        OrderingService.process_prepare):
                    sender = param['sender']
                    assert sender != primary.name

        def allReplicasSeeCorrectNumberOfPREPAREs():
            """
            1. no of PREPARE received by replicas must be n - 1;
            n = num of nodes without fault, and greater than or equal to
            n-f-1 with faults.
            """
            passes = 0
            numOfMsgsWithZFN = nodeCount - 1
            numOfMsgsWithFaults = quorums.prepare.value
            for replica in allReplicas:
                # prepares are keyed by (viewNo, ppSeqNo) of the primary
                key = primary.viewNo, primary.lastPrePrepareSeqNo
                if key in replica._ordering_service.prepares:
                    actualMsgs = len(replica._ordering_service.prepares[key].voters)

                    passes += int(msgCountOK(nodeCount,
                                             faultyNodes,
                                             actualMsgs,
                                             numOfMsgsWithZFN,
                                             numOfMsgsWithFaults))
            assert passes >= len(allReplicas) - faultyNodes

        def primaryReceivesCorrectNumberOfPREPAREs():
            """
            num of PREPARE seen by primary replica is n - 1;
            n = num of nodes without fault, and greater than or equal to
            n-f-1 with faults.
            """
            # Count PREPAREs addressed to the primary's current
            # (instId, viewNo, ppSeqNo), excluding any from itself.
            actualMsgs = len([param for param in
                              getAllArgs(primary._ordering_service,
                                         primary._ordering_service.process_prepare)
                              if (param['prepare'].instId,
                                  param['prepare'].viewNo,
                                  param['prepare'].ppSeqNo) == (
                                  primary.instId, primary.viewNo,
                                  primary.lastPrePrepareSeqNo) and
                              param['sender'] != primary.name])

            numOfMsgsWithZFN = nodeCount - 1
            numOfMsgsWithFaults = quorums.prepare.value

            assert msgCountOK(nodeCount,
                              faultyNodes,
                              actualMsgs,
                              numOfMsgsWithZFN,
                              numOfMsgsWithFaults)
            # TODO what if the primary is faulty?

        def nonPrimaryReplicasReceiveCorrectNumberOfPREPAREs():
            """
            num of PREPARE seen by Non primary replica is n - 2 without
            faults and n-f-2 with faults.
            """
            passes = 0
            # n - 2: everyone except the primary (which sends none) and
            # the receiving replica itself
            numOfMsgsWithZFN = nodeCount - 2
            numOfMsgsWithFaults = quorums.prepare.value - 1

            for npr in nonPrimaryReplicas:
                actualMsgs = len(
                    [param for param in getAllArgs(
                        npr._ordering_service,
                        npr._ordering_service.process_prepare)
                     if (param['prepare'].instId,
                         param['prepare'].viewNo,
                         param['prepare'].ppSeqNo) == (
                         primary.instId,
                         primary.viewNo,
                         primary.lastPrePrepareSeqNo)])

                passes += int(msgCountOK(nodeCount,
                                         faultyNodes,
                                         actualMsgs,
                                         numOfMsgsWithZFN,
                                         numOfMsgsWithFaults))

            assert passes >= len(nonPrimaryReplicas) - faultyNodes

            # TODO how do we know if one of the faulty nodes is a primary or
            # not?

        primaryDontSendAnyPREPAREs()
        allReplicasSeeCorrectNumberOfPREPAREs()
        primaryReceivesCorrectNumberOfPREPAREs()
        nonPrimaryReplicasReceiveCorrectNumberOfPREPAREs()

    funcs = [partial(g, instId) for instId in instIds]
    # TODO Select or create the timeout from 'waits'. Don't use constant.
    # looper.run(eventuallyAll(*coros, retryWait=1, totalTimeout=timeout))
    chk_all_funcs(looper, funcs, faultyNodes, timeout)
def checkCommitted(looper, txnPoolNodeSet, prepared1, instIds, faultyNodes=0):
    """Verify the COMMIT phase completed correctly on every protocol
    instance in ``instIds``, tolerating up to ``faultyNodes`` faulty nodes.

    :param prepared1: unused here; kept for call-site symmetry with the
        other check* helpers -- TODO confirm against callers
    """
    timeout = waits.expectedCommittedTime(len(txnPoolNodeSet))
    nodeCount = len((list(txnPoolNodeSet)))
    quorums = Quorums(nodeCount)

    def g(instId):
        # Per-instance assertions; retried by chk_all_funcs below.
        allReplicas = getAllReplicas(txnPoolNodeSet, instId)
        primaryReplica = getPrimaryReplica(txnPoolNodeSet, instId)

        def replicas_gets_correct_num_of_COMMITs():
            """
            num of commit messages must be = n when zero fault;
            n = num of nodes and greater than or equal to
            n-f with faults.
            """
            passes = 0
            numOfMsgsWithZFN = quorums.commit.value
            numOfMsgsWithFault = quorums.commit.value
            # commits are keyed by (viewNo, ppSeqNo) of the primary
            key = (primaryReplica.viewNo, primaryReplica.lastPrePrepareSeqNo)
            for r in allReplicas:
                if key in r._ordering_service.commits:
                    rcvdCommitRqst = r._ordering_service.commits[key]
                    actualMsgsReceived = len(rcvdCommitRqst.voters)

                    passes += int(msgCountOK(nodeCount,
                                             faultyNodes,
                                             actualMsgsReceived,
                                             numOfMsgsWithZFN,
                                             numOfMsgsWithFault))

            assert passes >= min(len(allReplicas) - faultyNodes,
                                 numOfMsgsWithZFN)

        replicas_gets_correct_num_of_COMMITs()

    funcs = [partial(g, instId) for instId in instIds]
    # TODO Select or create the timeout from 'waits'. Don't use constant.
    # looper.run(eventuallyAll(*coros, retryWait=1, totalTimeout=timeout))
    chk_all_funcs(looper, funcs, faultyNodes, timeout)
def msgCountOK(nodesSize,
               faultyNodes,
               actualMessagesReceived,
               numOfMsgsWithZNF,
               numOfSufficientMsgs):
    """Check whether a message count is acceptable for a pool of
    ``nodesSize`` nodes of which ``faultyNodes`` are faulty.

    With no faults the count must match ``numOfMsgsWithZNF`` exactly;
    within the tolerated fault limit at least ``numOfSufficientMsgs``
    are required; beyond the limit at most that many are expected.
    """
    if not faultyNodes:
        # fault-free pool: the count must be exact
        return actualMessagesReceived == numOfMsgsWithZNF
    if faultyNodes <= getMaxFailures(nodesSize):
        return actualMessagesReceived >= numOfSufficientMsgs
    # Less than or equal to `numOfSufficientMsgs` since the faults may
    # not reduce the number of correct messages
    return actualMessagesReceived <= numOfSufficientMsgs
def chk_commits_prepares_recvd(count, receivers, sender):
    """Assert that every replica of every receiver node has recorded
    exactly ``count`` COMMIT/PREPARE votes from ``sender``'s replicas."""
    tallies = {}
    # replica name of the sender, per protocol instance
    sender_names = {rep.instId: rep.name for rep in sender.replicas.values()}
    for node in receivers:
        for rep in node.replicas.values():
            votes = tallies.setdefault(rep.instId, 0)
            name = sender_names[rep.instId]
            for commit in rep._ordering_service.commits.values():
                votes += int(name in commit.voters)
            for prepare in rep._ordering_service.prepares.values():
                votes += int(name in prepare.voters)
            tallies[rep.instId] = votes

    for c in tallies.values():
        assert count == c, "expected {}, but have {}".format(count, c)
| {
"repo_name": "evernym/zeno",
"path": "plenum/test/node_request/node_request_helper.py",
"copies": "2",
"size": "16725",
"license": "apache-2.0",
"hash": 1405368365070987000,
"line_mean": 44.2027027027,
"line_max": 111,
"alpha_frac": 0.5294469357,
"autogenerated": false,
"ratio": 4.604900881057269,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008407567355032203,
"num_lines": 370
} |
from functools import partial
from plumbum import local
import benchbuild as bb
from benchbuild.environments.domain.declarative import ContainerImage
from benchbuild.settings import CFG
from benchbuild.source import Git
from benchbuild.utils.cmd import make, mkdir, tar
from benchbuild.utils.settings import get_number_of_jobs
class SpiderMonkey(bb.Project):
    """
    SpiderMonkey requires a legacy version of autoconf: autoconf-2.13
    """

    NAME = 'js'
    DOMAIN = 'compilation'
    GROUP = 'benchbuild'
    SOURCE = [
        Git(
            remote='https://github.com/mozilla/gecko-dev.git',
            local='gecko-dev.git',
            limit=5,
            refspec='HEAD'
        )
    ]

    CONTAINER = ContainerImage().from_('benchbuild:alpine')

    def compile(self):
        """Build the SpiderMonkey shell from a gecko-dev checkout.

        Packs js/src into a mozjs source tarball, unpacks it, runs the
        legacy autoconf-2.13 + configure, then builds in an obj dir.
        """
        gecko_repo = local.path(self.source_of('gecko-dev.git'))
        js_dir = gecko_repo / "js" / "src"
        clang = bb.compiler.cc(self)
        clang_cxx = bb.compiler.cxx(self)

        with local.cwd(js_dir):
            make_src_pkg = local["./make-source-package.sh"]
            # pin the package version to 0.0.0 so the paths below
            # ("mozjs-0.0.0") are predictable
            with local.env(
                DIST=self.builddir,
                MOZJS_MAJOR_VERSION=0,
                MOZJS_MINOR_VERSION=0,
                MOZJS_PATCH_VERSION=0
            ):
                make_src_pkg()

        mozjs_dir = local.path("mozjs-0.0.0")
        mozjs_src_dir = mozjs_dir / "js" / "src"
        tar("xfj", mozjs_dir + ".tar.bz2")
        with local.cwd(mozjs_src_dir):
            mkdir("obj")
            # legacy build requirement: configure must be generated by
            # autoconf-2.13 (see class docstring)
            autoconf = local["autoconf-2.13"]
            autoconf()
            with local.cwd("obj"):
                with local.env(CC=str(clang), CXX=str(clang_cxx)):
                    configure = local["../configure"]
                    _configure = bb.watch(configure)
                    _configure('--without-system-zlib')

        mozjs_obj_dir = mozjs_src_dir / "obj"
        with local.cwd(mozjs_obj_dir):
            _make = bb.watch(make)
            _make("-j", get_number_of_jobs(CFG))

    def run_tests(self):
        """Run the jstests suite against the freshly built js shell."""
        mozjs_obj_dir = local.path("mozjs-0.0.0") / "js" / "src" / "obj"
        self.runtime_extension = partial(self, may_wrap=False)
        bb.wrap(mozjs_obj_dir / "js" / "src" / "shell" / "js", self)

        with local.cwd(mozjs_obj_dir):
            _make = bb.watch(make)
            _make("check-jstests")
| {
"repo_name": "PolyJIT/benchbuild",
"path": "benchbuild/projects/benchbuild/js.py",
"copies": "1",
"size": "2347",
"license": "mit",
"hash": 7769393923580749,
"line_mean": 31.5972222222,
"line_max": 72,
"alpha_frac": 0.5573072007,
"autogenerated": false,
"ratio": 3.416302765647744,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9473609966347745,
"avg_score": 0,
"num_lines": 72
} |
from functools import partial
from .primitives import EMPTY
__all__ = ['identity', 'constantly', 'caller',
'partial', 'rpartial', 'func_partial',
'curry', 'rcurry', 'autocurry',
'iffy']
def identity(x):
    """Return the argument unchanged."""
    return x
def constantly(x):
    """Return a function that ignores its arguments and returns ``x``."""
    def constant(*args, **kwargs):
        return x
    return constant
# an operator.methodcaller() brother
# an operator.methodcaller() brother
def caller(*a, **kw):
    """Return a function that calls its argument with the stored args."""
    def call(f):
        return f(*a, **kw)
    return call
# not using functools.partial to get real function
def func_partial(func, *args, **kwargs):
    """
    A functools.partial alternative, which returns a real function.
    Can be used to construct methods.

    Call-time positional args are appended after the stored ones;
    call-time keyword args override the stored ones.
    """
    def applied(*call_args, **call_kwargs):
        merged = dict(kwargs, **call_kwargs)
        return func(*(args + call_args), **merged)
    return applied
def rpartial(func, *args):
    """Partially apply ``func`` from the right: stored ``args`` are
    appended after the call-time positional arguments.

    Fix: call-time keyword arguments are now forwarded to ``func``
    (previously they raised TypeError), consistent with func_partial
    above. Positional-only usage is unchanged.
    """
    return lambda *a, **kw: func(*(a + args), **kw)
def curry(func, n=EMPTY):
    """Curry ``func`` into a chain of single-argument functions,
    applying arguments left to right.

    ``n`` is the arity; by default it is read from the code object.
    """
    if n is EMPTY:
        n = func.__code__.co_argcount

    if n <= 1:
        return func
    if n == 2:
        return lambda a: lambda b: func(a, b)
    # bind one argument and recurse with a smaller arity
    return lambda a: curry(partial(func, a), n - 1)
def rcurry(func, n=EMPTY):
    """Curry ``func`` into a chain of single-argument functions,
    applying arguments right to left.

    ``n`` is the arity; by default it is read from the code object.
    """
    if n is EMPTY:
        n = func.__code__.co_argcount

    if n <= 1:
        return func
    if n == 2:
        return lambda a: lambda b: func(b, a)
    # bind one argument on the right and recurse with a smaller arity
    return lambda a: rcurry(rpartial(func, a), n - 1)
def autocurry(func, n=EMPTY, _args=(), _kwargs=None):
    """Curry ``func`` so that it may be called with any prefix of its
    arguments; once ``n`` arguments have accumulated, ``func`` is called.

    ``_args``/``_kwargs`` are internal accumulators.

    Fix: the ``_kwargs`` default was a shared mutable dict literal (the
    classic mutable-default anti-pattern); it is now a ``None`` sentinel.
    Behavior is unchanged -- the dict was always copied before mutation.
    """
    if n is EMPTY:
        n = func.__code__.co_argcount
    if _kwargs is None:
        _kwargs = {}

    def autocurried(*a, **kw):
        args = _args + a
        kwargs = _kwargs.copy()
        kwargs.update(kw)
        # enough arguments collected -> call through
        if len(args) + len(kwargs) >= n:
            return func(*args, **kwargs)
        else:
            return autocurry(func, n, _args=args, _kwargs=kwargs)

    return autocurried
def iffy(pred, action=EMPTY, default=identity):
    """Return a function applying ``action`` when ``pred`` holds,
    else ``default`` (called when callable, returned as-is otherwise).

    With a single argument, ``pred`` is the action and truthiness is
    the predicate: ``iffy(f) == iffy(bool, f)``.
    """
    if action is EMPTY:
        return iffy(bool, pred)

    def transform(v):
        if pred(v):
            return action(v)
        if callable(default):
            return default(v)
        return default
    return transform
| {
"repo_name": "musicpax/funcy",
"path": "funcy/simple_funcs.py",
"copies": "3",
"size": "1941",
"license": "bsd-3-clause",
"hash": 5203841046211414000,
"line_mean": 22.962962963,
"line_max": 67,
"alpha_frac": 0.5564142195,
"autogenerated": false,
"ratio": 3.4112478031634446,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5467662022663444,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from .processVideo import processVideo, isGoodVideo
def args_(fn, param):
    """Build the argument dict consumed by AnalysisPoints to run video
    compression: the processVideo callable, its kwargs, the input/output
    files and the step requirements.

    ``fn`` maps file roles to paths ('original_video', 'masked_image');
    ``param`` carries tracker parameters via ``p_dict`` -- presumably a
    TrackerViewAux-style params object, confirm against caller.
    """
    # step requirements
    requirements = [('can_read_video', partial(isGoodVideo, fn['original_video']))]
    if param.is_WT2:
        from ..compress_add_data import storeAdditionalDataSW, hasAdditionalFiles
        # if a shaffer single worm video does not have the additional files (info.xml log.csv) do not even execute the compression
        requirements += [('has_additional_files', partial(hasAdditionalFiles, fn['original_video']))]

    # build input arguments for processVideo
    p = param.p_dict

    # getROIMask: strip the 'mask_' prefix from the parameter names
    mask_param_f = ['mask_min_area', 'mask_max_area', 'thresh_block_size',
                    'thresh_C', 'dilation_size', 'keep_border_data', 'is_light_background']
    mask_param = {x.replace('mask_', ''): p[x] for x in mask_param_f}

    # bgnd subtraction
    bgnd_param_mask_f = ['mask_bgnd_buff_size', 'mask_bgnd_frame_gap', 'is_light_background']
    bgnd_param_mask = {x.replace('mask_bgnd_', ''): p[x] for x in bgnd_param_mask_f}

    # a non-positive buffer size or frame gap disables background subtraction
    if bgnd_param_mask['buff_size'] <= 0 or bgnd_param_mask['frame_gap'] <= 0:
        bgnd_param_mask = {}

    # FOV splitting (multi-well plates); empty dict disables splitting
    fovsplitter_param_f = ['MWP_total_n_wells', 'MWP_whichsideup', 'MWP_well_shape']
    if not all(k in p for k in fovsplitter_param_f):
        fovsplitter_param = {}
    else:
        fovsplitter_param = {x.replace('MWP_', ''): p[x] for x in fovsplitter_param_f}
        if isinstance(fovsplitter_param['total_n_wells'], str):
            fovsplitter_param['total_n_wells'] = int(fovsplitter_param['total_n_wells'])
        # a negative well count also disables splitting
        if fovsplitter_param['total_n_wells'] < 0:
            fovsplitter_param = {}

    compress_vid_param = {
        'buffer_size': p['compression_buff'],
        'save_full_interval': p['save_full_interval'],
        'mask_param': mask_param,
        'bgnd_param': bgnd_param_mask,
        'expected_fps': p['expected_fps'],
        'microns_per_pixel': p['microns_per_pixel'],
        'is_extract_timestamp': p['is_extract_timestamp'],
        'fovsplitter_param': fovsplitter_param,
    }

    argkws_d = {
        'video_file': fn['original_video'],
        'masked_image_file': fn['masked_image'],
        'compress_vid_param': compress_vid_param
    }

    # arguments used by AnalysisPoints.py
    args = {
        'func': processVideo,
        'argkws': argkws_d,
        'input_files': [fn['original_video']],
        'output_files': [fn['masked_image']],
        'requirements': requirements,
    }
return args | {
"repo_name": "ver228/tierpsy-tracker",
"path": "tierpsy/analysis/compress/__init__.py",
"copies": "1",
"size": "2605",
"license": "mit",
"hash": 1554348110702616800,
"line_mean": 37.8955223881,
"line_max": 130,
"alpha_frac": 0.6061420345,
"autogenerated": false,
"ratio": 3.310038119440915,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4416180153940915,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from pulsar import Connection, Pool, get_actor
from pulsar.utils.pep import to_string
from pulsar.apps.data import RemoteStore
from pulsar.apps.ds import redis_parser
from .client import RedisClient, Pipeline, Consumer, ResponseError
from .pubsub import RedisPubSub
class RedisStoreConnection(Connection):
    """A single connection to a redis server with its own redis parser."""

    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        # each connection gets a fresh parser from the producing store
        self.parser = self._producer._parser_class()

    def execute(self, *args, **options):
        """Send one redis command (coroutine); return its response.

        Re-raises the wrapped server exception on a redis error reply.
        """
        consumer = self.current_consumer()
        consumer.start((args, options))
        result = yield from consumer.on_finished
        if isinstance(result, ResponseError):
            raise result.exception
        return result

    def execute_pipeline(self, commands, raise_on_error=True):
        """Send a batch of commands as a pipeline (coroutine)."""
        consumer = self.current_consumer()
        consumer.start((commands, raise_on_error, []))
        result = yield from consumer.on_finished
        if isinstance(result, ResponseError):
            raise result.exception
        return result
class RedisStore(RemoteStore):
    '''Redis :class:`.Store` implementation.
    '''
    protocol_factory = partial(RedisStoreConnection, Consumer)

    supported_queries = frozenset(('filter', 'exclude'))

    def _init(self, namespace=None, parser_class=None, pool_size=50,
              decode_responses=False, **kwargs):
        # Store-level configuration, invoked by the RemoteStore base.
        self._decode_responses = decode_responses
        if not parser_class:
            actor = get_actor()
            # fall back to the pure-python parser if the actor config asks
            pyparser = actor.cfg.redis_py_parser if actor else False
            parser_class = redis_parser(pyparser)
        self._parser_class = parser_class
        if namespace:
            self._urlparams['namespace'] = namespace
        self._pool = Pool(self.connect, pool_size=pool_size, loop=self._loop)
        if self._database is None:
            self._database = 0
        self._database = int(self._database)
        self.loaded_scripts = {}

    @property
    def pool(self):
        # connection pool used by execute/execute_pipeline
        return self._pool

    @property
    def namespace(self):
        '''The prefix namespace to append to all transaction on keys
        '''
        n = self._urlparams.get('namespace')
        return '%s:' % n if n else ''

    def key(self):
        # identity of this store: (dns, encoding)
        return (self._dns, self._encoding)

    def client(self):
        '''Get a :class:`.RedisClient` for the Store'''
        return RedisClient(self)

    def pipeline(self):
        '''Get a :class:`.Pipeline` for the Store'''
        return Pipeline(self)

    def pubsub(self, protocol=None):
        return RedisPubSub(self, protocol=protocol)

    def ping(self):
        return self.client().ping()

    def execute(self, *args, **options):
        '''Run one redis command on a pooled connection (coroutine).'''
        connection = yield from self._pool.connect()
        with connection:
            result = yield from connection.execute(*args, **options)
        return result

    def execute_pipeline(self, commands, raise_on_error=True):
        '''Run a batch of redis commands on a pooled connection (coroutine).'''
        conn = yield from self._pool.connect()
        with conn:
            result = yield from conn.execute_pipeline(commands, raise_on_error)
        return result

    def connect(self, protocol_factory=None):
        '''Open and authenticate a new connection (coroutine).'''
        protocol_factory = protocol_factory or self.create_protocol
        if isinstance(self._host, tuple):
            host, port = self._host
            transport, connection = yield from self._loop.create_connection(
                protocol_factory, host, port)
        else:
            # unix-socket style hosts are not supported here
            raise NotImplementedError('Could not connect to %s' %
                                      str(self._host))
        if self._password:
            yield from connection.execute('AUTH', self._password)
        if self._database:
            yield from connection.execute('SELECT', self._database)
        return connection

    def flush(self):
        return self.execute('flushdb')

    def close(self):
        '''Close all open connections.'''
        return self._pool.close()

    def has_query(self, query_type):
        return query_type in self.supported_queries

    def basekey(self, meta, *args):
        # namespaced key: "<namespace><table>[:p1:p2:...]"
        key = '%s%s' % (self.namespace, meta.table_name)
        postfix = ':'.join((to_string(p) for p in args if p is not None))
        return '%s:%s' % (key, postfix) if postfix else key

    def meta(self, meta):
        '''Extract model metadata for lua script stdnet/lib/lua/odm.lua'''
        # indices = dict(((idx.attname, idx.unique) for idx in meta.indices))
        data = meta.as_dict()
        data['namespace'] = self.basekey(meta)
        return data
class CompiledQuery(object):
def __init__(self, pipe, query):
self.pipe = pipe
| {
"repo_name": "nooperpudd/pulsar",
"path": "pulsar/apps/data/redis/store.py",
"copies": "5",
"size": "4614",
"license": "bsd-3-clause",
"hash": 3156420776598993400,
"line_mean": 32.4347826087,
"line_max": 79,
"alpha_frac": 0.6148677937,
"autogenerated": false,
"ratio": 4.225274725274725,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 138
} |
from functools import partial
from pulsar import Protocol
from pulsar.apps.data import PubSub
class PubsubProtocol(Protocol):
    """Wire protocol for the dedicated pub/sub connection of RedisPubSub."""

    def __init__(self, handler, **kw):
        super().__init__(handler._loop, **kw)
        self.parser = self._producer._parser_class()
        self.handler = handler  # the RedisPubSub owning this protocol

    def execute(self, *args):
        """Serialize and send one redis command (generator-coroutine)."""
        chunk = self.parser.multi_bulk(args)
        self._transport.write(chunk)
        # must be an asynchronous object like the base class method
        yield None

    def data_received(self, data):
        """Feed bytes to the redis parser; broadcast message/pmessage
        replies to the handler, re-raise protocol errors."""
        parser = self.parser
        parser.feed(data)
        response = parser.get()
        while response is not False:
            if not isinstance(response, Exception):
                if isinstance(response, list):
                    command = response[0]
                    if command == b'message':
                        # keep (channel, payload)
                        response = response[1:3]
                        self.handler.broadcast(response)
                    elif command == b'pmessage':
                        # drop the pattern, keep (channel, payload)
                        response = response[2:4]
                        self.handler.broadcast(response)
            else:
                raise response
            response = parser.get()
class RedisPubSub(PubSub):
    '''Asynchronous Publish/Subscriber handler for pulsar and redis stores.
    '''

    def publish(self, channel, message):
        # encode via the optional protocol before hitting the wire
        if self._protocol:
            message = self._protocol.encode(message)
        return self.store.execute('PUBLISH', channel, message)

    def count(self, *channels):
        '''Number of subscribers for each of the given channels.'''
        kw = {'subcommand': 'numsub'}
        return self.store.execute('PUBSUB', 'NUMSUB', *channels, **kw)

    def count_patterns(self):
        '''Number of active pattern subscriptions on the server.'''
        kw = {'subcommand': 'numpat'}
        return self.store.execute('PUBSUB', 'NUMPAT', **kw)

    def channels(self, pattern=None):
        '''Lists the currently active channels matching ``pattern``
        '''
        if pattern:
            return self.store.execute('PUBSUB', 'CHANNELS', pattern)
        else:
            return self.store.execute('PUBSUB', 'CHANNELS')

    def psubscribe(self, pattern, *patterns):
        return self._subscribe('PSUBSCRIBE', pattern, *patterns)

    def punsubscribe(self, *patterns):
        # no-op when no pub/sub connection was ever established
        if self._connection:
            return self._connection.execute('PUNSUBSCRIBE', *patterns)

    def subscribe(self, channel, *channels):
        return self._subscribe('SUBSCRIBE', channel, *channels)

    def unsubscribe(self, *channels):
        '''Un-subscribe from a list of ``channels``.
        '''
        if self._connection:
            return self._connection.execute('UNSUBSCRIBE', *channels)

    def close(self):
        '''Stop listening for messages.
        '''
        if self._connection:
            yield from self._connection.execute('PUNSUBSCRIBE')
            yield from self._connection.execute('UNSUBSCRIBE')

    # INTERNALS
    def _subscribe(self, *args):
        # lazily open the dedicated pub/sub connection on first use
        if not self._connection:
            protocol_factory = partial(PubsubProtocol, self,
                                       producer=self.store)
            self._connection = yield from self.store.connect(protocol_factory)
        yield from self._connection.execute(*args)
| {
"repo_name": "ymero/pulsar",
"path": "pulsar/apps/data/redis/pubsub.py",
"copies": "5",
"size": "3178",
"license": "bsd-3-clause",
"hash": -8802111716602501000,
"line_mean": 33.5434782609,
"line_max": 78,
"alpha_frac": 0.5846444305,
"autogenerated": false,
"ratio": 4.395573997233749,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7480218427733748,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from pydispatch import dispatcher
from icecrate import database
from icecrate import items
from icecrate.utils import keygen
tagkey = partial(keygen, "icecrate", "tags")
def all_tags():  # pragma: no cover
    """Return the set of every known tag id."""
    index_key = tagkey(meta="all")
    return database.smembers(index_key)
def by_item_id(item_id):
    """Yield tag information for every tag attached to the item."""
    item = items.by_item_id(item_id)
    tag_ids = _split_tags(item.get("tags", ""))
    for tag_id in tag_ids:
        yield by_tag_id(tag_id)
def by_tag_id(tag_id):  # pragma: no cover
    """Return the stored hash of information for one tag."""
    key = tagkey(tag_id)
    return database.hgetall(key)
def _split_tags(tags_field):
"""Split a tag string and return a set of tag IDs.
Empty tags and duplicates are filtered out.
"""
return set(filter(None, (tag.strip() for tag in tags_field.split(","))))
# handle events
def handle_item_preupdate(item):
    """Normalize an item's tag field before it is saved and register
    every tag in the database."""
    # filter duplicates and empties out of the raw tag field
    tags = _split_tags(item.get("tags", ""))

    # echo the normalized list back onto the item
    item["tags"] = ", ".join(tags)

    for tag in tags:
        # record the tag name and index it in the set of all tags
        database.hset(tagkey(tag), "name", tag)
        database.sadd(tagkey(meta="all"), tag)
dispatcher.connect(handle_item_preupdate, signal="icecrate.items.preupdate")
| {
"repo_name": "Artanis/icecrate",
"path": "icecrate/tags.py",
"copies": "1",
"size": "1435",
"license": "bsd-2-clause",
"hash": 449615211346356300,
"line_mean": 24.625,
"line_max": 76,
"alpha_frac": 0.6571428571,
"autogenerated": false,
"ratio": 3.384433962264151,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4541576819364151,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from pydispatch import dispatcher
from icecrate import database
from icecrate.utils import keygen
itemkey = partial(keygen, "icecrate", "items")
def all_items():  # pragma: no cover
    """Return the set of every known item id (UPC)."""
    index_key = itemkey(meta="all")
    return database.smembers(index_key)
def by_item_id(item_id):  # pragma: no cover
    """Fetch one item's stored hash by its id (UPC)."""
    item_key = itemkey(item_id)
    return database.hgetall(item_key)
def save_item_data(item_id, data):
    """Persist item data, firing the pre/post update signals.

    ``icecrate.items.preupdate`` is sent first with the mutable item
    data, so handlers may normalize it in place (keys and values must
    stay strings or numbers; handler order is undefined). The data is
    then written and indexed, after which ``icecrate.items.postupdate``
    is sent with only the item id.
    """
    dispatcher.send("icecrate.items.preupdate", item=data)

    # icecrate.items.update
    item_key = itemkey(item_id)
    database.hmset(item_key, data)
    database.sadd(itemkey(meta="all"), item_id)

    dispatcher.send("icecrate.items.postupdate", item_id=item_id)
| {
"repo_name": "Artanis/icecrate",
"path": "icecrate/items.py",
"copies": "1",
"size": "1281",
"license": "bsd-2-clause",
"hash": 5347343758983795000,
"line_mean": 28.1136363636,
"line_max": 71,
"alpha_frac": 0.7088212334,
"autogenerated": false,
"ratio": 3.588235294117647,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9759604080974291,
"avg_score": 0.007490489308671127,
"num_lines": 44
} |
from functools import partial
from pykit.ir import vvisit, ArgLoader, verify_lowlevel
from pykit.ir import defs, opgrouper
from pykit.types import Boolean, Integral, Real, Pointer, Function, Int64
from pykit.codegen.llvm.llvm_types import llvm_type
import llvm.core as lc
from llvm.core import Type, Constant
#===------------------------------------------------------------------===
# Definitions
#===------------------------------------------------------------------===
# Maps from pykit comparison-operator strings to llvmpy predicate enums,
# selected in Translator.op_compare based on the operand type.

# ordered float comparisons (NaN compares false)
compare_float = {
    '>': lc.FCMP_OGT,
    '<': lc.FCMP_OLT,
    '==': lc.FCMP_OEQ,
    '>=': lc.FCMP_OGE,
    '<=': lc.FCMP_OLE,
    '!=': lc.FCMP_ONE,
}

compare_signed_int = {
    '>': lc.ICMP_SGT,
    '<': lc.ICMP_SLT,
    '==': lc.ICMP_EQ,
    '>=': lc.ICMP_SGE,
    '<=': lc.ICMP_SLE,
    '!=': lc.ICMP_NE,
}

# NOTE: name is misspelled ("unsiged"); kept as-is because
# Translator.op_compare looks it up under this name.
compare_unsiged_int = {
    '>': lc.ICMP_UGT,
    '<': lc.ICMP_ULT,
    '==': lc.ICMP_EQ,
    '>=': lc.ICMP_UGE,
    '<=': lc.ICMP_ULE,
    '!=': lc.ICMP_NE,
}

compare_bool = {
    '==': lc.ICMP_EQ,
    '!=': lc.ICMP_NE
}
# below based on from npm/codegen

def integer_invert(builder, val):
    # bitwise not: x ^ -1 (all-ones of the same width)
    return builder.xor(val, Constant.int_signextend(val.type, -1))

def integer_usub(builder, val):
    # unary minus: 0 - x
    return builder.sub(Constant.int(val.type, 0), val)

def integer_not(builder, value):
    # logical not: x == 0
    return builder.icmp(lc.ICMP_EQ, value, Constant.int(value.type, 0))

def float_usub(builder, val):
    # unary minus: 0.0 - x
    return builder.fsub(Constant.real(val.type, 0), val)

def float_not(builder, val):
    # logical not: x == 0.0 (ordered)
    return builder.fcmp(lc.FCMP_OEQ, val, Constant.real(val.type, 0))

# Binary opcode -> builder method. Integer entries are
# (signed, unsigned) pairs indexed by ``type.unsigned`` in
# Translator.op_binary.
binop_int = {
    '+': (lc.Builder.add, lc.Builder.add),
    '-': (lc.Builder.sub, lc.Builder.sub),
    '*': (lc.Builder.mul, lc.Builder.mul),
    '/': (lc.Builder.sdiv, lc.Builder.udiv),
    '//': (lc.Builder.sdiv, lc.Builder.udiv),
    '%': (lc.Builder.srem, lc.Builder.urem),
    '&': (lc.Builder.and_, lc.Builder.and_),
    '|': (lc.Builder.or_, lc.Builder.or_),
    '^': (lc.Builder.xor, lc.Builder.xor),
    '<<': (lc.Builder.shl, lc.Builder.shl),
    '>>': (lc.Builder.ashr, lc.Builder.lshr),  # arithmetic vs logical shift
}

binop_float = {
    '+': lc.Builder.fadd,
    '-': lc.Builder.fsub,
    '*': lc.Builder.fmul,
    '/': lc.Builder.fdiv,
    '//': lc.Builder.fdiv,
    '%': lc.Builder.frem,
}

# Unary opcode -> emitter, per operand kind (see Translator.op_unary)
unary_bool = {
    '!': integer_not,
}

unary_int = {
    '~': integer_invert,
    '!': integer_not,
    "+": lambda builder, arg: arg,
    "-": integer_usub,
}

unary_float = {
    '!': float_not,
    "+": lambda builder, arg: arg,
    "-": float_usub,
}
#===------------------------------------------------------------------===
# Utils
#===------------------------------------------------------------------===
# Frequently used LLVM integer types
i1, i16, i32, i64 = map(Type.int, [1, 16, 32, 64])

def const_int(type, value):
    # note: ``type`` shadows the builtin; kept for API compatibility
    return Constant.int(type, value)

# convenience constructors for common constants
const_i32 = partial(const_int, i32)
const_i64 = partial(const_int, i64)
zero = partial(const_int, value=0)
one = partial(const_int, value=1)
def sizeof(builder, ty, intp, name=''):
    """Emit IR computing sizeof(``ty``) as an integer of type ``intp``.

    Uses the gep-from-null trick: the address of element 1 of a null
    pointer to ``ty`` equals the type's size in bytes.

    Fix: an optional ``name`` for the resulting instruction was added --
    Translator.op_sizeof passes a result name as a fourth argument,
    which previously raised TypeError (the function took only three).
    """
    ptr = Type.pointer(ty)
    null = Constant.null(ptr)
    offset = builder.gep(null, [Constant.int(Type.int(), 1)])
    return builder.ptrtoint(offset, intp, name)
#===------------------------------------------------------------------===
# Translator
#===------------------------------------------------------------------===
class Translator(object):
    """
    Translate a function in low-level form.

    This means it can only use values of type Bool, Int, Float, Struct or
    Pointer. Values of type Function may be called.

    Fixes relative to the previous revision:
      * the duplicate, identical ``op_getindex`` definition was removed
        (the second def silently shadowed the first)
      * ``op_setindex`` called ``store`` with swapped arguments; llvm
        ``store`` takes (value, pointer), as op_store already does
      * ``op_sizeof`` called ``sizeof`` with four arguments although it
        only accepts three
      * ``op_ptrstore`` passed a result name to ``store`` (a void
        instruction), inconsistent with op_store
    """

    def __init__(self, func, env, lfunc, llvm_typer, llvm_module):
        self.func = func              # pykit function being translated
        self.env = env                # codegen environment dict
        self.lfunc = lfunc            # destination LLVM function
        self.llvm_type = llvm_typer   # maps pykit types to LLVM types
        self.lmod = llvm_module
        self.builder = None           # created on the first blockswitch
        self.phis = []                # [pykit_phi] patched after codegen

    def blockswitch(self, newblock):
        """Point the IR builder at the end of ``newblock``."""
        if not self.builder:
            self.builder = lc.Builder.new(newblock)

        self.builder.position_at_end(newblock)

    # __________________________________________________________________

    def op_arg(self, arg):
        """Map a pykit function argument to the matching LLVM argument."""
        return self.lfunc.args[self.func.args.index(arg)]

    # __________________________________________________________________

    def op_unary(self, op, arg):
        """Emit a unary op, dispatching on the operand's type kind."""
        opmap = {Boolean: unary_bool,
                 Integral: unary_int,
                 Real: unary_float}[type(op.type)]
        unop = defs.unary_opcodes[op.opcode]
        return opmap[unop](self.builder, arg)

    def op_binary(self, op, left, right):
        """Emit a binary op; integer table is (signed, unsigned) pairs."""
        binop = defs.binary_opcodes[op.opcode]
        if op.type.is_int:
            genop = binop_int[binop][op.type.unsigned]
        else:
            genop = binop_float[binop]
        return genop(self.builder, left, right, op.result)

    def op_compare(self, op, left, right):
        """Emit an icmp/fcmp with the correct signedness/orderedness."""
        cmpop = defs.compare_opcodes[op.opcode]
        type = op.args[0].type
        if type.is_int and type.unsigned:
            cmp, lop = self.builder.icmp, compare_unsiged_int[cmpop]
        elif type.is_int or type.is_bool:
            cmp, lop = self.builder.icmp, compare_signed_int[cmpop]
        else:
            cmp, lop = self.builder.fcmp, compare_float[cmpop]
        return cmp(lop, left, right, op.result)

    # __________________________________________________________________

    def op_convert(self, op, arg):
        """Cast ``arg`` to the llvm equivalent of ``op.type``."""
        from llpython.byte_translator import LLVMCaster
        unsigned = op.type.is_int and op.type.unsigned
        # The float cast doens't accept this keyword argument
        kwds = {'unsigned': unsigned} if unsigned else {}
        return LLVMCaster.build_cast(self.builder, arg,
                                     self.llvm_type(op.type), **kwds)

    # __________________________________________________________________

    def op_call(self, op, function, args):
        # Get the callee LLVM function from the cache. This is put there by
        # pykit.codegen.codegen
        cache = self.env["codegen.cache"]
        lfunc = cache[function]
        return self.builder.call(lfunc, args)

    def op_call_math(self, op, name, args):
        # Math is resolved by an LLVM postpass
        argtypes = [arg.type for arg in args]
        lfunc_type = self.llvm_type(Function(op.type, argtypes))
        lfunc = self.lmod.get_or_insert_function(
            lfunc_type, 'pykit.math.%s.%s' % (map(str, argtypes), name.lower()))
        return self.builder.call(lfunc, args, op.result)

    # __________________________________________________________________

    def op_getfield(self, op, struct, attr):
        # NOTE(review): llvmpy extract_value expects a plain int index;
        # passing a Constant looks wrong but is kept -- confirm.
        index = const_i32(op.type.names.index(attr))
        return self.builder.extract_value(struct, index, op.result)

    def op_setfield(self, op, struct, attr, value):
        # NOTE(review): insert_element is the *vector* op; the struct
        # counterpart of extract_value above is insert_value -- confirm.
        index = const_i32(op.type.names.index(attr))
        return self.builder.insert_element(struct, value, index, op.result)

    # __________________________________________________________________

    def op_getindex(self, op, array, indices):
        # (a second identical definition of this method was removed)
        return self.builder.gep(array, indices, op.result)

    def op_setindex(self, op, array, indices, value):
        ptr = self.builder.gep(array, indices)
        # BUG FIX: store takes (value, pointer); the arguments were
        # swapped, inconsistent with op_store/op_ptrstore below.
        self.builder.store(value, ptr)

    # __________________________________________________________________

    def op_alloca(self, op):
        """Reserve a stack slot for the pointee of ``op.type``."""
        llvm_pointer_type = self.llvm_type(op.type)
        return self.builder.alloca(llvm_pointer_type.pointee, op.result)

    def op_load(self, op, stackvar):
        return self.builder.load(stackvar, op.result)

    def op_store(self, op, value, stackvar):
        self.builder.store(value, stackvar)

    # __________________________________________________________________

    def op_jump(self, op, block):
        self.builder.branch(block)

    def op_cbranch(self, op, test, true_block, false_block):
        self.builder.cbranch(test, true_block, false_block)

    def op_phi(self, op):
        # incoming edges are filled in later by update_phis()
        phi = self.builder.phi(self.llvm_type(op.type), op.result)
        self.phis.append(op)
        return phi

    def op_ret(self, op, value):
        if value is None:
            assert self.func.type.restype.is_void
            self.builder.ret_void()
        else:
            self.builder.ret(value)

    # __________________________________________________________________

    def op_sizeof(self, op, type):
        int_type = self.llvm_type(op.type)
        item_type = self.llvm_type(type)
        # BUG FIX: sizeof() takes (builder, ty, intp); the extra
        # op.result argument raised TypeError, so it is dropped.
        return sizeof(self.builder, item_type, int_type)

    def op_addressof(self, op, func):
        """Materialize a known function address as a function pointer."""
        assert func.address
        addr = const_int(i64, func.address)
        return self.builder.inttoptr(addr, self.llvm_type(Pointer(func.type)))

    # __________________________________________________________________

    def op_ptradd(self, op, ptr, val):
        return self.builder.gep(ptr, [val], op.result)

    def op_ptrload(self, op, ptr):
        return self.builder.load(ptr, op.result)

    def op_ptrstore(self, op, ptr, val):
        # BUG FIX: store is a void instruction and takes (value, pointer)
        # only; the extra op.result name argument was dropped for
        # consistency with op_store above.
        return self.builder.store(val, ptr)

    def op_ptrcast(self, op, val):
        return self.builder.bitcast(val, self.llvm_type(op.type), op.result)

    def op_ptr_isnull(self, op, val):
        # compare the pointer's integer value against zero
        intval = self.builder.ptrtoint(val, self.llvm_type(Int64))
        return self.builder.icmp(lc.ICMP_EQ, intval, zero(intval.type), op.result)

    # __________________________________________________________________
# __________________________________________________________________
def allocate_blocks(llvm_func, pykit_func):
    """Return a dict mapping pykit blocks to llvm blocks"""
    # NOTE(review): every basic block is labelled with the *function*
    # name; this looks like it was meant to be ``block.name``. LLVM
    # uniquifies duplicate labels, so behavior is unaffected -- confirm
    # before changing.
    blocks = {}
    for block in pykit_func.blocks:
        blocks[block] = llvm_func.append_basic_block(pykit_func.name)
    return blocks
def update_phis(phis, valuemap, argloader):
    """
    Update LLVM phi values given a list of pykit phi values and block and
    value dicts mapping pykit values to LLVM values
    """
    for phi in phis:
        lphi = valuemap[phi.result]
        # phi.args is ([incoming blocks], [incoming values])
        lblocks = [argloader.load_op(block) for block in phi.args[0]]
        lvalues = [argloader.load_op(value) for value in phi.args[1]]
        for lblock, lvalue in zip(lblocks, lvalues):
            lphi.add_incoming(lvalue, lblock)
#===------------------------------------------------------------------===
# Pass to group operations such as add/mul
#===------------------------------------------------------------------===
class LLVMArgLoader(ArgLoader):
    """
    Load Operation arguments as LLVM values passed and extra *args to the
    Translator.
    """

    def __init__(self, store, engine, llvm_module, lfunc, blockmap):
        super(LLVMArgLoader, self).__init__(store)
        self.engine = engine            # LLVM execution engine
        self.llvm_module = llvm_module  # target LLVM module
        self.lfunc = lfunc              # LLVM function being built
        self.blockmap = blockmap        # pykit Block -> LLVM basic block

    def load_GlobalValue(self, arg):
        """Resolve a pykit GlobalValue to an LLVM value."""
        if arg.external:
            # Fix: was `self.lmod`, an attribute never assigned anywhere
            # (__init__ stores the module as `self.llvm_module`), so this
            # branch always raised AttributeError.
            value = self.llvm_module.get_or_insert_function(
                llvm_type(arg.type))
            # NOTE(review): llvmpy's get_or_insert_function also accepts a
            # name argument -- confirm whether the global's name should be
            # passed here.
            if arg.address:
                # Known runtime address: register it with the engine.
                self.engine.add_global_mapping(value, arg.address)
        else:
            assert arg.value
            value = arg.value.const
        return value

    def load_Block(self, arg):
        # Map a pykit block to its pre-allocated LLVM basic block.
        return self.blockmap[arg]

    def load_Constant(self, arg):
        """Build an LLVM constant matching the pykit constant's type."""
        ty = type(arg.type)
        lty = llvm_type(arg.type)
        if ty == Pointer:
            if arg.const == 0:
                return lc.Constant.null(lty)
            else:
                # Fix: inttoptr must target the pointer type `lty`;
                # casting to i64 (an integer type) is invalid LLVM IR.
                return const_i64(arg.const).inttoptr(lty)
        elif ty == Integral:
            if arg.type.unsigned:
                return lc.Constant.int(lty, arg.const)
            else:
                return lc.Constant.int_signextend(lty, arg.const)
        elif ty == Real:
            return lc.Constant.real(lty, arg.const)
        else:
            raise NotImplementedError("Constants for", ty)

    def load_Undef(self, arg):
        # An explicit LLVM `undef` of the corresponding type.
        return lc.Constant.undef(llvm_type(arg.type))
def initialize(func, env):
    """Verify `func` is low-level and declare it in the target LLVM module.

    Returns the freshly added (empty) LLVM function.
    """
    verify_lowlevel(func)
    lmod = env["codegen.llvm.module"]
    lfunc = lmod.add_function(llvm_type(func.type), func.name)
    return lfunc
def translate(func, env, lfunc):
    """Generate LLVM IR for `func` into the pre-declared `lfunc`."""
    # Shared execution engine and module live in the codegen environment.
    engine, llvm_module = env["codegen.llvm.engine"], env["codegen.llvm.module"]
    # Pre-allocate one LLVM basic block per pykit block.
    blockmap = allocate_blocks(lfunc, func)
    ### Create visitor ###
    translator = Translator(func, env, lfunc, llvm_type, llvm_module)
    visitor = opgrouper(translator)
    ### Codegen ###
    argloader = LLVMArgLoader(None, engine, llvm_module, lfunc, blockmap)
    valuemap = vvisit(visitor, func, argloader)
    # Phis may reference values emitted after them, so their incoming
    # edges are patched in a second pass once all values exist.
    update_phis(translator.phis, valuemap, argloader)
    return lfunc
"repo_name": "Inaimathi/pykit",
"path": "pykit/codegen/llvm/llvm_codegen.py",
"copies": "2",
"size": "12597",
"license": "bsd-3-clause",
"hash": 6072332041225850000,
"line_mean": 31.137755102,
"line_max": 82,
"alpha_frac": 0.5386996904,
"autogenerated": false,
"ratio": 3.50695991091314,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5045659601313139,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from pyparsing import alphanums, nums, oneOf, opAssoc, operatorPrecedence,\
CaselessLiteral, Combine, Keyword, Literal, MatchFirst, Optional,\
ParseException, Regex, Word, ZeroOrMore
from bamboo.core.aggregations import AGGREGATIONS
from bamboo.core.operations import EvalAndOp, EvalCaseOp, EvalComparisonOp,\
EvalConstant, EvalExpOp, EvalDate, EvalInOp, EvalMapOp, EvalMultOp,\
EvalNotOp, EvalOrOp, EvalPercentile, EvalPlusOp, EvalSignOp, EvalString,\
EvalToday
def build_caseless_or_expression(strings):
    """Fold *strings* into one case-insensitive pyparsing alternation.

    Uses pyparsing's `|` operator, which builds a MatchFirst over the
    case-insensitive literals.
    """
    def join(expression, literal):
        return expression | literal
    return reduce(join, [CaselessLiteral(string) for string in strings])
def get_dependent_columns(parsed_expr, dataset):
    """Return all columns `parsed_expr` depends on for `dataset` (may repeat)."""
    return __find_dependent_columns(dataset, parsed_expr, [])
def __find_dependent_columns(dataset, parsed_expr, result):
    """Find dependent columns for a dataset and parsed expression.

    Recursively walks the expression tree, appending each node's
    dependent columns to *result* (mutated in place and returned).

    :param dataset: The dataset to find dependent columns for.
    :param parsed_expr: The parsed formula expression.
    """
    result.extend(parsed_expr.dependent_columns(dataset))
    for child in parsed_expr.get_children():
        __find_dependent_columns(dataset, child, result)
    return result
class ParseError(Exception):
    """Raised when a formula string cannot be parsed."""
class Parser(object):
    """Class for parsing and evaluating formula.

    Attributes:

    - aggregation: Aggregation parsed from formula.
    - aggregation_names: Possible aggregations.
    - bnf: Cached Backus-Naur Form of formula.
    - column_functions: Cached additional columns as aggregation parameters.
    - function_names: Names of possible functions in formulas.
    - operator_names: Names of possible operators in formulas.
    - parsed_expr: Cached parsed expression.
    - special_names: Names of possible reserved names in formulas.
    - reserved_words: List of all possible reserved words that may be used in
      formulas.
    """

    aggregation = None
    # NOTE: Python 2 semantics -- dict.keys() returns a list here.
    aggregation_names = AGGREGATIONS.keys()
    bnf = None
    column_functions = None
    function_names = ['date', 'percentile', 'today']
    operator_names = ['and', 'or', 'not', 'in']
    parsed_expr = None
    special_names = ['default']
    reserved_words = aggregation_names + function_names + operator_names +\
        special_names

    def __init__(self):
        # Build (or fetch the cached) grammar once per instance.
        self.bnf = self.__build_bnf()

    @classmethod
    def dependent_columns(cls, formula, dataset):
        """Return the set of dataset columns `formula` depends on."""
        functions, _ = cls.parse(formula)
        columns = [get_dependent_columns(f, dataset) for f in functions]
        return set.union(set(), *columns)

    @property
    def functions(self):
        # Aggregations carry their argument expressions in column_functions;
        # plain formulas use the parsed expression directly.
        return self.column_functions if self.aggregation else self.parsed_expr

    def store_aggregation(self, _, __, tokens):
        """Cache a parsed aggregation (pyparsing parse action)."""
        self.aggregation = tokens[0]
        self.column_functions = tokens[1:]

    def __build_bnf(self):
        """Parse formula to function based on language definition.

        Backus-Naur Form of formula language:

        =========   ==========
        Operation   Expression
        =========   ==========
        addop       '+' | '-'
        multop      '*' | '/'
        expop       '^'
        compop      '==' | '<' | '>' | '<=' | '>='
        notop       'not'
        andop       'and'
        orop        'or'
        real        \d+(.\d+)
        integer     \d+
        variable    \w+
        string      ".+"
        atom        real | integer | variable
        func        func ( atom )
        factor      atom [ expop factor]*
        term        factor [ multop factor ]*
        expr        term [ addop term ]*
        equation    expr [compop expr]*
        in          string in '[' "string"[, "string"]* ']'
        neg         [notop]* equation | in
        conj        neg [andop neg]*
        disj        conj [orop conj]*
        case        'case' disj: atom[, disj: atom]*[, 'default': atom]
        trans       trans ( case )
        agg         agg ( trans[, trans]* )
        =========   ==========
        """
        # The grammar is cached at class level; build it only once.
        if self.bnf:
            return self.bnf
        # literal operators
        exp_op = Literal('^')
        sign_op = oneOf('+ -')
        mult_op = oneOf('* /')
        plus_op = oneOf('+ -')
        not_op = CaselessLiteral('not')
        and_op = CaselessLiteral('and')
        or_op = CaselessLiteral('or')
        in_op = CaselessLiteral('in').suppress()
        comparison_op = oneOf('< <= > >= != ==')
        case_op = CaselessLiteral('case').suppress()
        # aggregation functions
        aggregations = build_caseless_or_expression(self.aggregation_names)
        # literal syntactic tokens (suppressed: they never appear in results)
        open_bracket = Literal('[').suppress()
        close_bracket = Literal(']').suppress()
        open_paren = Literal('(').suppress()
        close_paren = Literal(')').suppress()
        comma = Literal(',').suppress()
        dquote = Literal('"').suppress()
        colon = Literal(':').suppress()
        # functions
        date_func = CaselessLiteral('date')
        percentile_func = CaselessLiteral('percentile')
        today_func = CaselessLiteral('today()').setParseAction(EvalToday)
        # case statement
        default = CaselessLiteral('default')
        # Reserved words may not be used as column names in formulas.
        reserved_words = MatchFirst(
            [Keyword(word) for word in self.reserved_words])
        # atoms
        integer = Word(nums)
        real = Combine(Word(nums) + '.' + Word(nums))
        variable = ~reserved_words + Word(alphanums + '_')
        atom = real | integer | variable
        atom.setParseAction(EvalConstant)
        # everything between pairs of double quotes is a string
        string = dquote + Regex('[^"]+') + dquote
        string.setParseAction(EvalString)
        # expressions
        in_list = open_bracket + string +\
            ZeroOrMore(comma + string) + close_bracket
        func_expr = operatorPrecedence(string, [
            (date_func, 1, opAssoc.RIGHT, EvalDate),
        ]) | today_func
        # Arithmetic precedence: sign > exponent > mult/div > add/sub.
        arith_expr = operatorPrecedence(atom | func_expr, [
            (sign_op, 1, opAssoc.RIGHT, EvalSignOp),
            (exp_op, 2, opAssoc.RIGHT, EvalExpOp),
            (mult_op, 2, opAssoc.LEFT, EvalMultOp),
            (plus_op, 2, opAssoc.LEFT, EvalPlusOp),
        ])
        comp_expr = operatorPrecedence(arith_expr, [
            (comparison_op, 2, opAssoc.LEFT, EvalComparisonOp),
        ])
        # Propositional logic over comparisons and membership tests.
        prop_expr = operatorPrecedence(comp_expr | in_list, [
            (in_op, 2, opAssoc.RIGHT, EvalInOp),
            (not_op, 1, opAssoc.RIGHT, EvalNotOp),
            (and_op, 2, opAssoc.LEFT, EvalAndOp),
            (or_op, 2, opAssoc.LEFT, EvalOrOp),
        ])
        default_statement = (default + colon + atom).setParseAction(EvalMapOp)
        map_statement = (prop_expr + colon + atom).setParseAction(EvalMapOp)
        case_list = map_statement + ZeroOrMore(
            comma + map_statement) + Optional(comma + default_statement)
        case_expr = operatorPrecedence(case_list, [
            (case_op, 1, opAssoc.RIGHT, EvalCaseOp),
        ]) | prop_expr
        trans_expr = operatorPrecedence(case_expr, [
            (percentile_func, 1, opAssoc.RIGHT, EvalPercentile),
        ])
        # Top level: an aggregation call over transforms, or a bare transform.
        return ((aggregations + open_paren + Optional(
            trans_expr + ZeroOrMore(comma + trans_expr)))
            .setParseAction(self.store_aggregation) + close_paren)\
            | trans_expr

    @classmethod
    def parse(cls, formula):
        """Parse formula and return evaluation function.

        Parse `formula` into an aggregation name and functions.
        There will be multiple functions if the aggregation takes multiple
        arguments, e.g. ratio which takes a numerator and denominator formula.

        Examples:

        - constants
            - ``9 + 5``,
        - aliases
            - ``rating``,
            - ``gps``,
        - arithmetic
            - ``amount + gps_alt``,
            - ``amount - gps_alt``,
            - ``amount + 5``,
            - ``amount - gps_alt + 2.5``,
            - ``amount * gps_alt``,
            - ``amount / gps_alt``,
            - ``amount * gps_alt / 2.5``,
            - ``amount + gps_alt * gps_precision``,
        - precedence
            - ``(amount + gps_alt) * gps_precision``,
        - comparison
            - ``amount == 2``,
            - ``10 < amount``,
            - ``10 < amount + gps_alt``,
        - logical
            - ``not amount == 2``,
            - ``not(amount == 2)``,
            - ``amount == 2 and 10 < amount``,
            - ``amount == 2 or 10 < amount``,
            - ``not not amount == 2 or 10 < amount``,
            - ``not amount == 2 or 10 < amount``,
            - ``not amount == 2) or 10 < amount``,
            - ``not(amount == 2 or 10 < amount)``,
            - ``amount ^ 3``,
            - ``amount + gps_alt) ^ 2 + 100``,
            - ``amount``,
            - ``amount < gps_alt - 100``,
        - membership
            - ``rating in ["delectible"]``,
            - ``risk_factor in ["low_risk"]``,
            - ``amount in ["9.0", "2.0", "20.0"]``,
            - ``risk_factor in ["low_risk"]) and (amount in ["9.0", "20.0"])``,
        - dates
            - ``date("09-04-2012") - submit_date > 21078000``,
        - cases
            - ``case food_type in ["morning_food"]: 1, default: 3``
        - transformations: row-wise column based aggregations
            - ``percentile(amount)``

        :param formula: The string to parse.

        :returns: A tuple with the name of the aggregation in the formula, if
            any and a list of functions built from the input string.
        """
        parser = cls()
        try:
            # Side effect: a matched aggregation is cached on `parser` via
            # the store_aggregation parse action.
            parser.parsed_expr = parser.bnf.parseString(formula, parseAll=True)
        except ParseException, err:
            raise ParseError('Parse Failure for string "%s": %s' % (
                formula, err))
        return [parser.functions, parser.aggregation]

    @classmethod
    def parse_aggregation(cls, formula):
        """Return only the aggregation name (or None) parsed from `formula`."""
        _, a = cls.parse(formula)
        return a

    @classmethod
    def parse_function(cls, formula):
        """Return the first evaluation function parsed from `formula`."""
        return cls.parse_functions(formula)[0]

    @classmethod
    def parse_functions(cls, formula):
        """Return the parsed expressions as callable eval functions."""
        return [partial(f.eval) for f in cls.parse(formula)[0]]

    @classmethod
    def validate(cls, dataset, formula, groups):
        """Validate `formula` and `groups` for dataset.

        Validate the formula and group string by attempting to get a row from
        the dframe for the dataset and then running parser validation on this
        row. Additionally, ensure that the groups in the group string are
        columns in the dataset.

        :param dataset: The dataset to validate for.
        :param formula: The formula to validate.
        :param groups: A list of columns to group by.

        :returns: The aggregation (or None) for the formula.
        """
        cls.validate_formula(formula, dataset)
        for group in groups:
            if not group in dataset.schema.keys():
                raise ParseError(
                    'Group %s not in dataset columns.' % group)

    @classmethod
    def validate_formula(cls, formula, dataset):
        """Validate the *formula* on an example *row* of data.

        Rebuild the BNF then parse the `formula` given the sample `row`.

        :param formula: The formula to validate.
        :param dataset: The dataset to validate against.

        :returns: The aggregation for the formula.
        """
        # check valid formula
        cls.parse(formula)
        schema = dataset.schema
        if not schema:
            raise ParseError(
                'No schema for dataset, please add data or wait for it to '
                'finish processing')
        for column in cls.dependent_columns(formula, dataset):
            if column not in schema.keys():
                raise ParseError('Missing column reference: %s' % column)

    def __getstate__(self):
        """Get state for pickle."""
        # NOTE(review): special_names appears twice in this list;
        # __setstate__ unpacks the same order, so pickling round-trips
        # consistently -- likely a copy/paste slip rather than intent.
        return [
            self.aggregation,
            self.aggregation_names,
            self.function_names,
            self.operator_names,
            self.special_names,
            self.reserved_words,
            self.special_names,
        ]

    def __setstate__(self, state):
        """Set internal variables from pickled state."""
        self.aggregation, self.aggregation_names, self.function_names,\
            self.operator_names, self.special_names, self.reserved_words,\
            self.special_names = state
        # Rebuild the grammar: parse actions bind to this instance.
        self.__build_bnf()
| {
"repo_name": "pld/bamboo",
"path": "bamboo/core/parser.py",
"copies": "2",
"size": "12674",
"license": "bsd-3-clause",
"hash": 3235487308347919000,
"line_mean": 33.6284153005,
"line_max": 79,
"alpha_frac": 0.5665141234,
"autogenerated": false,
"ratio": 4.217637271214643,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00046340940747883856,
"num_lines": 366
} |
from functools import partial
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from electrum_arg.plugins import hook
from electrum_arg.i18n import _
from electrum_arg_gui.qt import EnterButton
from electrum_arg_gui.qt.util import ThreadedButton, Buttons
from electrum_arg_gui.qt.util import WindowModalDialog, OkButton
from labels import LabelsPlugin
class Plugin(LabelsPlugin):
    """Qt GUI layer for the label-sync plugin: a settings dialog with
    force upload/download buttons, plus signal plumbing so open windows
    refresh their tabs when labels are pulled."""

    def __init__(self, *args):
        LabelsPlugin.__init__(self, *args)
        # Bare QObject used only as the emitter for 'labels_changed'.
        self.obj = QObject()

    def requires_settings(self):
        # Tells the plugin UI to show a settings button for this plugin.
        return True

    def settings_widget(self, window):
        # Button in the plugins dialog that opens our settings dialog.
        return EnterButton(_('Settings'),
                           partial(self.settings_dialog, window))

    def settings_dialog(self, window):
        wallet = window.parent().wallet
        d = WindowModalDialog(window, _("Label Settings"))
        hbox = QHBoxLayout()
        hbox.addWidget(QLabel("Label sync options:"))
        # Both buttons run sync work on a worker thread; on completion the
        # done_processing callback shows a confirmation on the dialog.
        upload = ThreadedButton("Force upload",
                                partial(self.push_thread, wallet),
                                partial(self.done_processing, d))
        download = ThreadedButton("Force download",
                                  partial(self.pull_thread, wallet, True),
                                  partial(self.done_processing, d))
        vbox = QVBoxLayout()
        vbox.addWidget(upload)
        vbox.addWidget(download)
        hbox.addLayout(vbox)
        vbox = QVBoxLayout(d)
        vbox.addLayout(hbox)
        vbox.addSpacing(20)
        vbox.addLayout(Buttons(OkButton(d)))
        return bool(d.exec_())

    def on_pulled(self, wallet):
        # Called from the sync thread; re-emit as an (old-style) Qt signal
        # so GUI-thread listeners can react.
        self.obj.emit(SIGNAL('labels_changed'), wallet)

    def done_processing(self, dialog, result):
        dialog.show_message(_("Your labels have been synchronised."))

    @hook
    def on_new_window(self, window):
        # NOTE(review): connects on window.app while on_pulled emits via
        # self.obj -- confirm the old-style SIGNAL broadcast actually
        # reaches this receiver.
        window.connect(window.app, SIGNAL('labels_changed'), window.update_tabs)
        self.start_wallet(window.wallet)

    @hook
    def on_close_window(self, window):
        self.stop_wallet(window.wallet)
| {
"repo_name": "argentumproject/electrum-arg",
"path": "plugins/labels/qt.py",
"copies": "1",
"size": "2045",
"license": "mit",
"hash": 3728527760502373000,
"line_mean": 31.9838709677,
"line_max": 80,
"alpha_frac": 0.6215158924,
"autogenerated": false,
"ratio": 4.122983870967742,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00019912385503783353,
"num_lines": 62
} |
from functools import partial
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from electrum_cesc.plugins import hook
from electrum_cesc.i18n import _
from electrum_cesc_gui.qt import EnterButton
from electrum_cesc_gui.qt.util import ThreadedButton, Buttons
from electrum_cesc_gui.qt.util import WindowModalDialog, OkButton
from labels import LabelsPlugin
class Plugin(LabelsPlugin):
    """Qt GUI layer for the label-sync plugin: a settings dialog with
    force upload/download buttons, plus signal plumbing so open windows
    refresh their tabs when labels are pulled."""

    def __init__(self, *args):
        LabelsPlugin.__init__(self, *args)
        # Bare QObject used only as the emitter for 'labels_changed'.
        self.obj = QObject()

    def requires_settings(self):
        # Tells the plugin UI to show a settings button for this plugin.
        return True

    def settings_widget(self, window):
        # Button in the plugins dialog that opens our settings dialog.
        return EnterButton(_('Settings'),
                           partial(self.settings_dialog, window))

    def settings_dialog(self, window):
        wallet = window.parent().wallet
        d = WindowModalDialog(window, _("Label Settings"))
        hbox = QHBoxLayout()
        hbox.addWidget(QLabel("Label sync options:"))
        # Both buttons run sync work on a worker thread; on completion the
        # done_processing callback shows a confirmation on the dialog.
        upload = ThreadedButton("Force upload",
                                partial(self.push_thread, wallet),
                                partial(self.done_processing, d))
        download = ThreadedButton("Force download",
                                  partial(self.pull_thread, wallet, True),
                                  partial(self.done_processing, d))
        vbox = QVBoxLayout()
        vbox.addWidget(upload)
        vbox.addWidget(download)
        hbox.addLayout(vbox)
        vbox = QVBoxLayout(d)
        vbox.addLayout(hbox)
        vbox.addSpacing(20)
        vbox.addLayout(Buttons(OkButton(d)))
        return bool(d.exec_())

    def on_pulled(self, wallet):
        # Called from the sync thread; re-emit as an (old-style) Qt signal
        # so GUI-thread listeners can react.
        self.obj.emit(SIGNAL('labels_changed'), wallet)

    def done_processing(self, dialog, result):
        dialog.show_message(_("Your labels have been synchronised."))

    @hook
    def on_new_window(self, window):
        # NOTE(review): connects on window.app while on_pulled emits via
        # self.obj -- confirm the old-style SIGNAL broadcast actually
        # reaches this receiver.
        window.connect(window.app, SIGNAL('labels_changed'), window.update_tabs)
        self.start_wallet(window.wallet)

    @hook
    def on_close_window(self, window):
        self.stop_wallet(window.wallet)
| {
"repo_name": "Marcdnd/electrum-cesc",
"path": "plugins/labels/qt.py",
"copies": "1",
"size": "2050",
"license": "mit",
"hash": -1028435937784564100,
"line_mean": 32.064516129,
"line_max": 80,
"alpha_frac": 0.6224390244,
"autogenerated": false,
"ratio": 4.091816367265469,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.521425539166547,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from electrum_dgb.plugins import hook
from electrum_dgb.i18n import _
from electrum_dgb_gui.qt import EnterButton
from electrum_dgb_gui.qt.util import ThreadedButton, Buttons
from electrum_dgb_gui.qt.util import WindowModalDialog, OkButton
from labels import LabelsPlugin
class Plugin(LabelsPlugin):
    """Qt GUI layer for the label-sync plugin: a settings dialog with
    force upload/download buttons, plus signal plumbing so open windows
    refresh their tabs when labels are pulled."""

    def __init__(self, *args):
        LabelsPlugin.__init__(self, *args)
        # Bare QObject used only as the emitter for 'labels_changed'.
        self.obj = QObject()

    def requires_settings(self):
        # Tells the plugin UI to show a settings button for this plugin.
        return True

    def settings_widget(self, window):
        # Button in the plugins dialog that opens our settings dialog.
        return EnterButton(_('Settings'),
                           partial(self.settings_dialog, window))

    def settings_dialog(self, window):
        wallet = window.parent().wallet
        d = WindowModalDialog(window, _("Label Settings"))
        hbox = QHBoxLayout()
        hbox.addWidget(QLabel("Label sync options:"))
        # Both buttons run sync work on a worker thread; on completion the
        # done_processing callback shows a confirmation on the dialog.
        upload = ThreadedButton("Force upload",
                                partial(self.push_thread, wallet),
                                partial(self.done_processing, d))
        download = ThreadedButton("Force download",
                                  partial(self.pull_thread, wallet, True),
                                  partial(self.done_processing, d))
        vbox = QVBoxLayout()
        vbox.addWidget(upload)
        vbox.addWidget(download)
        hbox.addLayout(vbox)
        vbox = QVBoxLayout(d)
        vbox.addLayout(hbox)
        vbox.addSpacing(20)
        vbox.addLayout(Buttons(OkButton(d)))
        return bool(d.exec_())

    def on_pulled(self, wallet):
        # Called from the sync thread; re-emit as an (old-style) Qt signal
        # so GUI-thread listeners can react.
        self.obj.emit(SIGNAL('labels_changed'), wallet)

    def done_processing(self, dialog, result):
        dialog.show_message(_("Your labels have been synchronised."))

    @hook
    def on_new_window(self, window):
        # NOTE(review): connects on window.app while on_pulled emits via
        # self.obj -- confirm the old-style SIGNAL broadcast actually
        # reaches this receiver.
        window.connect(window.app, SIGNAL('labels_changed'), window.update_tabs)
        self.start_wallet(window.wallet)

    @hook
    def on_close_window(self, window):
        self.stop_wallet(window.wallet)
| {
"repo_name": "protonn/Electrum-Cash",
"path": "plugins/labels/qt.py",
"copies": "1",
"size": "2045",
"license": "mit",
"hash": -1200133445251655000,
"line_mean": 31.9838709677,
"line_max": 80,
"alpha_frac": 0.6215158924,
"autogenerated": false,
"ratio": 4.081836327345309,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00019912385503783353,
"num_lines": 62
} |
from functools import partial
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from electrum.plugins import hook
from electrum.i18n import _
from electrum_gui.qt import EnterButton
from electrum_gui.qt.util import ThreadedButton, Buttons
from electrum_gui.qt.util import WindowModalDialog, OkButton
from labels import LabelsPlugin
class Plugin(LabelsPlugin):
    """Qt GUI layer for the label-sync plugin: a settings dialog with
    force upload/download buttons, plus signal plumbing so open windows
    refresh their tabs when labels are pulled."""

    def __init__(self, *args):
        LabelsPlugin.__init__(self, *args)
        # Bare QObject used only as the emitter for 'labels_changed'.
        self.obj = QObject()

    def requires_settings(self):
        # Tells the plugin UI to show a settings button for this plugin.
        return True

    def settings_widget(self, window):
        # Button in the plugins dialog that opens our settings dialog.
        return EnterButton(_('Settings'),
                           partial(self.settings_dialog, window))

    def settings_dialog(self, window):
        wallet = window.parent().wallet
        d = WindowModalDialog(window, _("Label Settings"))
        hbox = QHBoxLayout()
        hbox.addWidget(QLabel("Label sync options:"))
        # Both buttons run sync work on a worker thread; on completion the
        # done_processing callback shows a confirmation on the dialog.
        upload = ThreadedButton("Force upload",
                                partial(self.push_thread, wallet),
                                partial(self.done_processing, d))
        download = ThreadedButton("Force download",
                                  partial(self.pull_thread, wallet, True),
                                  partial(self.done_processing, d))
        vbox = QVBoxLayout()
        vbox.addWidget(upload)
        vbox.addWidget(download)
        hbox.addLayout(vbox)
        vbox = QVBoxLayout(d)
        vbox.addLayout(hbox)
        vbox.addSpacing(20)
        vbox.addLayout(Buttons(OkButton(d)))
        return bool(d.exec_())

    def on_pulled(self, wallet):
        # Called from the sync thread; re-emit as an (old-style) Qt signal
        # so GUI-thread listeners can react.
        self.obj.emit(SIGNAL('labels_changed'), wallet)

    def done_processing(self, dialog, result):
        dialog.show_message(_("Your labels have been synchronised."))

    @hook
    def on_new_window(self, window):
        # NOTE(review): connects on window.app while on_pulled emits via
        # self.obj -- confirm the old-style SIGNAL broadcast actually
        # reaches this receiver.
        window.connect(window.app, SIGNAL('labels_changed'), window.update_tabs)
        self.start_wallet(window.wallet)

    @hook
    def on_close_window(self, window):
        self.stop_wallet(window.wallet)
| {
"repo_name": "parkbyte/electrumparkbyte",
"path": "plugins/labels/qt.py",
"copies": "14",
"size": "2025",
"license": "mit",
"hash": 3848333242469787600,
"line_mean": 31.6612903226,
"line_max": 80,
"alpha_frac": 0.6202469136,
"autogenerated": false,
"ratio": 4.166666666666667,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from electrum_vtc.plugins import hook
from electrum_vtc.i18n import _
from electrum_vtc_gui.qt import EnterButton
from electrum_vtc_gui.qt.util import ThreadedButton, Buttons
from electrum_vtc_gui.qt.util import WindowModalDialog, OkButton
from labels import LabelsPlugin
class Plugin(LabelsPlugin):
    """Qt GUI layer for the label-sync plugin: a settings dialog with
    force upload/download buttons, plus signal plumbing so open windows
    refresh their tabs when labels are pulled."""

    def __init__(self, *args):
        LabelsPlugin.__init__(self, *args)
        # Bare QObject used only as the emitter for 'labels_changed'.
        self.obj = QObject()

    def requires_settings(self):
        # Tells the plugin UI to show a settings button for this plugin.
        return True

    def settings_widget(self, window):
        # Button in the plugins dialog that opens our settings dialog.
        return EnterButton(_('Settings'),
                           partial(self.settings_dialog, window))

    def settings_dialog(self, window):
        wallet = window.parent().wallet
        d = WindowModalDialog(window, _("Label Settings"))
        hbox = QHBoxLayout()
        hbox.addWidget(QLabel("Label sync options:"))
        # Both buttons run sync work on a worker thread; on completion the
        # done_processing callback shows a confirmation on the dialog.
        upload = ThreadedButton("Force upload",
                                partial(self.push_thread, wallet),
                                partial(self.done_processing, d))
        download = ThreadedButton("Force download",
                                  partial(self.pull_thread, wallet, True),
                                  partial(self.done_processing, d))
        vbox = QVBoxLayout()
        vbox.addWidget(upload)
        vbox.addWidget(download)
        hbox.addLayout(vbox)
        vbox = QVBoxLayout(d)
        vbox.addLayout(hbox)
        vbox.addSpacing(20)
        vbox.addLayout(Buttons(OkButton(d)))
        return bool(d.exec_())

    def on_pulled(self, wallet):
        # Called from the sync thread; re-emit as an (old-style) Qt signal
        # so GUI-thread listeners can react.
        self.obj.emit(SIGNAL('labels_changed'), wallet)

    def done_processing(self, dialog, result):
        dialog.show_message(_("Your labels have been synchronised."))

    @hook
    def on_new_window(self, window):
        # NOTE(review): connects on window.app while on_pulled emits via
        # self.obj -- confirm the old-style SIGNAL broadcast actually
        # reaches this receiver.
        window.connect(window.app, SIGNAL('labels_changed'), window.update_tabs)
        self.start_wallet(window.wallet)

    @hook
    def on_close_window(self, window):
        self.stop_wallet(window.wallet)
| {
"repo_name": "vertcoin/electrum-vtc",
"path": "plugins/labels/qt.py",
"copies": "4",
"size": "2045",
"license": "mit",
"hash": -7584358201030574000,
"line_mean": 31.9838709677,
"line_max": 80,
"alpha_frac": 0.6215158924,
"autogenerated": false,
"ratio": 4.081836327345309,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6703352219745309,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtWidgets import QInputDialog, QLabel, QVBoxLayout, QLineEdit
from electrum.i18n import _
from electrum.plugin import hook
from electrum.wallet import Standard_Wallet
from electrum.gui.qt.util import WindowModalDialog
from .ledger import LedgerPlugin, Ledger_Client
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from ..hw_wallet.plugin import only_hook_if_libraries_available
class Plugin(LedgerPlugin, QtPluginBase):
    """Qt integration for the Ledger hardware-wallet plugin."""

    icon_unpaired = "ledger_unpaired.png"
    icon_paired = "ledger.png"

    def create_handler(self, window):
        # One GUI handler per window; marshals device prompts to Qt thread.
        return Ledger_Handler(window)

    @only_hook_if_libraries_available
    @hook
    def receive_menu(self, menu, addrs, wallet):
        """Add a "Show on Ledger" action for a single selected address."""
        # Only standard wallets whose keystore belongs to this plugin.
        if type(wallet) is not Standard_Wallet:
            return
        keystore = wallet.get_keystore()
        if type(keystore) == self.keystore_class and len(addrs) == 1:
            def show_address():
                # Run on the keystore's device thread, not the GUI thread.
                keystore.thread.add(partial(self.show_address, wallet, addrs[0]))
            menu.addAction(_("Show on Ledger"), show_address)
class Ledger_Handler(QtHandlerBase):
    """GUI-thread handler for Ledger device prompts (PIN, 2FA, status)."""

    # Signals hop from the device (worker) thread onto the GUI thread.
    setup_signal = pyqtSignal()
    auth_signal = pyqtSignal(object, object)

    def __init__(self, win):
        super(Ledger_Handler, self).__init__(win, 'Ledger')
        self.setup_signal.connect(self.setup_dialog)
        self.auth_signal.connect(self.auth_dialog)

    def word_dialog(self, msg):
        """Prompt for the wallet authentication word (masked input)."""
        response = QInputDialog.getText(self.top_level_window(), "Ledger Wallet Authentication", msg, QLineEdit.Password)
        if not response[1]:
            # Dialog was cancelled.
            self.word = None
        else:
            self.word = str(response[0])
        # Unblock the worker thread waiting on self.done.
        self.done.set()

    def message_dialog(self, msg):
        """Show a non-blocking, button-less status dialog."""
        self.clear_dialog()
        self.dialog = dialog = WindowModalDialog(self.top_level_window(), _("Ledger Status"))
        l = QLabel(msg)
        vbox = QVBoxLayout(dialog)
        vbox.addWidget(l)
        dialog.show()

    def auth_dialog(self, data, client: 'Ledger_Client'):
        """Run the 2FA auth dialog; stores the resulting PIN in self.word."""
        try:
            from .auth2fa import LedgerAuthDialog
        except ImportError as e:
            # Optional dependency missing: surface the error to the user.
            self.message_dialog(repr(e))
            return
        dialog = LedgerAuthDialog(self, data, client=client)
        dialog.exec_()
        self.word = dialog.pin
        self.done.set()

    def get_auth(self, data, *, client: 'Ledger_Client'):
        # Worker-thread entry: emit to the GUI thread, then block until
        # auth_dialog signals completion via self.done.
        self.done.clear()
        self.auth_signal.emit(data, client)
        self.done.wait()
        return self.word

    def get_setup(self):
        # Worker-thread entry for device setup; blocks until setup_dialog ran.
        self.done.clear()
        self.setup_signal.emit()
        self.done.wait()
        return

    def setup_dialog(self):
        self.show_error(_('Initialization of Ledger HW devices is currently disabled.'))
| {
"repo_name": "spesmilo/electrum",
"path": "electrum/plugins/ledger/qt.py",
"copies": "1",
"size": "2752",
"license": "mit",
"hash": 2954452151811930000,
"line_mean": 32.156626506,
"line_max": 121,
"alpha_frac": 0.6460755814,
"autogenerated": false,
"ratio": 3.689008042895442,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4835083624295442,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtWidgets import QInputDialog, QLabel, QVBoxLayout, QLineEdit
from electrum_ltc.i18n import _
from electrum_ltc.plugin import hook
from electrum_ltc.wallet import Standard_Wallet
from electrum_ltc.gui.qt.util import WindowModalDialog
from .ledger import LedgerPlugin, Ledger_Client
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from ..hw_wallet.plugin import only_hook_if_libraries_available
class Plugin(LedgerPlugin, QtPluginBase):
    """Qt integration for the Ledger hardware-wallet plugin."""

    icon_unpaired = "ledger_unpaired.png"
    icon_paired = "ledger.png"

    def create_handler(self, window):
        # One GUI handler per window; marshals device prompts to Qt thread.
        return Ledger_Handler(window)

    @only_hook_if_libraries_available
    @hook
    def receive_menu(self, menu, addrs, wallet):
        """Add a "Show on Ledger" action for a single selected address."""
        # Only standard wallets whose keystore belongs to this plugin.
        if type(wallet) is not Standard_Wallet:
            return
        keystore = wallet.get_keystore()
        if type(keystore) == self.keystore_class and len(addrs) == 1:
            def show_address():
                # Run on the keystore's device thread, not the GUI thread.
                keystore.thread.add(partial(self.show_address, wallet, addrs[0]))
            menu.addAction(_("Show on Ledger"), show_address)
class Ledger_Handler(QtHandlerBase):
    """GUI-thread handler for Ledger device prompts (PIN, 2FA, status)."""

    # Signals hop from the device (worker) thread onto the GUI thread.
    setup_signal = pyqtSignal()
    auth_signal = pyqtSignal(object, object)

    def __init__(self, win):
        super(Ledger_Handler, self).__init__(win, 'Ledger')
        self.setup_signal.connect(self.setup_dialog)
        self.auth_signal.connect(self.auth_dialog)

    def word_dialog(self, msg):
        """Prompt for the wallet authentication word (masked input)."""
        response = QInputDialog.getText(self.top_level_window(), "Ledger Wallet Authentication", msg, QLineEdit.Password)
        if not response[1]:
            # Dialog was cancelled.
            self.word = None
        else:
            self.word = str(response[0])
        # Unblock the worker thread waiting on self.done.
        self.done.set()

    def message_dialog(self, msg):
        """Show a non-blocking, button-less status dialog."""
        self.clear_dialog()
        self.dialog = dialog = WindowModalDialog(self.top_level_window(), _("Ledger Status"))
        l = QLabel(msg)
        vbox = QVBoxLayout(dialog)
        vbox.addWidget(l)
        dialog.show()

    def auth_dialog(self, data, client: 'Ledger_Client'):
        """Run the 2FA auth dialog; stores the resulting PIN in self.word."""
        try:
            from .auth2fa import LedgerAuthDialog
        except ImportError as e:
            # Optional dependency missing: surface the error to the user.
            self.message_dialog(repr(e))
            return
        dialog = LedgerAuthDialog(self, data, client=client)
        dialog.exec_()
        self.word = dialog.pin
        self.done.set()

    def get_auth(self, data, *, client: 'Ledger_Client'):
        # Worker-thread entry: emit to the GUI thread, then block until
        # auth_dialog signals completion via self.done.
        self.done.clear()
        self.auth_signal.emit(data, client)
        self.done.wait()
        return self.word

    def get_setup(self):
        # Worker-thread entry for device setup; blocks until setup_dialog ran.
        self.done.clear()
        self.setup_signal.emit()
        self.done.wait()
        return

    def setup_dialog(self):
        self.show_error(_('Initialization of Ledger HW devices is currently disabled.'))
| {
"repo_name": "pooler/electrum-ltc",
"path": "electrum_ltc/plugins/ledger/qt.py",
"copies": "1",
"size": "2768",
"license": "mit",
"hash": -3655205941857157000,
"line_mean": 32.3493975904,
"line_max": 121,
"alpha_frac": 0.6466763006,
"autogenerated": false,
"ratio": 3.6517150395778364,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9791707015485975,
"avg_score": 0.0013368649383723953,
"num_lines": 83
} |
from functools import partial
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtWidgets import QInputDialog, QLabel, QVBoxLayout, QLineEdit
from electrum_mona.i18n import _
from electrum_mona.plugin import hook
from electrum_mona.wallet import Standard_Wallet
from electrum_mona.gui.qt.util import WindowModalDialog
from .ledger import LedgerPlugin, Ledger_Client
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from ..hw_wallet.plugin import only_hook_if_libraries_available
class Plugin(LedgerPlugin, QtPluginBase):
    """Qt integration for the Ledger hardware-wallet plugin."""

    icon_unpaired = "ledger_unpaired.png"
    icon_paired = "ledger.png"

    def create_handler(self, window):
        # One GUI handler per window; marshals device prompts to Qt thread.
        return Ledger_Handler(window)

    @only_hook_if_libraries_available
    @hook
    def receive_menu(self, menu, addrs, wallet):
        """Add a "Show on Ledger" action for a single selected address."""
        # Only standard wallets whose keystore belongs to this plugin.
        if type(wallet) is not Standard_Wallet:
            return
        keystore = wallet.get_keystore()
        if type(keystore) == self.keystore_class and len(addrs) == 1:
            def show_address():
                # Run on the keystore's device thread, not the GUI thread.
                keystore.thread.add(partial(self.show_address, wallet, addrs[0]))
            menu.addAction(_("Show on Ledger"), show_address)
class Ledger_Handler(QtHandlerBase):
    """GUI-thread handler for Ledger device prompts (PIN, 2FA, status)."""

    # Signals hop from the device (worker) thread onto the GUI thread.
    setup_signal = pyqtSignal()
    auth_signal = pyqtSignal(object, object)

    def __init__(self, win):
        super(Ledger_Handler, self).__init__(win, 'Ledger')
        self.setup_signal.connect(self.setup_dialog)
        self.auth_signal.connect(self.auth_dialog)

    def word_dialog(self, msg):
        """Prompt for the wallet authentication word (masked input)."""
        response = QInputDialog.getText(self.top_level_window(), "Ledger Wallet Authentication", msg, QLineEdit.Password)
        if not response[1]:
            # Dialog was cancelled.
            self.word = None
        else:
            self.word = str(response[0])
        # Unblock the worker thread waiting on self.done.
        self.done.set()

    def message_dialog(self, msg):
        """Show a non-blocking, button-less status dialog."""
        self.clear_dialog()
        self.dialog = dialog = WindowModalDialog(self.top_level_window(), _("Ledger Status"))
        l = QLabel(msg)
        vbox = QVBoxLayout(dialog)
        vbox.addWidget(l)
        dialog.show()

    def auth_dialog(self, data, client: 'Ledger_Client'):
        """Run the 2FA auth dialog; stores the resulting PIN in self.word."""
        try:
            from .auth2fa import LedgerAuthDialog
        except ImportError as e:
            # Optional dependency missing: surface the error to the user.
            self.message_dialog(repr(e))
            return
        dialog = LedgerAuthDialog(self, data, client=client)
        dialog.exec_()
        self.word = dialog.pin
        self.done.set()

    def get_auth(self, data, *, client: 'Ledger_Client'):
        # Worker-thread entry: emit to the GUI thread, then block until
        # auth_dialog signals completion via self.done.
        self.done.clear()
        self.auth_signal.emit(data, client)
        self.done.wait()
        return self.word

    def get_setup(self):
        # Worker-thread entry for device setup; blocks until setup_dialog ran.
        self.done.clear()
        self.setup_signal.emit()
        self.done.wait()
        return

    def setup_dialog(self):
        self.show_error(_('Initialization of Ledger HW devices is currently disabled.'))
| {
"repo_name": "wakiyamap/electrum-mona",
"path": "electrum_mona/plugins/ledger/qt.py",
"copies": "1",
"size": "2772",
"license": "mit",
"hash": 2359518758556208000,
"line_mean": 32.3975903614,
"line_max": 121,
"alpha_frac": 0.6471861472,
"autogenerated": false,
"ratio": 3.6569920844327175,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9797493906940855,
"avg_score": 0.0013368649383723953,
"num_lines": 83
} |
from functools import partial
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtWidgets import QWidget, QGridLayout, QPushButton
class SongSelectWidget(QWidget):
    """Grid of numbered song buttons; emits song_selected(number) on press."""

    song_selected = pyqtSignal(int)

    def __init__(self, *args, **kwargs):
        super(SongSelectWidget, self).__init__(*args, **kwargs)
        self.setStyleSheet("""
            QPushButton {
                background-color: white;
                border: 1px solid #cccccc;
                border-radius: 3px;
                padding: 8px 16px;
                margin: 2px;
            }
            QPushButton:pressed {
                background-color: #f9f9f9;
            }
            QPushButton:checked {
                background-color: #D5FCCD;
            }
            QPushButton:disabled {
                background-color: #f3f3f3;
            }
        """)
        layout = QGridLayout()
        layout.setHorizontalSpacing(1)
        layout.setVerticalSpacing(0)
        layout.setContentsMargins(0, 0, 0, 0)
        self.setLayout(layout)
        # Buttons currently in the grid; rebuilt by reset().
        self._buttons = []

    def reset(self, song_numbers):
        """Rebuild the button grid for *song_numbers*.

        Only the listed numbers are enabled; the grid is padded to a
        multiple of 10 and to at least 30 buttons. NOTE(review): the last
        element is used as the maximum, so this assumes *song_numbers* is
        sorted ascending -- confirm with callers.
        """
        for b in self._buttons:
            b.setParent(None)
            b.deleteLater()
        self._buttons = []
        if song_numbers:
            # Round the highest number up to a full row of 10.
            if song_numbers[-1] % 10 == 0:
                max_number = song_numbers[-1]
            else:
                max_number = (song_numbers[-1] // 10 + 1) * 10
            for i in range(max(30, max_number)):
                button = QPushButton()
                button.setCheckable(True)
                button.setAutoExclusive(True)
                button.setText(str(i + 1))
                # Disable buttons for numbers not in the list.
                button.setEnabled(i + 1 in song_numbers)
                self.layout().addWidget(button, i // 10, i % 10)
                button.pressed.connect(partial(self.song_button_pressed, i + 1))
                self._buttons.append(button)

    def song_button_pressed(self, song_number):
        # Re-emit the pressed button's song number as a typed signal.
        self.song_selected.emit(song_number)
| {
"repo_name": "maccesch/songscreen",
"path": "song_select_widget.py",
"copies": "1",
"size": "1980",
"license": "bsd-3-clause",
"hash": 5776669647018893000,
"line_mean": 27.2857142857,
"line_max": 80,
"alpha_frac": 0.5247474747,
"autogenerated": false,
"ratio": 4.159663865546219,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5184411340246219,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtWidgets import QWidget, QVBoxLayout, QLabel, QApplication, QPushButton
class ScreenSelectWidget(QWidget):
    """Vertical list of checkable buttons, one per attached display.

    Emits ``screen_selected(int)`` with the 0-based screen index when a
    button is pressed. Rebuilds itself when the screen count changes.
    """

    screen_selected = pyqtSignal(int)

    def __init__(self, *args, **kwargs):
        super(ScreenSelectWidget, self).__init__(*args, **kwargs)
        self._active_screen = -1  # -1 means no screen selected yet
        self._buttons = []
        layout = QVBoxLayout()
        # layout.addStretch(1)
        layout.addWidget(QLabel(self.tr("Lyrics screen")))
        layout.setContentsMargins(0, 0, 0, 0)
        self.setLayout(layout)
        self.refresh_widget()
        desktop = QApplication.desktop()
        desktop.screenCountChanged.connect(self.refresh_widget)
        # TODO : update to qt 5.6 and uncomment the next line
        # QApplication.instance().primaryScreenChanged.connect(self.refresh_widget)

    def refresh_widget(self, screen_count=0):
        """Recreate one button per screen; *screen_count* <= 0 means autodetect."""
        layout = self.layout()
        for b in self._buttons:
            b.setParent(None)
            b.deleteLater()
        desktop = QApplication.desktop()
        self._buttons = []
        for i in range(screen_count if screen_count > 0 else desktop.screenCount()):
            button = QPushButton()
            button.setCheckable(True)
            button.setAutoExclusive(True)
            if i == self._active_screen:
                button.setChecked(True)
            button.setText("{} {}{}".format(
                self.tr("Screen"),
                i + 1,
                # mark the screen this widget itself is currently on
                "" if desktop.screenNumber(self) != i else " ({})".format(self.tr("this", "refers to the screen"))
            ))
            layout.addWidget(button)
            button.pressed.connect(partial(self._screen_button_pressed, i))
            self._buttons.append(button)
        self.repaint()

    def _screen_button_pressed(self, screen_number):
        """Relay a button press as the screen_selected signal."""
        self.screen_selected.emit(screen_number)

    @property
    def active_screen(self):
        """The currently selected screen index (-1 if none)."""
        return self._active_screen

    @active_screen.setter
    def active_screen(self, value):
        # Clamp to the last available button if value is out of range.
        if value >= len(self._buttons):
            value = len(self._buttons) - 1
        self._active_screen = value
        # BUG FIX: previously this indexed self._buttons unconditionally,
        # raising IndexError when no buttons exist yet (empty list -> index -1).
        # Also removed an unused local `layout` variable.
        if self._buttons:
            self._buttons[self._active_screen].setChecked(True)
| {
"repo_name": "maccesch/songscreen",
"path": "screen_select_widget.py",
"copies": "1",
"size": "2268",
"license": "bsd-3-clause",
"hash": 2168263619252270600,
"line_mean": 29.24,
"line_max": 114,
"alpha_frac": 0.6014109347,
"autogenerated": false,
"ratio": 4.1614678899082564,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5262878824608256,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import (QHBoxLayout, QLabel, QVBoxLayout)
from electroncash.plugins import hook
from electroncash.i18n import _
from electroncash_gui.qt import EnterButton
from electroncash_gui.qt.main_window import ElectrumWindow
from electroncash_gui.qt.util import ThreadedButton, Buttons
from electroncash_gui.qt.util import WindowModalDialog, OkButton, WaitingDialog
from electroncash.util import Weak
from .labels import LabelsPlugin
class LabelsSignalObject(QObject):
    ''' Signals need to be members of a QObject, hence why this class exists. '''
    # emitted (from a non-GUI thread) when a wallet's labels were pulled/changed
    labels_changed_signal = pyqtSignal(object)
    # emitted (from a non-GUI thread) when a wallet is out of sync with the label server
    wallet_not_synched_signal = pyqtSignal(object)
    # emitted with (wallet, exc_info) when a label-server request raises
    request_exception_signal = pyqtSignal(object, object)
def window_parent(w):
    """Return the parent of *w*, whether ``parent`` is a method or an attribute.

    Needed because WindowModalDialog overrides ``window.parent`` with a plain
    attribute, while ordinary widgets expose ``parent()`` as a callable.
    """
    parent = w.parent
    return parent() if callable(parent) else parent
class Plugin(LabelsPlugin):
    """Qt glue for LabelSync (Electron Cash).

    Bridges worker-thread plugin callbacks to the GUI thread through the
    signals on LabelsSignalObject, and tracks one ElectrumWindow per wallet.
    Weak references are used throughout so dialogs/windows can be garbage
    collected while background threads are still running.
    """
    def __init__(self, *args):
        LabelsPlugin.__init__(self, *args)
        self.obj = LabelsSignalObject()
        self.wallet_windows = {}  # wallet -> ElectrumWindow; maintained by start_wallet/stop_wallet
        self.initted = False
    def requires_settings(self):
        # tells the plugins dialog to show a settings button for this plugin
        return True
    def settings_widget(self, window):
        """Return the 'Settings' button shown in the plugins dialog."""
        while window and window_parent(window) and not isinstance(window_parent(window), ElectrumWindow):
            # MacOS fixup -- find window.parent() because we can end up with window.parent() not an ElectrumWindow
            window = window_parent(window)
        windowRef = Weak.ref(window)
        return EnterButton(_('Settings'),
                           partial(self.settings_dialog, windowRef))
    def settings_dialog(self, windowRef):
        """Build and exec the LabelSync settings dialog for the wallet of *windowRef*."""
        window = windowRef() # NB: window is the internal plugins dialog and not the wallet window
        if not window or not isinstance(window_parent(window), ElectrumWindow): return
        wallet = window_parent(window).wallet
        d = WindowModalDialog(window.top_level_window(), _("Label Settings"))
        d.ok_button = OkButton(d)
        dlgRef = Weak.ref(d)
        if wallet in self.wallets:
            # wallet is label-sync capable: offer force upload/download buttons
            class MySigs(QObject):
                ok_button_disable_sig = pyqtSignal(bool)
            d.sigs = MySigs(d)
            d.sigs.ok_button_disable_sig.connect(d.ok_button.setDisabled) # disable ok button while the TaskThread runs ..
            hbox = QHBoxLayout()
            hbox.addWidget(QLabel(_("LabelSync options:")))
            upload = ThreadedButton(_("Force upload"),
                                    partial(Weak(self.do_force_upload), wallet, dlgRef),
                                    partial(Weak(self.done_processing), dlgRef),
                                    partial(Weak(self.error_processing), dlgRef))
            download = ThreadedButton(_("Force download"),
                                      partial(Weak(self.do_force_download), wallet, dlgRef),
                                      partial(Weak(self.done_processing), dlgRef),
                                      partial(Weak(self.error_processing), dlgRef))
            d.thread_buts = (upload, download)
            d.finished.connect(partial(Weak(self.on_dlg_finished), dlgRef))
            vbox = QVBoxLayout()
            vbox.addWidget(upload)
            vbox.addWidget(download)
            hbox.addLayout(vbox)
            vbox = QVBoxLayout(d)
            vbox.addLayout(hbox)
        else:
            vbox = QVBoxLayout(d)
            if wallet.network:
                # has network, so the fact that the wallet isn't in the list means it's incompatible
                l = QLabel('<b>' + _("LabelSync not supported for this wallet type") + '</b>')
                l.setAlignment(Qt.AlignCenter)
                vbox.addWidget(l)
                l = QLabel(_("(Only deterministic wallets are supported)"))
                l.setAlignment(Qt.AlignCenter)
                vbox.addWidget(l)
            else:
                # Does not have network, so we won't speak of incompatibility, but instead remind user offline mode means OFFLINE! ;)
                l = QLabel(_("You are using Electron Cash in offline mode; restart Electron Cash if you want to get connected"))
                l.setWordWrap(True)
                vbox.addWidget(l)
        vbox.addSpacing(20)
        vbox.addLayout(Buttons(d.ok_button))
        return bool(d.exec_())
    def on_dlg_finished(self, dlgRef, result_code):
        ''' Wait for any threaded buttons that may be still extant so we don't get a crash '''
        #self.print_error("Dialog finished with code", result_code)
        dlg = dlgRef()
        if dlg:
            upload, download = dlg.thread_buts
            if upload.thread and upload.thread.isRunning():
                upload.thread.stop(); upload.thread.wait()
            if download.thread and download.thread.isRunning():
                download.thread.stop(); download.thread.wait()
    def do_force_upload(self, wallet, dlgRef):
        # this runs in a NON-GUI thread
        dlg = dlgRef()
        if dlg: dlg.sigs.ok_button_disable_sig.emit(True) # block window closing prematurely which can cause a temporary hang until thread completes
        self.push_thread(wallet)
    def do_force_download(self, wallet, dlgRef):
        # this runs in a NON-GUI thread
        dlg = dlgRef()
        if dlg: dlg.sigs.ok_button_disable_sig.emit(True) # block window closing prematurely which can cause a temporary hang until thread completes
        self.pull_thread(wallet, True)
    def done_processing(self, dlgRef, result):
        # this runs in the GUI thread
        dlg = dlgRef()
        if dlg:
            dlg.ok_button.setEnabled(True)
            self._ok_synched(dlg)
    def _ok_synched(self, window):
        """Show the success message if *window* is still visible."""
        if window.isVisible():
            window.show_message(_("Your labels have been synchronised."))
    def error_processing(self, dlgRef, exc_info):
        # runs in the GUI thread after a threaded button failed
        dlg = dlgRef()
        if dlg:
            dlg.ok_button.setEnabled(True)
            self._notok_synch(dlg, exc_info)
    # per-window flag used to suppress duplicate error dialogs (weakly keyed)
    _warn_dlg_flg = Weak.KeyDictionary()
    def _notok_synch(self, window, exc_info):
        # Runs in main thread
        cls = __class__
        if window.isVisible() and not cls._warn_dlg_flg.get(window, False):
            # Guard against duplicate error dialogs (without this we may get error window spam when importing labels)
            cls._warn_dlg_flg[window] = True
            window.show_warning(_("LabelSync error:") + "\n\n" + str(exc_info[1]), rich_text=False)
            cls._warn_dlg_flg.pop(window, None)
    def on_request_exception(self, wallet, exc_info):
        # not main thread
        self.obj.request_exception_signal.emit(wallet, exc_info)
    def request_exception_slot(self, wallet, exc_info):
        # main thread
        window = self.wallet_windows.get(wallet, None)
        if window: self._notok_synch(window, exc_info)
    def start_wallet(self, wallet, window=None):
        """Start syncing *wallet*; remember its window for GUI callbacks."""
        ret = super().start_wallet(wallet)
        if ret and window:
            self.wallet_windows[wallet] = window
        return ret
    def stop_wallet(self, wallet):
        """Stop syncing *wallet* and forget its window mapping."""
        ret = super().stop_wallet(wallet)
        window = self.wallet_windows.pop(wallet, None)
        return ret
    def on_pulled(self, wallet):
        # not main thread
        super().on_pulled(wallet) # super just logs to print_error
        self.obj.labels_changed_signal.emit(wallet)
    def on_labels_changed(self, wallet):
        # main thread
        window = self.wallet_windows.get(wallet, None)
        if window:
            #self.print_error("On labels changed", wallet.basename())
            window.update_labels()
    def on_wallet_not_synched(self, wallet):
        # not main thread
        self.obj.wallet_not_synched_signal.emit(wallet)
    def wallet_not_synched_slot(self, wallet):
        # main thread: ask the user whether to synchronize now
        window = self.wallet_windows.get(wallet, None)
        if window:
            if window.question(_("LabelSync detected that this wallet is not synched with the label server.")
                               + "\n\n" + _("Synchronize now?")):
                WaitingDialog(window, _("Synchronizing..."),
                              partial(self.pull_thread, wallet, True),
                              lambda *args: self._ok_synched(window),
                              lambda exc: self._notok_synch(window, exc))
    def on_close(self):
        """Tear down: disconnect signals and verify no windows remain."""
        if not self.initted:
            return
        try: self.obj.labels_changed_signal.disconnect(self.on_labels_changed)
        except TypeError: pass # not connected
        try: self.obj.wallet_not_synched_signal.disconnect(self.wallet_not_synched_slot)
        except TypeError: pass # not connected
        try: self.obj.request_exception_signal.disconnect(self.request_exception_slot)
        except TypeError: pass # not connected
        super().on_close()
        assert 0==len(self.wallet_windows), "LabelSync still had extant wallet_windows!"
        self.initted = False
    @hook
    def on_new_window(self, window):
        return self.start_wallet(window.wallet, window)
    @hook
    def on_close_window(self, window):
        return self.stop_wallet(window.wallet)
    @hook
    def init_qt(self, gui):
        """One-time Qt init: connect signals, then adopt already-open windows."""
        if self.initted:
            return
        self.on_init()
        # connect signals. this needs to happen first as below on_new_window depends on these being active
        self.obj.labels_changed_signal.connect(self.on_labels_changed)
        self.obj.wallet_not_synched_signal.connect(self.wallet_not_synched_slot)
        self.obj.request_exception_signal.connect(self.request_exception_slot)
        ct, ct2 = 0, 0
        for window in gui.windows:
            if self.on_new_window(window):
                ct2 += 1
            ct += 1
        self.initted = True
        self.print_error("Initialized (had {} extant windows, added {}).".format(ct,ct2))
| {
"repo_name": "fyookball/electrum",
"path": "plugins/labels/qt.py",
"copies": "1",
"size": "9928",
"license": "mit",
"hash": -6536225406980029000,
"line_mean": 41.6094420601,
"line_max": 148,
"alpha_frac": 0.6144238517,
"autogenerated": false,
"ratio": 4.037413582757218,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5151837434457218,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import (QHBoxLayout, QLabel, QVBoxLayout)
from electrum.plugins import hook
from electrum.i18n import _
from electrum_gui.qt import EnterButton
from electrum_gui.qt.util import ThreadedButton, Buttons
from electrum_gui.qt.util import WindowModalDialog, OkButton
from .labels import LabelsPlugin
class QLabelsSignalObject(QObject):
    """Holds the plugin's Qt signals (signals must live on a QObject)."""
    # emitted (possibly from a non-GUI thread) when a wallet's labels were pulled
    labels_changed_signal = pyqtSignal(object)
class Plugin(LabelsPlugin):
    """Qt glue for the label-sync plugin (Electrum variant)."""
    def __init__(self, *args):
        LabelsPlugin.__init__(self, *args)
        self.obj = QLabelsSignalObject()
    def requires_settings(self):
        # tells the plugins dialog to show a settings button for this plugin
        return True
    def settings_widget(self, window):
        """Return the 'Settings' button shown in the plugins dialog."""
        return EnterButton(_('Settings'),
                           partial(self.settings_dialog, window))
    def settings_dialog(self, window):
        """Build and exec the label-sync settings dialog (force upload/download)."""
        wallet = window.parent().wallet
        d = WindowModalDialog(window, _("Label Settings"))
        hbox = QHBoxLayout()
        hbox.addWidget(QLabel("Label sync options:"))
        upload = ThreadedButton("Force upload",
                                partial(self.push_thread, wallet),
                                partial(self.done_processing, d))
        download = ThreadedButton("Force download",
                                  partial(self.pull_thread, wallet, True),
                                  partial(self.done_processing, d))
        vbox = QVBoxLayout()
        vbox.addWidget(upload)
        vbox.addWidget(download)
        hbox.addLayout(vbox)
        vbox = QVBoxLayout(d)
        vbox.addLayout(hbox)
        vbox.addSpacing(20)
        vbox.addLayout(Buttons(OkButton(d)))
        return bool(d.exec_())
    def on_pulled(self, wallet):
        # called from the sync thread; forward to the GUI thread via the signal
        self.obj.labels_changed_signal.emit(wallet)
    def done_processing(self, dialog, result):
        # GUI-thread completion callback for the threaded buttons above
        dialog.show_message(_("Your labels have been synchronised."))
    @hook
    def on_new_window(self, window):
        self.obj.labels_changed_signal.connect(window.update_tabs)
        self.start_wallet(window.wallet)
    @hook
    def on_close_window(self, window):
        self.stop_wallet(window.wallet)
| {
"repo_name": "digitalbitbox/electrum",
"path": "plugins/labels/qt.py",
"copies": "3",
"size": "2168",
"license": "mit",
"hash": -9154392979003385000,
"line_mean": 31.3582089552,
"line_max": 74,
"alpha_frac": 0.6356088561,
"autogenerated": false,
"ratio": 4.1138519924098675,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 67
} |
from functools import partial
from PyQt5.QtWidgets import (
QPushButton,
QLabel,
QVBoxLayout,
QLineEdit,
QHBoxLayout,
)
from PyQt5.QtCore import Qt, QMetaObject, Q_RETURN_ARG, pyqtSlot
from electrum.gui.qt.util import (
WindowModalDialog,
OkButton,
ButtonsTextEdit,
)
from electrum.i18n import _
from electrum.plugin import hook
from .bitbox02 import BitBox02Plugin
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from ..hw_wallet.plugin import only_hook_if_libraries_available
class Plugin(BitBox02Plugin, QtPluginBase):
    """Qt hooks for the BitBox02 hardware-wallet plugin."""
    icon_unpaired = "bitbox02_unpaired.png"
    icon_paired = "bitbox02.png"
    def create_handler(self, window):
        """Create the per-window GUI handler for this device."""
        return BitBox02_Handler(window)
    @only_hook_if_libraries_available
    @hook
    def receive_menu(self, menu, addrs, wallet):
        # Context menu on each address in the Addresses Tab, right click...
        if len(addrs) != 1:
            return
        for keystore in wallet.get_keystores():
            if type(keystore) == self.keystore_class:
                # keystore=keystore default binds the current loop value,
                # avoiding the late-binding closure pitfall
                def show_address(keystore=keystore):
                    keystore.thread.add(
                        partial(self.show_address, wallet, addrs[0], keystore=keystore)
                    )
                device_name = "{} ({})".format(self.device, keystore.label)
                menu.addAction(_("Show on {}").format(device_name), show_address)
    @only_hook_if_libraries_available
    @hook
    def show_xpub_button(self, mpk_text: ButtonsTextEdit, keystore):
        # user is about to see the "Wallet Information" dialog
        # - add a button to show the xpub on the BitBox02 device
        if type(keystore) != self.keystore_class:
            return
        def on_button_click():
            keystore.thread.add(
                partial(self.show_xpub, keystore=keystore)
            )
        device_name = "{} ({})".format(self.device, keystore.label)
        mpk_text.addButton("eye1.png", on_button_click, _("Show on {}").format(device_name))
class BitBox02_Handler(QtHandlerBase):
    """GUI-thread handler for BitBox02 device interactions."""
    def __init__(self, win):
        super(BitBox02_Handler, self).__init__(win, "BitBox02")
    def message_dialog(self, msg):
        """Show a non-blocking window-modal status dialog displaying *msg*."""
        self.clear_dialog()  # dismiss any previously shown status dialog first
        self.dialog = dialog = WindowModalDialog(
            self.top_level_window(), _("BitBox02 Status")
        )
        l = QLabel(msg)
        vbox = QVBoxLayout(dialog)
        vbox.addWidget(l)
        dialog.show()
    def name_multisig_account(self):
        """Ask the user to name a multisig account; safe to call off the GUI thread.

        Uses a BlockingQueuedConnection so _name_multisig_account runs in the
        GUI thread while the caller blocks until the dialog closes.
        """
        return QMetaObject.invokeMethod(
            self,
            "_name_multisig_account",
            Qt.BlockingQueuedConnection,
            Q_RETURN_ARG(str),
        )
    @pyqtSlot(result=str)
    def _name_multisig_account(self):
        """GUI-thread slot: show the naming dialog and return the entered name."""
        dialog = WindowModalDialog(None, "Create Multisig Account")
        vbox = QVBoxLayout()
        label = QLabel(
            _(
                "Enter a descriptive name for your multisig account.\nYou should later be able to use the name to uniquely identify this multisig account"
            )
        )
        hl = QHBoxLayout()
        hl.addWidget(label)
        name = QLineEdit()
        name.setMaxLength(30)
        name.resize(200, 40)
        he = QHBoxLayout()
        he.addWidget(name)
        okButton = OkButton(dialog)
        hlb = QHBoxLayout()
        hlb.addWidget(okButton)
        hlb.addStretch(2)
        vbox.addLayout(hl)
        vbox.addLayout(he)
        vbox.addLayout(hlb)
        dialog.setLayout(vbox)
        dialog.exec_()
        return name.text().strip()
| {
"repo_name": "spesmilo/electrum",
"path": "electrum/plugins/bitbox02/qt.py",
"copies": "1",
"size": "3535",
"license": "mit",
"hash": -6481809635611866000,
"line_mean": 29.474137931,
"line_max": 154,
"alpha_frac": 0.6059405941,
"autogenerated": false,
"ratio": 3.8257575757575757,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9928145164424094,
"avg_score": 0.0007106010866964772,
"num_lines": 116
} |
from functools import partial
from pyramid.config import Configurator
from sqlalchemy.orm import joinedload
from clld.web.app import menu_item, CtxFactoryQuery
from clld.db.models import common
from clld.interfaces import ICtxFactoryQuery
# we must make sure custom models are known at database initialization!
from dogonlanguages import models
from dogonlanguages.interfaces import IVillage, IFile, IMovie
from dogonlanguages import views
# Dummy translation marker: the _() calls below do nothing at runtime but
# make these strings visible to message-extraction tooling — presumably for
# i18n catalog extraction; confirm against the project's babel/gettext setup.
_ = lambda s: s
_('Parameters')
_('Sources')
_('Contributors')
_('Other')
class CustomFactoryQuery(CtxFactoryQuery):
    def refined_query(self, query, model, req):
        """Add model-specific eager loading to the context-factory query.

        For Contribution contexts, eagerly load the contribution's references
        (with their sources) and its data to avoid N+1 queries when rendering.
        Other models get the query back unchanged.
        """
        if model == common.Contribution:
            query = query.options(
                joinedload(common.Contribution.references)
                .joinedload(common.ContributionReference.source),
                joinedload(common.Contribution.data),
            )
        return query
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.

    Configures routes, the menu, custom resources and static pages for the
    dogonlanguages clld app.
    """
    settings['route_patterns'] = {
        'villages': '/geography.cfm',
        'parameters': '/thesaurus.cfm',
        'sources': '/bibliography.cfm',
        'languages': '/languages.cfm',
        'florafauna': '/florafauna.cfm',
        'bangime': '/bangime.cfm',
        # FIX: raw strings — '\.' is an invalid escape sequence in a plain
        # string literal (DeprecationWarning, a future SyntaxError). The
        # resulting pattern text is unchanged.
        'file': r'/_files/{id:[^/\.]+}',
        'file_alt': r'/_files/{id:[^/\.]+}.{ext}',
    }
    config = Configurator(settings=settings)
    config.include('clldmpg')
    config.register_menu(
        ('dataset', partial(menu_item, 'dataset', label='Home')),
        ('languages', partial(menu_item, 'languages')),
        ('values', partial(menu_item, 'values', label='Lexicon')),
        ('parameters', partial(menu_item, 'parameters', label='Thesaurus')),
        ('villages', partial(menu_item, 'villages', label='Villages')),
        ('florafauna', partial(menu_item, 'florafauna', label='Flora-Fauna')),
        #('contributors', partial(menu_item, 'contributors', label='Project members')),
        ('sources', partial(menu_item, 'sources', label='Materials')),
        #('bangime', partial(menu_item, 'bangime', label='Bangime')),
        #('other', partial(menu_item, 'other', label='Other Languages')),
        ('movies', partial(menu_item, 'movies', label='Videos')),
    )
    # prepend extra components to the home page configuration
    home_comp = config.registry.settings['home_comp']
    home_comp = [
        'bangime', 'other',
        'contributors'] + home_comp
    config.add_settings({'home_comp': home_comp})
    # custom resources with their own index pages
    config.register_resource('village', models.Village, IVillage, with_index=True)
    config.register_resource('movie', models.Movie, IMovie, with_index=True)
    config.register_resource('file', models.File, IFile, with_index=True)
    config.registry.registerUtility(CustomFactoryQuery(), ICtxFactoryQuery)
    # simple static pages
    config.add_page('bangime')
    config.add_page('florafauna')
    config.add_page('other')
    config.add_page('typology')
    return config.make_wsgi_app()
| {
"repo_name": "clld/dogonlanguages",
"path": "dogonlanguages/__init__.py",
"copies": "1",
"size": "3074",
"license": "apache-2.0",
"hash": 8900822941337025000,
"line_mean": 36.950617284,
"line_max": 87,
"alpha_frac": 0.6476903058,
"autogenerated": false,
"ratio": 3.6858513189448443,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48335416247448443,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from pyramid.config import Configurator
from clld import interfaces
from clld.web.app import menu_item
from clld_glottologfamily_plugin.util import LanguageByFamilyMapMarker
# we must make sure custom models are known at database initialization!
from dictionaria import models
from dictionaria import md
# Dummy translation marker: the _() calls below do nothing at runtime but
# make these strings visible to message-extraction tooling — presumably for
# i18n catalog extraction; confirm against the project's babel/gettext setup.
_ = lambda s: s
_('Parameter')
_('Parameters')
_('Sentence')
_('Sentences')
_('Contributor')
_('Contributors')
_('Contribution')
_('Contributions')
def link_attrs(req, obj, **kw):
    """Customize link attributes: unit values link to their unit's page."""
    if interfaces.IUnitValue.providedBy(obj):
        url_kw = kw.pop('url_kw', {})
        kw['href'] = req.route_url('unit', id=obj.unit.id, **url_kw)
    return kw
class MyMapMarker(LanguageByFamilyMapMarker):
    def get_icon(self, ctx, req):
        """Resolve values/valuesets down to their language before icon lookup."""
        if interfaces.IValue.providedBy(ctx):
            ctx = ctx.valueset.language
        if interfaces.IValueSet.providedBy(ctx):
            ctx = ctx.language
        return LanguageByFamilyMapMarker.get_icon(self, ctx, req)
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.

    Registers link customization, the map marker, static pages, the menu
    and metadata adapters for the dictionaria clld app.
    """
    config = Configurator(settings=settings)
    config.registry.registerUtility(link_attrs, interfaces.ILinkAttrs)
    config.include('clldmpg')
    config.include('clld_glottologfamily_plugin')
    config.registry.registerUtility(MyMapMarker(), interfaces.IMapMarker)
    config.add_page('submit')
    config.add_page('help')
    config.register_menu(
        ('dataset', partial(menu_item, 'dataset', label='Home')),
        ('contributions', partial(menu_item, 'contributions')),
        ('contributors', partial(menu_item, 'contributors')),
        ('sentences', partial(menu_item, 'sentences')),
        ('help', lambda ctx, rq: (rq.route_url('help'), u'Help')),
    )
    config.add_settings(home_comp=['submit', 'languages'] + config.get_settings()['home_comp'])
    # register BibTex/ReferenceManager downloads for contributions
    for cls in [md.BibTex, md.ReferenceManager]:
        for if_ in [interfaces.IRepresentation, interfaces.IMetadata]:
            config.register_adapter(cls, interfaces.IContribution, if_, name=cls.mimetype)
    return config.make_wsgi_app()
| {
"repo_name": "clld/dictionaria",
"path": "dictionaria/__init__.py",
"copies": "1",
"size": "2102",
"license": "apache-2.0",
"hash": 6941073361912910000,
"line_mean": 31.84375,
"line_max": 95,
"alpha_frac": 0.6883920076,
"autogenerated": false,
"ratio": 3.681260945709282,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4869652953309282,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from PySide import QtCore, QtGui
from jukeboxcore.gui.widgetdelegate import CommentDelegate
class AbstractLevel(object):
    """Mixin for QtGui.QWidget for a level of a browser

    A level is a widget that should display data of a specific root index
    of its model. So it can be just a regular view, but it can also be a combobox.
    It can also emit a signal to state that the level below this one should have a new
    root index. You are free to emit the signal in whatever case you want.

    When subclassing implement :meth:`AbstractLevel.model_changed`, :meth:`AbstractLevel.set_root`, :meth:`AbstractLevel.selected_indexes`.
    """
    new_root = QtCore.Signal(QtCore.QModelIndex)
    """This signal says that the level under this one
    should update its root index to the one of the signal.
    """
    def __init__(self, *args, **kwargs):
        """Constructs a new level. All arguments will be passed on.

        :raises: None
        """
        super(AbstractLevel, self).__init__(*args, **kwargs)
        self._model = None  # set later via set_model()
    def set_model(self, m):
        """Set the model for the level

        Resets the root (emits new_root with an invalid index) and notifies
        the subclass via model_changed().

        :param m: the model that the level should use
        :type m: QtCore.QAbstractItemModel
        :returns: None
        :rtype: None
        :raises: None
        """
        self._model = m
        self.new_root.emit(QtCore.QModelIndex())
        self.model_changed(m)
    def get_model(self, ):
        """Return the model that the level uses

        :returns: the model
        :rtype: QtCore.QAbstractItemModel|None
        :raises: None
        """
        return self._model
    def model_changed(self, model):
        """Abstract method that should handle the case that someone set the model

        When a level instance is created, the model is None. So it has to be set afterwards.
        Then this method will be called and your level should somehow use the model

        :param model: the model that the level should use | None
        :type model: QtCore.QAbstractItemModel
        :returns: None
        :rtype: None
        :raises: NotImplementedError
        """
        raise NotImplementedError
    def set_root(self, index):
        """Abstract method that should make the level use the given index as root

        The index might also be invalid! In that case show an empty level.
        The index might also be from a different model. In that case change the
        model of the level::

            if self.get_model() != index.model():
                self.set_model(index.model())

        :param index: the new root index
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: NotImplementedError
        """
        raise NotImplementedError
    def selected_indexes(self, ):
        """Abstract method that should return the "selected" indexes.

        Selected does not mean, selected like Qt refers to the term. It just means
        that this level has some indexes that seam to be of importance right now.
        E.g. your level is a combobox, then the selected indexes would just consit of the
        current index. If your level is a regular view, then you could indeed return the selected
        indexes.

        :returns: the 'selected' indexes of the level
        :rtype: list of QtCore.QModelIndex
        :raises: NotImplementedError
        """
        raise NotImplementedError
    def set_index(self, index):
        """Set the current index of the level to the given one

        The given index should be the new root for levels below.
        You should make sure that the new_root signal will be emitted.

        :param index: the new index
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: NotImplementedError
        """
        raise NotImplementedError
class AbstractTreeBrowser(QtGui.QWidget):
    """A abstract class for a treebrowser

    A tree browser can be compared to a column view.
    The browser uses a tree model and on initialisation
    creates levels up to a certain depth.
    Each level displays on level of hierarchy of the model.

    When subclassing implement :meth:`AbstractTreeBrowser.create_level`,
    :meth:`AbstractTreeBrowser.add_lvl_to_ui`, :meth:`AbstractTreeBrowser.create_level`
    and for headers reimplement :meth:`AbstractTreeBrowser.create_header`
    """
    def __init__(self, depth, parent=None, flags=0):
        """Constructs an AbstractTreeBrowser

        :param depth: the depth of the browser
        :type depth: int
        :param parent: the parent of the widget
        :type parent: QtGui.QWidget
        :param flags: the flags for the widget
        :type flags: QtCore.Qt.WindowFlags
        :raises: None
        """
        super(AbstractTreeBrowser, self).__init__(parent, flags)
        self._depth = depth
        self._levels = []  # one AbstractLevel per hierarchy depth, built by build_browser()
        self.model = None
    def build_browser(self, ):
        """Creates all levels and adds them to the ui

        :returns: None
        :rtype: None
        :raises: None
        """
        for i in range(self._depth):
            self._new_level(i)
    def create_level(self, depth):
        """Create and return a level for the given depth

        The model and root of the level will be automatically set by the browser.

        :param depth: the depth level that the level should handle
        :type depth: int
        :returns: a new level for the given depth
        :rtype: :class:`jukeboxcore.gui.widgets.browser.AbstractLevel`
        :raises: NotImplementedError
        """
        raise NotImplementedError
    def create_header(self, depth):
        """Create and return a widget that will be used as a header for the given depth

        Override this method if you want to have header widgets.
        The default implementation returns None.
        You can return None if you do not want a header for the given depth

        :param depth: the depth level
        :type depth: int
        :returns: a Widget that is used for the header or None
        :rtype: QtGui.QWidget|None
        :raises: None
        """
        return None
    def add_lvl_to_ui(self, level, header):
        """Abstract method that is responsible for inserting the level and header
        into the ui.

        :param level: a newly created level
        :type level: :class:`jukeboxcore.gui.widgets.browser.AbstractLevel`
        :param header: a newly created header
        :type header: QtCore.QWidget|None
        :returns: None
        :rtype: None
        :raises: NotImplementedError
        """
        raise NotImplementedError
    def set_model(self, model):
        """Set all levels' model to the given one

        :param model: the model that the levels should use
        :type model: QtCore.QAbstractItemModel
        :returns: None
        :rtype: None
        :raises: None
        """
        # Only the top level gets the model directly: AbstractLevel.set_model
        # emits new_root, which (via the connections made in _new_level)
        # propagates the model/root down to the lower levels one by one,
        # so the initial state after set_model is correct.
        self.model = model
        self._levels[0].set_model(model)
    def set_root(self, depth, index):
        """Set the level's root of the given depth to index

        Out-of-range depths are silently ignored (the bottom level's
        new_root signal targets a depth that does not exist).

        :param depth: the depth level
        :type depth: int
        :param index: the new root index
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        if depth < len(self._levels):
            self._levels[depth].set_root(index)
    def _new_level(self, depth):
        """Create a new level and header and connect signals

        The level's new_root signal is wired to set the root of the level
        one depth below it.

        :param depth: the depth level
        :type depth: int
        :returns: None
        :rtype: None
        :raises: None
        """
        l = self.create_level(depth)
        h = self.create_header(depth)
        self.add_lvl_to_ui(l, h)
        l.new_root.connect(partial(self.set_root, depth+1))
        self._levels.append(l)
    def get_level(self, depth):
        """Return the level for the given depth

        :param depth: the hierarchy level
        :type depth: int
        :returns: the level widget
        :rtype: :class:`AbstractLevel`
        :raises: None
        """
        return self._levels[depth]
    def get_depth(self, ):
        """Return the current depth of the browser

        :returns: the hierarchy depth of the browser
        :rtype: int
        :raises: None
        """
        return self._depth
    def selected_indexes(self, depth):
        """Get the selected indexes of a certain depth level

        :param depth: the depth level
        :type depth: int
        :returns: the selected indexes of the given depth level
        :rtype: list of QtCore.QModelIndex
        :raises: None
        """
        return self._levels[depth].selected_indexes()
    def set_index(self, depth, index):
        """Set the level at the given depth to the given index

        :param depth: addresses the level at the given depth
        :type depth: int
        :param index: the index to set the level to
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        self._levels[depth].set_index(index)
class CBLevel(AbstractLevel, QtGui.QComboBox):
    """A level that consists of a simple combobox to be used in a TreeBrowser
    """
    def __init__(self, parent=None):
        """Constructs a new cblevel with the given parent

        :param parent: the parent widget
        :type parent: QtGui.QWidget
        :raises: None
        """
        super(CBLevel, self).__init__(parent)
        self.currentIndexChanged.connect(self.current_changed)
    def model_changed(self, model):
        """Apply the model to the combobox

        When a level instance is created, the model is None. So it has to be set afterwards.
        Then this method will be called and your level should somehow use the model

        :param model: the model that the level should use
        :type model: QtCore.QAbstractItemModel
        :returns: None
        :rtype: None
        :raises: None
        """
        self.setModel(model)
    def set_root(self, index):
        """Set the given index as root index of the combobox

        Selects the first child if there is one, otherwise clears the
        selection (current index -1).

        :param index: the new root index
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        if not index.isValid():
            self.setCurrentIndex(-1)
            return
        if self.model() != index.model():
            # root comes from a different model: adopt that model first
            self.setModel(index.model())
        self.setRootModelIndex(index)
        if self.model().rowCount(index):
            self.setCurrentIndex(0)
        else:
            self.setCurrentIndex(-1)
    def selected_indexes(self, ):
        """Return the current index

        :returns: the current index in a list
        :rtype: list of QtCore.QModelIndex
        :raises: None
        """
        i = self.model().index(self.currentIndex(), 0, self.rootModelIndex())
        return [i]
    def current_changed(self, i):
        """Slot for when the current index changes.

        Emits the :data:`AbstractLevel.new_root` signal.

        :param i: the row of the new current index
        :type i: int
        :returns: None
        :rtype: None
        :raises: None
        """
        m = self.model()
        ri = self.rootModelIndex()
        index = m.index(i, 0, ri)
        self.new_root.emit(index)
    def set_index(self, index):
        """Set the current index to the row of the given index

        :param index: the index to set the level to
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        self.setCurrentIndex(index.row())
class ComboBoxBrowser(AbstractTreeBrowser):
    """A tree browser that has a combo box for every level
    and a label for every header.

    The header labels will be next to each combobox.
    """

    def __init__(self, depth, parent=None, flags=0, headers=None):
        """Constructs a new ComboBoxBrowser with the given depth

        :param depth: the depth of the browser
        :type depth: int
        :param parent: the parent of the widget
        :type parent: QtGui.QWidget
        :param flags: the flags for the widget
        :type flags: QtCore.Qt.WindowFlags
        :param headers: a list of label texts to put for the labels next to the comboboxes
                        the list does not need to have the length of ``depth``.
                        If the list is None, no headers will be created.
        :type headers: list of str|None
        :raises: None
        """
        super(ComboBoxBrowser, self).__init__(depth, parent, flags)
        self._headertexts = headers
        self.setup_ui()
        self.build_browser()

    def setup_ui(self, ):
        """Create the layouts and set some attributes of the ui

        :returns: None
        :rtype: None
        :raises: None
        """
        grid = QtGui.QGridLayout(self)
        grid.setContentsMargins(0, 0, 0, 0)
        self.setLayout(grid)

    def create_level(self, depth):
        """Create and return a level for the given depth

        The model and root of the level will be automatically set by the browser.

        :param depth: the depth level that the level should handle
        :type depth: int
        :returns: a new level for the given depth
        :rtype: :class:`CBLevel`
        :raises: None
        """
        cb = CBLevel(parent=self)
        return cb

    def create_header(self, depth):
        """Create and return a widget that will be used as a header for the given depth

        Returns None if no header text is configured for the given depth.

        :param depth: the depth level
        :type depth: int
        :returns: a Widget that is used for the header or None
        :rtype: QtGui.QWidget|None
        :raises: None
        """
        # Bug fix: ``headers`` may be None (documented as allowed), in which
        # case ``len(self._headertexts)`` would raise a TypeError.
        if not self._headertexts:
            return
        if not (0 <= depth < len(self._headertexts)):
            return
        txt = self._headertexts[depth]
        if txt is None:
            return
        lbl = QtGui.QLabel(txt, self)
        return lbl

    def add_lvl_to_ui(self, level, header):
        """Insert the level and header into the ui.

        :param level: a newly created level
        :type level: :class:`jukeboxcore.gui.widgets.browser.AbstractLevel`
        :param header: a newly created header
        :type header: QtCore.QWidget|None
        :returns: None
        :rtype: None
        :raises: None
        """
        lay = self.layout()
        rc = lay.rowCount()
        # levels go in column 1; headers, if any, in column 0 of the same row
        lay.addWidget(level, rc + 1, 1)
        if header is not None:
            lay.addWidget(header, rc + 1, 0)
        # let the level column take all extra horizontal space
        lay.setColumnStretch(1, 1)
class ListLevel(AbstractLevel, QtGui.QListView):
    """A level that consists of a listview to be used in a TreeBrowser
    """

    def __init__(self, parent=None):
        """Constructs a new listlevel with the given parent

        :param parent: the parent widget
        :type parent: QtGui.QWidget
        :raises: None
        """
        super(ListLevel, self).__init__(parent)

    def model_changed(self, model):
        """Apply the model to the list view

        When a level instance is created, the model is None. So it has to be set afterwards.
        Then this method will be called and your level should somehow use the model

        :param model: the model that the level should use
        :type model: QtCore.QAbstractItemModel
        :returns: None
        :rtype: None
        :raises: None
        """
        self.setModel(model)
        # select the first row to update all lists below this one,
        # because currentChanged is not triggered by setModel
        if model is not None:
            self.setCurrentIndex(self.model().index(0, 0))

    def set_root(self, index):
        """Set the given index as root index of list

        :param index: the new root index
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        if not index.isValid():
            self.setModel(None)  # so we will not see toplevel stuff
            self.setCurrentIndex(QtCore.QModelIndex())
            self.new_root.emit(QtCore.QModelIndex())
            return
        if self.model() != index.model():
            self.setModel(index.model())
        self.setRootIndex(index)
        if self.model().hasChildren(index):
            # select the first child and propagate it as the new root
            # for the next level
            self.setCurrentIndex(self.model().index(0, 0, index))
            self.new_root.emit(self.model().index(0, 0, index))
        else:
            # no children: the next level gets an invalid root
            self.new_root.emit(QtCore.QModelIndex())

    def selected_indexes(self, ):
        """Return the current index

        :returns: the current index in a list
        :rtype: list of QtCore.QModelIndex
        :raises: None
        """
        return [self.currentIndex()]

    def currentChanged(self, current, prev):
        """Slot for when the current index changes.

        Emits the :data:`AbstractLevel.new_root` signal.

        :param current: the new current index
        :type current: QtCore.QModelIndex
        :param prev: the previous current index
        :type prev: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        m = self.model()
        p = current.parent()
        # re-resolve to column 0 so the emitted root is always in column 0
        index = m.index(current.row(), 0, p)
        self.new_root.emit(index)
        return super(ListLevel, self).currentChanged(current, prev)

    def set_index(self, index):
        """Set the current index to the row of the given index

        :param index: the index to set the level to
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        self.setCurrentIndex(index)
        self.new_root.emit(index)
        self.scrollTo(index)

    def resizeEvent(self, event):
        """Schedules an item layout if resize mode is "adjust". Somehow this is
        needed for correctly scaling down items.

        The reason this was reimplemented was the CommentDelegate.

        :param event: the resize event
        :type event: QtCore.QEvent
        :returns: None
        :rtype: None
        :raises: None
        """
        if self.resizeMode() == self.Adjust:
            self.scheduleDelayedItemsLayout()
        return super(ListLevel, self).resizeEvent(event)
class ListBrowser(AbstractTreeBrowser):
    """A tree browser that has a list view for every level
    and a label for every header.

    The header labels will be above each list.
    """

    def __init__(self, depth, parent=None, flags=0, headers=None):
        """Constructs a new ListBrowser with the given depth

        :param depth: the depth of the browser
        :type depth: int
        :param parent: the parent of the widget
        :type parent: QtGui.QWidget
        :param flags: the flags for the widget
        :type flags: QtCore.Qt.WindowFlags
        :param headers: a list of label texts to put for the labels above the lists
                        the list does not need to have the length of ``depth``.
                        If the list is None or an element is None, no headers will be created.
        :type headers: list of str|None
        :raises: None
        """
        super(ListBrowser, self).__init__(depth, parent, flags)
        self._headertexts = headers
        self.setup_ui()
        self.build_browser()

    def setup_ui(self, ):
        """Create the layouts and set some attributes of the ui

        :returns: None
        :rtype: None
        :raises: None
        """
        grid = QtGui.QGridLayout(self)
        self.setLayout(grid)
        self.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        # all levels live side by side in one horizontal splitter
        self.splitter = QtGui.QSplitter(QtCore.Qt.Horizontal, self)
        grid.addWidget(self.splitter)
        grid.setContentsMargins(0, 0, 0, 0)

    def create_level(self, depth):
        """Create and return a level for the given depth

        The model and root of the level will be automatically set by the browser.

        :param depth: the depth level that the level should handle
        :type depth: int
        :returns: a new level for the given depth
        :rtype: :class:`jukeboxcore.gui.widgets.browser.AbstractLevel`
        :raises: None
        """
        ll = ListLevel(parent=self)
        return ll

    def create_header(self, depth):
        """Create and return a widget that will be used as a header for the given depth

        Returns None if no header text is configured for the given depth.

        :param depth: the depth level
        :type depth: int
        :returns: a Widget that is used for the header or None
        :rtype: QtGui.QWidget|None
        :raises: None
        """
        # Bug fix: ``headers`` may be None (documented as allowed), in which
        # case ``len(self._headertexts)`` would raise a TypeError.
        if not self._headertexts:
            return
        if not (0 <= depth < len(self._headertexts)):
            return
        txt = self._headertexts[depth]
        if txt is None:
            return
        lbl = QtGui.QLabel(txt, self)
        return lbl

    def add_lvl_to_ui(self, level, header):
        """Insert the level and header into the ui.

        :param level: a newly created level
        :type level: :class:`jukeboxcore.gui.widgets.browser.AbstractLevel`
        :param header: a newly created header
        :type header: QtCore.QWidget|None
        :returns: None
        :rtype: None
        :raises: None
        """
        # wrap header and level in one container widget so the splitter
        # treats them as a single section
        w = QtGui.QWidget(self)
        vbox = QtGui.QVBoxLayout()
        vbox.setContentsMargins(0, 0, 0, 0)
        w.setLayout(vbox)
        if header is not None:
            vbox.addWidget(header)
        vbox.addWidget(level)
        self.splitter.addWidget(w)
class CommentBrowser(ListBrowser):
    """A :class:`ListBrowser` specialized for displaying notes.

    The lists are designed to display
    :class:`jukeboxcore.gui.djitemdata.NoteItemData` and render their
    items with a :class:`CommentDelegate`.
    """

    def create_level(self, depth):
        """Create and return a list level configured for comment display.

        The model and root of the level will be automatically set by the browser.

        :param depth: the depth level that the level should handle
        :type depth: int
        :returns: a new level for the given depth
        :rtype: :class:`jukeboxcore.gui.widgets.browser.AbstractLevel`
        :raises: None
        """
        lvl = ListLevel(parent=self)
        lvl.setEditTriggers(lvl.DoubleClicked | lvl.SelectedClicked | lvl.CurrentChanged)
        lvl.setResizeMode(lvl.Adjust)
        # keep a reference to the delegate so it is not garbage collected
        self.delegate = CommentDelegate(lvl)
        lvl.setItemDelegate(self.delegate)
        lvl.setVerticalScrollMode(lvl.ScrollPerPixel)
        return lvl
| {
"repo_name": "JukeboxPipeline/jukebox-core",
"path": "src/jukeboxcore/gui/widgets/browser.py",
"copies": "1",
"size": "23060",
"license": "bsd-3-clause",
"hash": 8719329090429435000,
"line_mean": 32.1798561151,
"line_max": 139,
"alpha_frac": 0.6115784909,
"autogenerated": false,
"ratio": 4.3575207860922145,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5469099276992214,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from PySide import QtGui, QtCore
from jukeboxcore.gui.widgets.reftrackwidget_ui import Ui_ReftrackWidget
from jukeboxcore.gui.widgets.optionselector_ui import Ui_OptionSelector
from jukeboxcore.gui.widgets.browser import ComboBoxBrowser
from jukeboxcore.gui.widgetdelegate import WidgetDelegate
from jukeboxcore.gui.main import JB_Dialog, get_icon
from jukeboxcore.gui.reftrackitemdata import REFTRACK_OBJECT_ROLE
class OptionSelector(JB_Dialog, Ui_OptionSelector):
    """Widget to select options when importing or referencing

    Shows a :class:`ComboBoxBrowser` over the options of a reftrack.
    The chosen option is stored in ``self.selected`` once the dialog
    is accepted.
    """

    def __init__(self, reftrack, parent=None):
        """Initialize a new OptionSelector

        :param reftrack: the reftrack to show options for
        :type reftrack: :class:`jukeboxcore.reftrack.Reftrack`
        :param parent: the parent widget
        :type parent: :class:`QtGui.QWidget`
        :raises: None
        """
        super(OptionSelector, self).__init__(parent)
        self.setupUi(self)
        # the chosen taskfileinfo; stays None until the dialog is accepted
        self.selected = None
        self.reftrack = reftrack
        self.setup_ui()
        self.setup_signals()
        options = reftrack.get_options()
        self.browser.set_model(options)
        # show only the relevant model column in each combobox level
        columns = self.reftrack.get_option_columns()
        for i, c in enumerate(columns):
            self.browser.get_level(i).setModelColumn(c)
        self.adjustSize()

    def setup_ui(self, ):
        """Setup the ui

        Creates a combobox browser with one level per option label.

        :returns: None
        :rtype: None
        :raises: None
        """
        labels = self.reftrack.get_option_labels()
        self.browser = ComboBoxBrowser(len(labels), headers=labels)
        self.browser_vbox.addWidget(self.browser)

    def setup_signals(self, ):
        """Connect the signals with the slots to make the ui functional

        :returns: None
        :rtype: None
        :raises: None
        """
        self.select_pb.clicked.connect(self.select)

    def select(self, ):
        """Store the selected taskfileinfo in ``self.selected`` and accept the dialog

        Does nothing if nothing is selected.

        :returns: None
        :rtype: None
        :raises: None
        """
        # the selection of the deepest level determines the chosen option
        s = self.browser.selected_indexes(self.browser.get_depth()-1)
        if not s:
            return
        i = s[0].internalPointer()
        if i:
            tfi = i.internal_data()
            self.selected = tfi
            self.accept()
class ReftrackWidget(Ui_ReftrackWidget, QtGui.QFrame):
    """Widget to display Reftracks in a Widgetdelegate
    """

    def __init__(self, parent=None):
        """Initialize a new ReftrackWidget

        :param parent: widget parent
        :type parent: QtGui.QWidget
        :raises: None
        """
        super(ReftrackWidget, self).__init__(parent)
        self.setupUi(self)
        # the Reftrack that is currently displayed; assigned in set_index
        self.reftrack = None
        self.setup_ui()
        self.setup_signals()
        # remember the default background color so it can be restored when
        # a status does not provide its own (see set_top_bar_color)
        self.upper_fr_default_bg_color = self.upper_fr.palette().color(QtGui.QPalette.Window)

    def setup_ui(self, ):
        """Setup the ui

        :returns: None
        :rtype: None
        :raises: None
        """
        self.setup_icons()

    def setup_icons(self, ):
        """Setup the icons of the ui

        :returns: None
        :rtype: None
        :raises: None
        """
        iconbtns = [("menu_border_24x24.png", self.menu_tb),
                    ("duplicate_border_24x24.png", self.duplicate_tb),
                    ("delete_border_24x24.png", self.delete_tb),
                    ("reference_border_24x24.png", self.reference_tb),
                    ("load_border_24x24.png", self.load_tb),
                    ("unload_border_24x24.png", self.unload_tb),
                    ("replace_border_24x24.png", self.replace_tb),
                    ("import_border_24x24.png", self.importref_tb),
                    ("import_border_24x24.png", self.importtf_tb),
                    ("alien.png", self.alien_tb),
                    ("imported.png", self.imported_tb)]
        for iconname, btn in iconbtns:
            i = get_icon(iconname, asicon=True)
            btn.setIcon(i)

    def setup_signals(self, ):
        """Connect the signals with the slots to make the ui functional

        :returns: None
        :rtype: None
        :raises: None
        """
        self.duplicate_tb.clicked.connect(self.duplicate)
        self.delete_tb.clicked.connect(self.delete)
        self.load_tb.clicked.connect(self.load)
        self.unload_tb.clicked.connect(self.unload)
        self.reference_tb.clicked.connect(self.reference)
        self.importtf_tb.clicked.connect(self.import_file)
        self.importref_tb.clicked.connect(self.import_reference)
        self.replace_tb.clicked.connect(self.replace)
        # the status buttons only toggle between icon-only and icon+text
        self.imported_tb.clicked.connect(partial(self.toggle_tbstyle, button=self.imported_tb))
        self.alien_tb.clicked.connect(partial(self.toggle_tbstyle, button=self.alien_tb))

    def set_index(self, index):
        """Display the data of the given index

        :param index: the index to paint
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        self.index = index
        # column 18 of the row carries the Reftrack object itself
        self.reftrack = index.model().index(index.row(), 18, index.parent()).data(REFTRACK_OBJECT_ROLE)
        self.set_maintext(self.index)
        self.set_identifiertext(self.index)
        self.set_type_icon(self.index)
        self.disable_restricted()
        self.hide_restricted()
        self.set_top_bar_color(self.index)
        self.set_status_buttons()
        self.set_menu()

    def set_maintext(self, index):
        """Set the maintext_lb to display text information about the given reftrack

        :param index: the index
        :type index: :class:`QtGui.QModelIndex`
        :returns: None
        :rtype: None
        :raises: None
        """
        dr = QtCore.Qt.DisplayRole
        text = ""
        model = index.model()
        # join the display text of the selected data columns with " | "
        for i in (1, 2, 3, 5, 6):
            new = model.index(index.row(), i, index.parent()).data(dr)
            if new is not None:
                text = " | ".join((text, new)) if text else new
        self.maintext_lb.setText(text)

    def set_identifiertext(self, index):
        """Set the identifier text on the identifier_lb

        :param index: the index
        :type index: :class:`QtGui.QModelIndex`
        :returns: None
        :rtype: None
        :raises: None
        """
        dr = QtCore.Qt.DisplayRole
        # column 17 holds the zero-based identifier; display it one-based,
        # or "#-1" when there is none
        t = index.model().index(index.row(), 17, index.parent()).data(dr)
        if t is None:
            t = -1
        else:
            t = t+1
        self.identifier_lb.setText("#%s" % t)

    def set_type_icon(self, index):
        """Set the type icon on type_icon_lb

        :param index: the index
        :type index: :class:`QtGui.QModelIndex`
        :returns: None
        :rtype: None
        :raises: None
        """
        icon = index.model().index(index.row(), 0, index.parent()).data(QtCore.Qt.DecorationRole)
        if icon:
            pix = icon.pixmap(self.type_icon_lb.size())
            self.type_icon_lb.setPixmap(pix)
        else:
            # NOTE(review): relies on PySide coercing None to a null
            # QPixmap -- confirm this does not raise a TypeError
            self.type_icon_lb.setPixmap(None)

    def disable_restricted(self, ):
        """Disable the restricted buttons

        :returns: None
        :rtype: None
        :raises: None
        """
        todisable = [(self.reftrack.duplicate, self.duplicate_tb),
                     (self.reftrack.delete, self.delete_tb),
                     (self.reftrack.reference, self.reference_tb),
                     (self.reftrack.replace, self.replace_tb),]
        for action, btn in todisable:
            res = self.reftrack.is_restricted(action)
            btn.setDisabled(res)

    def hide_restricted(self, ):
        """Hide the restricted buttons

        The buttons come in complementary pairs (load/unload,
        import file/import reference); only the allowed one of each
        pair is shown.

        :returns: None
        :rtype: None
        :raises: None
        """
        tohide = [((self.reftrack.unload, self.unload_tb),
                   (self.reftrack.load, self.load_tb)),
                  ((self.reftrack.import_file, self.importtf_tb),
                   (self.reftrack.import_reference, self.importref_tb))]
        for (action1, btn1), (action2, btn2) in tohide:
            res1 = self.reftrack.is_restricted(action1)
            res2 = self.reftrack.is_restricted(action2)
            if res1 != res2:
                btn1.setEnabled(True)
                btn1.setHidden(res1)
                btn2.setHidden(res2)
            else:  # both are restricted, then show one but disable it
                # NOTE(review): this branch is also taken when BOTH actions
                # are unrestricted -- verify that is intended
                btn1.setDisabled(True)
                btn1.setVisible(True)
                btn2.setVisible(False)

    def set_top_bar_color(self, index):
        """Set the color of the upper frame to the background color of the reftrack status

        :param index: the index
        :type index: :class:`QtGui.QModelIndex`
        :returns: None
        :rtype: None
        :raises: None
        """
        # NOTE(review): queries ForegroundRole although the docstring talks
        # about the background color -- verify which role the model stores
        # the status color in
        dr = QtCore.Qt.ForegroundRole
        c = index.model().index(index.row(), 8, index.parent()).data(dr)
        if not c:
            c = self.upper_fr_default_bg_color
        self.upper_fr.setStyleSheet('background-color: rgb(%s, %s, %s)' % (c.red(), c.green(), c.blue()))

    def set_status_buttons(self, ):
        """Depending on the status of the reftrack, enable or disable
        the status buttons, for imported/alien status buttons

        :returns: None
        :rtype: None
        :raises: None
        """
        imported = self.reftrack.status() == self.reftrack.IMPORTED
        alien = self.reftrack.alien()
        for btn, enable in [(self.imported_tb, imported),
                            (self.alien_tb, alien)]:
            btn.setEnabled(enable)
            # reset to icon-only; clicking the button toggles the text
            btn.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)

    def toggle_tbstyle(self, button):
        """Toggle the ToolButtonStyle of the given button between :data:`ToolButtonIconOnly` and :data:`ToolButtonTextBesideIcon`

        :param button: a tool button
        :type button: :class:`QtGui.QToolButton`
        :returns: None
        :rtype: None
        :raises: None
        """
        old = button.toolButtonStyle()
        if old == QtCore.Qt.ToolButtonIconOnly:
            new = QtCore.Qt.ToolButtonTextBesideIcon
        else:
            new = QtCore.Qt.ToolButtonIconOnly
        button.setToolButtonStyle(new)

    def set_menu(self, ):
        """Setup the menu that the menu_tb button uses

        :returns: None
        :rtype: None
        :raises: None
        """
        self.menu = QtGui.QMenu(self)
        actions = self.reftrack.get_additional_actions()
        # keep references to the QActions so they are not garbage collected
        self.actions = []
        for a in actions:
            if a.icon:
                qaction = QtGui.QAction(a.icon, a.name, self)
            else:
                qaction = QtGui.QAction(a.name, self)
            qaction.setCheckable(a.checkable)
            qaction.setChecked(a.checked)
            qaction.setEnabled(a.enabled)
            qaction.triggered.connect(a.action)
            self.actions.append(qaction)
            self.menu.addAction(qaction)
        self.menu_tb.setMenu(self.menu)

    def get_taskfileinfo_selection(self, ):
        """Return a taskfileinfo that the user chose from the available options

        Opens a modal :class:`OptionSelector`; returns None if the user
        cancels the dialog.

        :returns: the chosen taskfileinfo
        :rtype: :class:`jukeboxcore.filesys.TaskFileInfo`
        :raises: None
        """
        sel = OptionSelector(self.reftrack)
        sel.exec_()
        return sel.selected

    def duplicate(self, ):
        """Duplicate the current reftrack

        :returns: None
        :rtype: None
        :raises: None
        """
        self.reftrack.duplicate()

    def delete(self, ):
        """Delete the current reftrack

        :returns: None
        :rtype: None
        :raises: None
        """
        self.reftrack.delete()

    def load(self):
        """Load the current reftrack

        :returns: None
        :rtype: None
        :raises: None
        """
        self.reftrack.load()

    def unload(self, ):
        """Unload the current reftrack

        :returns: None
        :rtype: None
        :raises: None
        """
        self.reftrack.unload()

    def reference(self, ):
        """Reference a file

        Asks the user for a taskfile; does nothing if none is chosen.

        :returns: None
        :rtype: None
        :raises: None
        """
        tfi = self.get_taskfileinfo_selection()
        if tfi:
            self.reftrack.reference(tfi)

    def import_file(self, ):
        """Import a file

        Asks the user for a taskfile; does nothing if none is chosen.

        :returns: None
        :rtype: None
        :raises: NotImplementedError
        """
        tfi = self.get_taskfileinfo_selection()
        if tfi:
            self.reftrack.import_file(tfi)

    def import_reference(self, ):
        """Import the reference of the current reftrack

        :returns: None
        :rtype: None
        :raises: None
        """
        self.reftrack.import_reference()

    def replace(self, ):
        """Replace the current reftrack

        Asks the user for a taskfile; does nothing if none is chosen.

        :returns: None
        :rtype: None
        :raises: None
        """
        tfi = self.get_taskfileinfo_selection()
        if tfi:
            self.reftrack.replace(tfi)
class ReftrackDelegate(WidgetDelegate):
    """A delegate for drawing a :class:`jukeboxcore.gui.reftrackitemdata.ReftrackItemData`

    Uses a :class:`ReftrackWidget` both for painting and editing.
    """

    def __init__(self, parent=None):
        """Initialize a new ReftrackDelegate

        :param parent: the parent object of the delegate
        :type parent: QtCore.QObject | None
        :raises: None
        """
        super(ReftrackDelegate, self).__init__(parent)

    def create_widget(self, parent=None):
        """Return a widget that should get painted by the delegate

        You might want to use this in :meth:`WidgetDelegate.createEditor`

        :param parent: the parent widget for the created widget
        :type parent: QtGui.QWidget | None
        :returns: The created widget | None
        :rtype: QtGui.QWidget | None
        :raises: None
        """
        return ReftrackWidget(parent)

    def set_widget_index(self, index):
        """Set the index for the widget. The widget should retrieve data from the index and display it.

        You might want use the same function as for :meth:`WidgetDelegate.setEditorData`.

        :param index: the index to paint
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        self.widget.set_index(index)

    def create_editor_widget(self, parent, option, index):
        """Return the editor to be used for editing the data item with the given index.

        Note that the index contains information about the model being used.
        The editor's parent widget is specified by parent, and the item options by option.

        :param parent: the parent widget
        :type parent: QtGui.QWidget
        :param option: the options for painting
        :type option: QtGui.QStyleOptionViewItem
        :param index: the index to paint
        :type index: QtCore.QModelIndex
        :returns: the editor widget
        :rtype: QtGui.QWidget
        :raises: None
        """
        # editing uses the same widget type as painting
        return self.create_widget(parent)

    def setEditorData(self, editor, index):
        """Sets the contents of the given editor to the data for the item at the given index.

        Note that the index contains information about the model being used.

        :param editor: the editor widget
        :type editor: QtGui.QWidget
        :param index: the index to paint
        :type index: QtCore.QModelIndex
        :returns: None
        :rtype: None
        :raises: None
        """
        editor.set_index(index)
| {
"repo_name": "JukeboxPipeline/jukebox-core",
"path": "src/jukeboxcore/gui/widgets/reftrackwidget.py",
"copies": "1",
"size": "15322",
"license": "bsd-3-clause",
"hash": 631177368306332700,
"line_mean": 30.7883817427,
"line_max": 129,
"alpha_frac": 0.5807988513,
"autogenerated": false,
"ratio": 3.923687580025608,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5004486431325608,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from pytest import fixture
from ..generic import GenericOAuthenticator
from .mocks import setup_oauth_mock
def user_model(username, **kwargs):
    """Build a fake userinfo payload for the mocked OAuth provider.

    :param username: value for the ``username`` field
    :param kwargs: extra fields merged into the payload (may override defaults)
    :returns: dict with ``username``, a default ``scope`` and the extras
    """
    payload = dict(username=username, scope='basic')
    payload.update(kwargs)
    return payload
def _get_authenticator(**kwargs):
    """Create a GenericOAuthenticator configured for the mock server."""
    token_url = 'https://generic.horse/oauth/access_token'
    userdata_url = 'https://generic.horse/oauth/userinfo'
    return GenericOAuthenticator(
        token_url=token_url, userdata_url=userdata_url, **kwargs
    )
def get_simple_handler(generic_client):
    """Return a request handler authenticated as the default test user 'wash'."""
    return generic_client.handler_for_user(user_model('wash'))
@fixture
def generic_client(client):
    """Fixture: the shared mock client configured as a generic OAuth host."""
    setup_oauth_mock(
        client,
        host='generic.horse',
        access_token_path='/oauth/access_token',
        user_path='/oauth/userinfo',
    )
    return client
@fixture
def get_authenticator(generic_client, **kwargs):
    """Fixture: factory producing authenticators bound to the mock http client.

    NOTE(review): pytest never passes ``**kwargs`` to a fixture function,
    so the parameter appears to be dead -- verify before removing.
    """
    return partial(_get_authenticator, http_client=generic_client)
async def test_generic(get_authenticator, generic_client):
    """Authenticating a plain user yields the name and a full auth state."""
    authenticator = get_authenticator()
    handler = get_simple_handler(generic_client)
    user_info = await authenticator.authenticate(handler)
    assert sorted(user_info) == ['auth_state', 'name']
    name = user_info['name']
    assert name == 'wash'
    auth_state = user_info['auth_state']
    assert 'access_token' in auth_state
    assert 'oauth_user' in auth_state
    assert 'refresh_token' in auth_state
    assert 'scope' in auth_state
async def test_generic_callable_username_key(get_authenticator, generic_client):
    """A callable username_key derives the name from the userinfo response."""
    authenticator = get_authenticator(username_key=lambda r: r['alternate_username'])
    handler = generic_client.handler_for_user(
        user_model('wash', alternate_username='zoe')
    )
    user_info = await authenticator.authenticate(handler)
    assert user_info['name'] == 'zoe'
async def test_generic_callable_groups_claim_key_with_allowed_groups(
    get_authenticator, generic_client
):
    """A callable claim_groups_key can extract groups from nested claims."""
    authenticator = get_authenticator(
        scope=['openid', 'profile', 'roles'],
        claim_groups_key=lambda r: r.get('policies').get('roles'),
        allowed_groups=['super_user'],
    )
    handler = generic_client.handler_for_user(
        user_model('wash', alternate_username='zoe', policies={'roles': ['super_user']})
    )
    user_info = await authenticator.authenticate(handler)
    assert user_info['name'] == 'wash'
async def test_generic_groups_claim_key_with_allowed_groups(
    get_authenticator, generic_client
):
    """Membership in an allowed group authorizes the user."""
    authenticator = get_authenticator(
        scope=['openid', 'profile', 'roles'],
        claim_groups_key='groups',
        allowed_groups=['super_user'],
    )
    handler = generic_client.handler_for_user(
        user_model('wash', alternate_username='zoe', groups=['super_user'])
    )
    user_info = await authenticator.authenticate(handler)
    assert user_info['name'] == 'wash'
async def test_generic_groups_claim_key_with_allowed_groups_unauthorized(
    get_authenticator, generic_client
):
    """Users outside the allowed groups are rejected (authenticate returns None)."""
    authenticator = get_authenticator(
        scope=['openid', 'profile', 'roles'],
        claim_groups_key='groups',
        allowed_groups=['user'],
    )
    handler = generic_client.handler_for_user(
        user_model('wash', alternate_username='zoe', groups=['public'])
    )
    user_info = await authenticator.authenticate(handler)
    assert user_info is None
async def test_generic_groups_claim_key_with_allowed_groups_and_admin_groups(
    get_authenticator, generic_client
):
    """Membership in an admin group marks the authenticated user as admin."""
    authenticator = get_authenticator(
        scope=['openid', 'profile', 'roles'],
        claim_groups_key='groups',
        allowed_groups=['user'],
        admin_groups=['administrator'],
    )
    handler = generic_client.handler_for_user(
        user_model('wash', alternate_username='zoe', groups=['user', 'administrator'])
    )
    user_info = await authenticator.authenticate(handler)
    assert user_info['name'] == 'wash'
    assert user_info['admin'] is True
async def test_generic_groups_claim_key_with_allowed_groups_and_admin_groups_not_admin(
    get_authenticator, generic_client
):
    """Allowed but non-admin users are authenticated with admin=False."""
    authenticator = get_authenticator(
        scope=['openid', 'profile', 'roles'],
        claim_groups_key='groups',
        allowed_groups=['user'],
        admin_groups=['administrator'],
    )
    handler = generic_client.handler_for_user(
        user_model('wash', alternate_username='zoe', groups=['user'])
    )
    user_info = await authenticator.authenticate(handler)
    assert user_info['name'] == 'wash'
    assert user_info['admin'] is False
async def test_generic_callable_groups_claim_key_with_allowed_groups_and_admin_groups(
    get_authenticator, generic_client
):
    """Callable claim key works together with allowed groups, admin groups
    and a callable username key."""
    authenticator = get_authenticator(
        username_key=lambda r: r['alternate_username'],
        scope=['openid', 'profile', 'roles'],
        claim_groups_key=lambda r: r.get('policies').get('roles'),
        allowed_groups=['user', 'public'],
        admin_groups=['administrator'],
    )
    handler = generic_client.handler_for_user(
        user_model(
            'wash',
            alternate_username='zoe',
            policies={'roles': ['user', 'administrator']},
        )
    )
    user_info = await authenticator.authenticate(handler)
    assert user_info['name'] == 'zoe'
    assert user_info['admin'] is True
| {
"repo_name": "jupyterhub/oauthenticator",
"path": "oauthenticator/tests/test_generic.py",
"copies": "1",
"size": "5347",
"license": "bsd-3-clause",
"hash": -8354341320816043000,
"line_mean": 30.6390532544,
"line_max": 88,
"alpha_frac": 0.6575649897,
"autogenerated": false,
"ratio": 3.7708039492242595,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9926310900961064,
"avg_score": 0.0004116075926392453,
"num_lines": 169
} |
from functools import partial
from pythonforandroid.toolchain import Recipe, shprint, current_directory
import sh
class OpenSSLRecipe(Recipe):
    """Build recipe for the OpenSSL shared libraries (libssl, libcrypto)."""

    version = '1.0.2e'
    url = 'https://www.openssl.org/source/openssl-{version}.tar.gz'

    def should_build(self, arch):
        """Build only if the shared libraries are not installed yet."""
        return not self.has_libs(arch, 'libssl.so', 'libcrypto.so')

    def check_symbol(self, env, sofile, symbol):
        """Return True if ``sofile`` exports ``symbol``.

        Used to detect incomplete builds (see :meth:`build_arch`).
        """
        nm = env.get('NM', 'nm')
        syms = sh.sh('-c', "{} -gp {} | cut -d' ' -f3".format(
            nm, sofile), _env=env).splitlines()
        if symbol in syms:
            return True
        print('{} missing symbol {}; rebuilding'.format(sofile, symbol))
        return False

    def get_recipe_env(self, arch=None):
        """Extend the base env so LDFLAGS are also passed when compiling."""
        env = super(OpenSSLRecipe, self).get_recipe_env(arch)
        env['CFLAGS'] += ' ' + env['LDFLAGS']
        env['CC'] += ' ' + env['LDFLAGS']
        return env

    def select_build_arch(self, arch):
        """Map a python-for-android arch to an OpenSSL Configure target."""
        aname = arch.arch
        if 'arm64' in aname:
            return 'linux-aarch64'
        if 'v7a' in aname:
            return 'android-armv7'
        if 'arm' in aname:
            return 'android'
        return 'linux-armv4'

    def build_arch(self, arch):
        """Configure, patch and build OpenSSL for the given arch."""
        env = self.get_recipe_env(arch)
        with current_directory(self.get_build_dir(arch.arch)):
            # sh fails with code 255 trying to execute ./Configure
            # so instead we manually run perl passing in Configure
            perl = sh.Command('perl')
            buildarch = self.select_build_arch(arch)
            shprint(perl, 'Configure', 'shared', 'no-dso', 'no-krb5', buildarch, _env=env)
            self.apply_patch('disable-sover.patch', arch.arch)
            check_crypto = partial(self.check_symbol, env, 'libcrypto.so')
            # check_ssl = partial(self.check_symbol, env, 'libssl.so')
            # rebuild until the expected symbols actually end up in libcrypto
            # NOTE(review): this loops forever if a symbol never appears
            while True:
                shprint(sh.make, 'build_libs', _env=env)
                if all(map(check_crypto, ('SSLeay', 'MD5_Transform', 'MD4_Init'))):
                    break
                shprint(sh.make, 'clean', _env=env)

        self.install_libs(arch, 'libssl.so', 'libcrypto.so')


recipe = OpenSSLRecipe()
| {
"repo_name": "bob-the-hamster/python-for-android",
"path": "pythonforandroid/recipes/openssl/__init__.py",
"copies": "1",
"size": "2168",
"license": "mit",
"hash": 7289603634022912000,
"line_mean": 35.7457627119,
"line_max": 90,
"alpha_frac": 0.5705719557,
"autogenerated": false,
"ratio": 3.5599343185550083,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4630506274255008,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from pythonforandroid.toolchain import Recipe, shprint, current_directory
import sh
class OpenSSLRecipe(Recipe):
    """Build recipe for the OpenSSL shared libraries (libssl, libcrypto)."""

    version = '1.0.2g'
    url = 'https://www.openssl.org/source/openssl-{version}.tar.gz'

    def should_build(self, arch):
        """Build only if the shared libraries are not installed yet."""
        return not self.has_libs(arch, 'libssl.so', 'libcrypto.so')

    def check_symbol(self, env, sofile, symbol):
        """Return True if ``sofile`` exports ``symbol``.

        Used to detect incomplete builds (see :meth:`build_arch`).
        """
        nm = env.get('NM', 'nm')
        syms = sh.sh('-c', "{} -gp {} | cut -d' ' -f3".format(
            nm, sofile), _env=env).splitlines()
        if symbol in syms:
            return True
        print('{} missing symbol {}; rebuilding'.format(sofile, symbol))
        return False

    def get_recipe_env(self, arch=None):
        """Extend the base env so LDFLAGS are also passed when compiling."""
        env = super(OpenSSLRecipe, self).get_recipe_env(arch)
        env['CFLAGS'] += ' ' + env['LDFLAGS']
        env['CC'] += ' ' + env['LDFLAGS']
        return env

    def select_build_arch(self, arch):
        """Map a python-for-android arch to an OpenSSL Configure target."""
        aname = arch.arch
        if 'arm64' in aname:
            return 'linux-aarch64'
        if 'v7a' in aname:
            return 'android-armv7'
        if 'arm' in aname:
            return 'android'
        return 'linux-armv4'

    def build_arch(self, arch):
        """Configure, patch and build OpenSSL for the given arch."""
        env = self.get_recipe_env(arch)
        with current_directory(self.get_build_dir(arch.arch)):
            # sh fails with code 255 trying to execute ./Configure
            # so instead we manually run perl passing in Configure
            perl = sh.Command('perl')
            buildarch = self.select_build_arch(arch)
            shprint(perl, 'Configure', 'shared', 'no-dso', 'no-krb5', buildarch, _env=env)
            self.apply_patch('disable-sover.patch', arch.arch)
            check_crypto = partial(self.check_symbol, env, 'libcrypto.so')
            # check_ssl = partial(self.check_symbol, env, 'libssl.so')
            # rebuild until the expected symbols actually end up in libcrypto
            # NOTE(review): this loops forever if a symbol never appears
            while True:
                shprint(sh.make, 'build_libs', _env=env)
                if all(map(check_crypto, ('SSLeay', 'MD5_Transform', 'MD4_Init'))):
                    break
                shprint(sh.make, 'clean', _env=env)

        self.install_libs(arch, 'libssl.so', 'libcrypto.so')


recipe = OpenSSLRecipe()
| {
"repo_name": "kived/python-for-android",
"path": "pythonforandroid/recipes/openssl/__init__.py",
"copies": "1",
"size": "2168",
"license": "mit",
"hash": -8236347342745823000,
"line_mean": 35.7457627119,
"line_max": 90,
"alpha_frac": 0.5705719557,
"autogenerated": false,
"ratio": 3.5599343185550083,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4630506274255008,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from pythonforandroid.toolchain import Recipe, shprint, current_directory
import sh
class OpenSSLRecipe(Recipe):
    """python-for-android recipe building OpenSSL shared libraries.

    This variant renames the produced libraries to include the OpenSSL
    version (via rename-shared-lib.patch), so the installed files are
    e.g. ``libssl1.0.2h.so``.
    """

    version = '1.0.2h'
    url = 'https://www.openssl.org/source/openssl-{version}.tar.gz'

    def should_build(self, arch):
        # Build only when the versioned libs are not installed yet.
        return not self.has_libs(arch, 'libssl' + self.version + '.so',
                                 'libcrypto' + self.version + '.so')

    def check_symbol(self, env, sofile, symbol):
        """Return True when *sofile* exports *symbol* (checked via nm)."""
        nm = env.get('NM', 'nm')
        syms = sh.sh('-c', "{} -gp {} | cut -d' ' -f3".format(
            nm, sofile), _env=env).splitlines()
        if symbol in syms:
            return True
        print('{} missing symbol {}; rebuilding'.format(sofile, symbol))
        return False

    def get_recipe_env(self, arch=None):
        env = super(OpenSSLRecipe, self).get_recipe_env(arch)
        # Exposed so the patches/build can substitute the version string.
        env['OPENSSL_VERSION'] = self.version
        # NOTE(review): LDFLAGS folded into CFLAGS/CC — presumably because
        # OpenSSL's build ignores LDFLAGS directly; confirm.
        env['CFLAGS'] += ' ' + env['LDFLAGS']
        env['CC'] += ' ' + env['LDFLAGS']
        env['MAKE'] = 'make'  # This removes the '-j5', which isn't safe
        return env

    def select_build_arch(self, arch):
        """Map the Android arch name onto an OpenSSL Configure target."""
        aname = arch.arch
        if 'arm64' in aname:
            return 'linux-aarch64'
        if 'v7a' in aname:
            return 'android-armv7'
        if 'arm' in aname:
            return 'android'
        if 'x86' in aname:
            return 'android-x86'
        # Fallback for anything unrecognized.
        return 'linux-armv4'

    def build_arch(self, arch):
        """Configure, build and install the versioned libssl/libcrypto."""
        env = self.get_recipe_env(arch)
        with current_directory(self.get_build_dir(arch.arch)):
            # sh fails with code 255 trying to execute ./Configure
            # so instead we manually run perl passing in Configure
            perl = sh.Command('perl')
            buildarch = self.select_build_arch(arch)
            shprint(perl, 'Configure', 'shared', 'no-dso', 'no-krb5', buildarch, _env=env)
            self.apply_patch('disable-sover.patch', arch.arch)
            self.apply_patch('rename-shared-lib.patch', arch.arch)
            # check_ssl = partial(self.check_symbol, env, 'libssl' + self.version + '.so')
            check_crypto = partial(self.check_symbol, env, 'libcrypto' + self.version + '.so')
            # Retry the build until libcrypto exports the expected symbols.
            while True:
                shprint(sh.make, 'build_libs', _env=env)
                if all(map(check_crypto, ('SSLeay', 'MD5_Transform', 'MD4_Init'))):
                    break
                shprint(sh.make, 'clean', _env=env)
            self.install_libs(arch, 'libssl' + self.version + '.so',
                              'libcrypto' + self.version + '.so')
# Module-level instance picked up by the python-for-android recipe loader.
recipe = OpenSSLRecipe()
| {
"repo_name": "ibobalo/python-for-android",
"path": "pythonforandroid/recipes/openssl/__init__.py",
"copies": "1",
"size": "2597",
"license": "mit",
"hash": 1629172824179409400,
"line_mean": 38.3484848485,
"line_max": 94,
"alpha_frac": 0.554871005,
"autogenerated": false,
"ratio": 3.6220362622036264,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4676907267203626,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from ray.rllib.utils.annotations import override, PublicAPI, DeveloperAPI
from ray.rllib.utils.framework import try_import_tf, try_import_tfp, \
try_import_torch
from ray.rllib.utils.deprecation import deprecation_warning, renamed_agent, \
renamed_class, renamed_function
from ray.rllib.utils.filter_manager import FilterManager
from ray.rllib.utils.filter import Filter
from ray.rllib.utils.numpy import sigmoid, softmax, relu, one_hot, fc, lstm, \
SMALL_NUMBER, LARGE_INTEGER, MIN_LOG_NN_OUTPUT, MAX_LOG_NN_OUTPUT
from ray.rllib.utils.schedules import LinearSchedule, PiecewiseSchedule, \
PolynomialSchedule, ExponentialSchedule, ConstantSchedule
from ray.rllib.utils.test_utils import check, check_compute_single_action, \
framework_iterator
from ray.tune.utils import merge_dicts, deep_update
def add_mixins(base, mixins):
    """Returns a new class with mixins applied in priority order."""
    remaining = list(mixins or [])
    # Layer from lowest to highest priority so that the first mixin in the
    # input list ends up earliest in the resulting MRO.
    for mixin in reversed(remaining):
        class new_base(mixin, base):
            pass
        base = new_base
    return base
def force_list(elements=None, to_tuple=False):
    """
    Makes sure `elements` is returned as a list, whether `elements` is a single
    item, already a list, or a tuple.
    Args:
        elements (Optional[any]): The inputs as single item, list, or tuple to
            be converted into a list/tuple. If None, returns empty list/tuple.
        to_tuple (bool): Whether to use tuple (instead of list).
    Returns:
        Union[list,tuple]: All given elements in a list/tuple depending on
            `to_tuple`'s value. If elements is None,
            returns an empty list/tuple.
    """
    ctor = tuple if to_tuple is True else list
    if elements is None:
        return ctor()
    # Exact type check (not isinstance) on purpose: subclasses of list/tuple
    # are treated as single items, matching the original semantics.
    if type(elements) in (list, tuple):
        return ctor(elements)
    return ctor([elements])
class NullContextManager:
    """No-op context manager: entering and exiting do nothing.

    Useful as a drop-in replacement where a `with` statement is required
    but no resource needs managing.
    """

    def __init__(self):
        pass

    def __enter__(self):
        return None

    def __exit__(self, *args):
        # Returning None (falsy) lets any exception propagate unchanged.
        return None
# Convenience alias: like force_list but always returns a tuple.
force_tuple = partial(force_list, to_tuple=True)

# Public re-export surface of this utils package.
__all__ = [
    "add_mixins",
    "check",
    "check_compute_single_action",
    "deep_update",
    "deprecation_warning",
    "fc",
    "force_list",
    "force_tuple",
    "framework_iterator",
    "lstm",
    "merge_dicts",
    "one_hot",
    "override",
    "relu",
    "renamed_function",
    "renamed_agent",
    "renamed_class",
    "sigmoid",
    "softmax",
    "try_import_tf",
    "try_import_tfp",
    "try_import_torch",
    "ConstantSchedule",
    "DeveloperAPI",
    "ExponentialSchedule",
    "Filter",
    "FilterManager",
    "LARGE_INTEGER",
    "LinearSchedule",
    "MAX_LOG_NN_OUTPUT",
    "MIN_LOG_NN_OUTPUT",
    "PiecewiseSchedule",
    "PolynomialSchedule",
    "PublicAPI",
    "SMALL_NUMBER",
]
| {
"repo_name": "ray-project/ray",
"path": "rllib/utils/__init__.py",
"copies": "1",
"size": "2849",
"license": "apache-2.0",
"hash": -7719769847469798000,
"line_mean": 25.6261682243,
"line_max": 79,
"alpha_frac": 0.6507546508,
"autogenerated": false,
"ratio": 3.657252888318357,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9808007539118357,
"avg_score": 0,
"num_lines": 107
} |
from functools import partial
from rb.utils import iteritems
class Promise(object):
    """A promise object that attempts to mirror the ES6 APIs for promise
    objects. Unlike ES6 promises this one however also directly gives
    access to the underlying value and it has some slightly different
    static method names as this promise can be resolved externally.
    """

    # A promise is in exactly one of three states: "pending", "resolved"
    # or "rejected"; slots keep instances small.
    __slots__ = ("value", "reason", "_state", "_callbacks", "_errbacks")

    def __init__(self):
        #: the value that this promise holds if it's resolved.
        self.value = None
        #: the reason for this promise if it's rejected.
        self.reason = None
        self._state = "pending"
        self._callbacks = []
        self._errbacks = []

    @staticmethod
    def resolved(value):
        """Creates a promise object resolved with a certain value."""
        p = Promise()
        p._state = "resolved"
        p.value = value
        return p

    @staticmethod
    def rejected(reason):
        """Creates a promise object rejected with a certain value."""
        p = Promise()
        p._state = "rejected"
        p.reason = reason
        return p

    @staticmethod
    def all(iterable_or_dict):
        """A promise that resolves when all passed promises resolve. You can
        either pass a list or a dictionary of promises.
        """
        if isinstance(iterable_or_dict, dict):
            return _promise_from_dict(iterable_or_dict)
        return _promise_from_iterable(iterable_or_dict)

    def resolve(self, value):
        """Resolves the promise with the given value.

        Resolving with another promise chains onto that promise instead of
        storing the promise object itself as the value.

        Raises RuntimeError if the promise has already been settled.
        """
        if self is value:
            raise TypeError("Cannot resolve promise with itself.")
        if isinstance(value, Promise):
            value.done(self.resolve, self.reject)
            return
        if self._state != "pending":
            raise RuntimeError("Promise is no longer pending.")
        self.value = value
        self._state = "resolved"
        # Snapshot and clear the list; callbacks registered after this point
        # fire immediately from done() based on the state.
        callbacks = self._callbacks
        self._callbacks = None
        for callback in callbacks:
            callback(value)

    def reject(self, reason):
        """Rejects the promise with the given reason.

        Raises RuntimeError if the promise has already been settled.
        """
        if self._state != "pending":
            raise RuntimeError("Promise is no longer pending.")
        self.reason = reason
        self._state = "rejected"
        errbacks = self._errbacks
        self._errbacks = None
        for errback in errbacks:
            errback(reason)

    @property
    def is_pending(self):
        """`True` if the promise is still pending, `False` otherwise."""
        return self._state == "pending"

    @property
    def is_resolved(self):
        """`True` if the promise was resolved, `False` otherwise."""
        return self._state == "resolved"

    @property
    def is_rejected(self):
        """`True` if the promise was rejected, `False` otherwise."""
        return self._state == "rejected"

    def done(self, on_success=None, on_failure=None):
        """Attaches some callbacks to the promise and returns the promise.

        Callbacks registered on an already-settled promise are invoked
        synchronously, right here.
        """
        if on_success is not None:
            if self._state == "pending":
                self._callbacks.append(on_success)
            elif self._state == "resolved":
                on_success(self.value)
        if on_failure is not None:
            if self._state == "pending":
                self._errbacks.append(on_failure)
            elif self._state == "rejected":
                on_failure(self.reason)
        return self

    def then(self, success=None, failure=None):
        """A utility method to add success and/or failure callback to the
        promise which will also return another promise in the process.
        """
        rv = Promise()

        # Exceptions raised by the user callback reject the derived promise.
        def on_success(v):
            try:
                rv.resolve(success(v))
            except Exception as e:
                rv.reject(e)

        def on_failure(r):
            try:
                rv.resolve(failure(r))
            except Exception as e:
                rv.reject(e)

        self.done(on_success, on_failure)
        return rv

    def __repr__(self):
        if self._state == "pending":
            v = "(pending)"
        elif self._state == "rejected":
            v = repr(self.reason) + " (rejected)"
        else:
            v = repr(self.value)
        return "<%s %s>" % (self.__class__.__name__, v,)
def _ensure_promise(value):
    """Wrap *value* into a resolved Promise unless it already is one."""
    if isinstance(value, Promise):
        return value
    return Promise.resolved(value)
def _promise_from_iterable(iterable):
    """Return a promise resolving to the list of all values once every
    promise in *iterable* has resolved; rejects on the first rejection."""
    promises = [_ensure_promise(item) for item in iterable]
    if not promises:
        return Promise.resolved([])
    outstanding = set(promises)
    result = Promise()

    def _on_resolved(promise, value):
        outstanding.discard(promise)
        if not outstanding:
            # Preserve input order by reading values off the original list.
            result.resolve([p.value for p in promises])

    for promise in promises:
        promise.done(partial(_on_resolved, promise), result.reject)
    return result
def _promise_from_dict(d):
    """Return a promise resolving to a dict of all values once every
    promise value in *d* has resolved; rejects on the first rejection."""
    wrapped = dict((key, _ensure_promise(value)) for key, value in iteritems(d))
    if not wrapped:
        return Promise.resolved({})
    outstanding = set(wrapped.keys())
    result = Promise()

    def _on_resolved(key, value):
        outstanding.discard(key)
        if not outstanding:
            result.resolve(dict((k, p.value) for k, p in iteritems(wrapped)))

    for key, promise in iteritems(wrapped):
        promise.done(partial(_on_resolved, key), result.reject)
    return result
| {
"repo_name": "getsentry/rb",
"path": "rb/promise.py",
"copies": "1",
"size": "5350",
"license": "apache-2.0",
"hash": 7046956473560502000,
"line_mean": 28.8882681564,
"line_max": 77,
"alpha_frac": 0.5757009346,
"autogenerated": false,
"ratio": 4.144074360960496,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5219775295560496,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from rest_framework import fields, serializers
from access import acl
from users.models import UserProfile
from mkt.api.serializers import PotatoCaptchaSerializer
class AccountSerializer(serializers.ModelSerializer):
    """Serializer for a user's own account settings (display name only)."""

    class Meta:
        model = UserProfile
        fields = ('display_name',)

    # DRF 2.x field-level hook: validate_<field_name>.
    def validate_display_name(self, attrs, source):
        """Validate that display_name is not empty"""
        value = attrs.get(source)
        if value is None or not value.strip():
            raise serializers.ValidationError("This field is required")
        return attrs

    def transform_display_name(self, obj, value):
        """Return obj.name instead of display_name to handle users without
        a valid display_name."""
        return obj.name


class FeedbackSerializer(PotatoCaptchaSerializer):
    """Feedback submission payload, captcha-protected via the base class."""

    feedback = fields.CharField()
    platform = fields.CharField(required=False)
    chromeless = fields.CharField(required=False)
    from_url = fields.CharField(required=False)
    user = fields.Field()

    def validate(self, attrs):
        attrs = super(FeedbackSerializer, self).validate(attrs)
        if not attrs.get('platform'):
            # Fall back to the 'dev' querystring param when no explicit
            # platform was sent.
            attrs['platform'] = self.request.GET.get('dev', '')
        attrs['user'] = self.request.amo_user
        return attrs


class LoginSerializer(serializers.Serializer):
    # NOTE(review): assertion/audience fields suggest a BrowserID/Persona
    # login payload — confirm against the consuming view.
    assertion = fields.CharField(required=True)
    audience = fields.CharField(required=False)
    is_mobile = fields.BooleanField(required=False, default=False)


class NewsletterSerializer(serializers.Serializer):
    # Newsletter signup only needs an email address.
    email = fields.EmailField()


class PermissionsSerializer(serializers.Serializer):
    """Exposes the ACL permission flags of the requesting user."""

    permissions = fields.SerializerMethodField('get_permissions')

    def get_permissions(self, obj):
        request = self.context['request']
        # Bind the request once; each entry below is an ACL check.
        allowed = partial(acl.action_allowed, request)
        permissions = {
            'admin': allowed('Admin', '%'),
            'developer': request.amo_user.is_app_developer,
            'localizer': allowed('Localizers', '%'),
            'lookup': allowed('AccountLookup', '%'),
            'curator': allowed('Collections', 'Curate'),
            'reviewer': acl.action_allowed(request, 'Apps', 'Review'),
            'webpay': (allowed('Transaction', 'NotifyFailure')
                       and allowed('ProductIcon', 'Create')),
            'stats': allowed('Stats', 'View'),
            'revenue_stats': allowed('RevenueStats', 'View'),
        }
        return permissions


class UserSerializer(AccountSerializer):
    """
    A wacky serializer type that unserializes PK numbers and
    serializes user fields.
    """
    resource_uri = serializers.HyperlinkedRelatedField(
        view_name='account-settings', source='pk',
        read_only=True)

    class Meta:
        model = UserProfile
        fields = ('display_name', 'resource_uri')
| {
"repo_name": "robhudson/zamboni",
"path": "mkt/account/serializers.py",
"copies": "1",
"size": "2881",
"license": "bsd-3-clause",
"hash": -1700625465175926,
"line_mean": 31.7386363636,
"line_max": 74,
"alpha_frac": 0.6560222145,
"autogenerated": false,
"ratio": 4.432307692307693,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 88
} |
from functools import partial
from rest_framework import fields, serializers
from mkt.access import acl
from mkt.api.fields import ReverseChoiceField
from mkt.api.serializers import PotatoCaptchaSerializer
from mkt.users.models import UserProfile
class AccountSerializer(serializers.ModelSerializer):
    """Serializer for a user's own account settings (display name only)."""

    class Meta:
        model = UserProfile
        fields = ('display_name',)

    # DRF 2.x field-level hook: validate_<field_name>.
    def validate_display_name(self, attrs, source):
        """Validate that display_name is not empty"""
        value = attrs.get(source)
        if value is None or not value.strip():
            raise serializers.ValidationError("This field is required")
        return attrs

    def transform_display_name(self, obj, value):
        """Return obj.name instead of display_name to handle users without
        a valid display_name."""
        return obj.name


class FeedbackSerializer(PotatoCaptchaSerializer):
    """Feedback submission payload, captcha-protected via the base class."""

    feedback = fields.CharField()
    platform = fields.CharField(required=False)
    chromeless = fields.CharField(required=False)
    from_url = fields.CharField(required=False)
    user = fields.Field()

    def validate(self, attrs):
        attrs = super(FeedbackSerializer, self).validate(attrs)
        if not attrs.get('platform'):
            attrs['platform'] = self.request.GET.get('dev', '')
        # Anonymous feedback is allowed; user is None in that case.
        if self.request.user.is_authenticated():
            attrs['user'] = self.request.user
        else:
            attrs['user'] = None
        return attrs


class LoginSerializer(serializers.Serializer):
    # NOTE(review): assertion/audience fields suggest a BrowserID/Persona
    # login payload — confirm against the consuming view.
    assertion = fields.CharField(required=True)
    audience = fields.CharField(required=False)
    is_mobile = fields.BooleanField(required=False, default=False)


class FxaLoginSerializer(serializers.Serializer):
    # Firefox Accounts OAuth login payload.
    auth_response = fields.CharField(required=True)
    state = fields.CharField(required=True)


class NewsletterSerializer(serializers.Serializer):
    NEWSLETTER_CHOICES_API = {
        # string passed to the API : actual string passed to basket.
        'about:apps': 'mozilla-and-you,marketplace-desktop',
        'marketplace': 'marketplace'
    }
    email = fields.EmailField()
    newsletter = fields.ChoiceField(required=False, default='marketplace',
                                    choices=NEWSLETTER_CHOICES_API.items())

    def transform_newsletter(self, obj, value):
        # Transform from the string the API receives to the one we need to pass
        # to basket.
        default = self.fields['newsletter'].default
        return self.NEWSLETTER_CHOICES_API.get(value, default)


class PermissionsSerializer(serializers.Serializer):
    """Exposes the ACL permission flags of the requesting user."""

    permissions = fields.SerializerMethodField('get_permissions')

    def get_permissions(self, obj):
        request = self.context['request']
        # Bind the request once; each entry below is an ACL check.
        allowed = partial(acl.action_allowed, request)
        permissions = {
            'admin': allowed('Admin', '%'),
            'developer': request.user.is_developer,
            'localizer': allowed('Localizers', '%'),
            'lookup': allowed('AccountLookup', '%'),
            'curator': allowed('Collections', 'Curate') or
                       allowed('Feed', 'Curate'),
            'reviewer': acl.action_allowed(request, 'Apps', 'Review'),
            'webpay': (allowed('Transaction', 'NotifyFailure')
                       and allowed('ProductIcon', 'Create')),
            'stats': allowed('Stats', 'View'),
            'revenue_stats': allowed('RevenueStats', 'View'),
        }
        return permissions


class UserSerializer(AccountSerializer):
    """
    A wacky serializer type that unserializes PK numbers and
    serializes user fields.
    """
    resource_uri = serializers.HyperlinkedRelatedField(
        view_name='account-settings', source='pk',
        read_only=True)

    class Meta:
        model = UserProfile
        fields = ('display_name', 'resource_uri')
| {
"repo_name": "andymckay/zamboni",
"path": "mkt/account/serializers.py",
"copies": "1",
"size": "3842",
"license": "bsd-3-clause",
"hash": -6959055076883101000,
"line_mean": 33.9272727273,
"line_max": 79,
"alpha_frac": 0.6520041645,
"autogenerated": false,
"ratio": 4.411021814006888,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5563025978506888,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from rest_framework import fields, serializers
import amo
from mkt.access import acl
from mkt.api.serializers import PotatoCaptchaSerializer
from mkt.users.models import UserProfile
class AccountSerializer(serializers.ModelSerializer):
    """Serializer for a user's own account settings (display name only)."""

    class Meta:
        model = UserProfile
        fields = ['display_name']

    # DRF 2.x field-level hook: validate_<field_name>.
    def validate_display_name(self, attrs, source):
        """Validate that display_name is not empty"""
        value = attrs.get(source)
        if value is None or not value.strip():
            raise serializers.ValidationError("This field is required")
        return attrs

    def transform_display_name(self, obj, value):
        """Return obj.name instead of display_name to handle users without
        a valid display_name."""
        return obj.name


class AccountInfoSerializer(serializers.ModelSerializer):
    """Read-only login source / verification info for a user."""

    # Sources that are allowed to be reported verbatim; anything else is
    # reported as "unknown".
    ALLOWED_SOURCES = [amo.LOGIN_SOURCE_UNKNOWN, amo.LOGIN_SOURCE_FXA]
    source = serializers.CharField(read_only=True)
    verified = serializers.BooleanField(source='is_verified', read_only=True)

    class Meta:
        model = UserProfile
        fields = ['source', 'verified']

    def transform_source(self, obj, value):
        """Return the sources slug instead of the id."""
        if obj.source in self.ALLOWED_SOURCES:
            return amo.LOGIN_SOURCE_LOOKUP[value]
        else:
            return amo.LOGIN_SOURCE_LOOKUP[amo.LOGIN_SOURCE_UNKNOWN]


class FeedbackSerializer(PotatoCaptchaSerializer):
    """Feedback submission payload, captcha-protected via the base class."""

    feedback = fields.CharField()
    platform = fields.CharField(required=False)
    chromeless = fields.CharField(required=False)
    from_url = fields.CharField(required=False)
    user = fields.Field()

    def validate(self, attrs):
        attrs = super(FeedbackSerializer, self).validate(attrs)
        if not attrs.get('platform'):
            attrs['platform'] = self.request.GET.get('dev', '')
        # Anonymous feedback is allowed; user is None in that case.
        if self.request.user.is_authenticated():
            attrs['user'] = self.request.user
        else:
            attrs['user'] = None
        return attrs


class LoginSerializer(serializers.Serializer):
    # NOTE(review): assertion/audience fields suggest a BrowserID/Persona
    # login payload — confirm against the consuming view.
    assertion = fields.CharField(required=True)
    audience = fields.CharField(required=False)
    is_mobile = fields.BooleanField(required=False, default=False)


class FxALoginSerializer(serializers.Serializer):
    # Firefox Accounts OAuth login payload.
    auth_response = fields.CharField(required=True)
    state = fields.CharField(required=True)


class NewsletterSerializer(serializers.Serializer):
    NEWSLETTER_CHOICES_API = {
        # string passed to the API : actual string passed to basket.
        'about:apps': 'mozilla-and-you,marketplace-desktop',
        'marketplace': 'marketplace'
    }
    email = fields.EmailField()
    newsletter = fields.ChoiceField(required=False, default='marketplace',
                                    choices=NEWSLETTER_CHOICES_API.items())

    def transform_newsletter(self, obj, value):
        # Transform from the string the API receives to the one we need to pass
        # to basket.
        default = self.fields['newsletter'].default
        return self.NEWSLETTER_CHOICES_API.get(value, default)


class PermissionsSerializer(serializers.Serializer):
    """Exposes the ACL permission flags of the requesting user."""

    permissions = fields.SerializerMethodField('get_permissions')

    def get_permissions(self, obj):
        request = self.context['request']
        # Bind the request once; each entry below is an ACL check.
        allowed = partial(acl.action_allowed, request)
        permissions = {
            'admin': allowed('Admin', '%'),
            'developer': request.user.is_developer,
            'localizer': allowed('Localizers', '%'),
            'lookup': allowed('AccountLookup', '%'),
            'curator': allowed('Collections', 'Curate') or
                       allowed('Feed', 'Curate'),
            'reviewer': acl.action_allowed(request, 'Apps', 'Review'),
            'webpay': (allowed('Transaction', 'NotifyFailure')
                       and allowed('ProductIcon', 'Create')),
            'stats': allowed('Stats', 'View'),
            'revenue_stats': allowed('RevenueStats', 'View'),
        }
        return permissions


class UserSerializer(AccountSerializer):
    """
    A wacky serializer type that unserializes PK numbers and
    serializes user fields.
    """
    resource_uri = serializers.HyperlinkedRelatedField(
        view_name='account-settings', source='pk',
        read_only=True)

    class Meta:
        model = UserProfile
        fields = ('display_name', 'resource_uri')
| {
"repo_name": "ngokevin/zamboni",
"path": "mkt/account/serializers.py",
"copies": "1",
"size": "4435",
"license": "bsd-3-clause",
"hash": 7605124059289817000,
"line_mean": 33.3798449612,
"line_max": 79,
"alpha_frac": 0.6532130778,
"autogenerated": false,
"ratio": 4.352306182531894,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0001550387596899225,
"num_lines": 129
} |
from functools import partial
from rest_framework import fields, serializers
import mkt
from mkt.access import acl
from mkt.access.models import Group
from mkt.api.serializers import PotatoCaptchaSerializer
from mkt.users.models import UserProfile
class AccountSerializer(serializers.ModelSerializer):
    """Serializer for a user's own account settings."""

    class Meta:
        model = UserProfile
        fields = ['display_name', 'enable_recommendations']

    # DRF 2.x field-level hook: validate_<field_name>.
    def validate_display_name(self, attrs, source):
        """Validate that display_name is not empty"""
        value = attrs.get(source)
        if value is None or not value.strip():
            raise serializers.ValidationError("This field is required")
        return attrs

    def transform_display_name(self, obj, value):
        """Return obj.name instead of display_name to handle users without
        a valid display_name."""
        return obj.name


class AccountInfoSerializer(serializers.ModelSerializer):
    """Read-only login source / verification info for a user."""

    # Sources that are allowed to be reported verbatim.
    ALLOWED_SOURCES = [mkt.LOGIN_SOURCE_FXA]
    source = serializers.CharField(read_only=True)
    verified = serializers.BooleanField(source='is_verified', read_only=True)

    class Meta:
        model = UserProfile
        fields = ['source', 'verified']

    def transform_source(self, obj, value):
        """Return the sources slug instead of the id."""
        if obj.pk is None:
            # Unsaved/unknown users report the "unknown" source.
            return mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_SOURCE_UNKNOWN]
        elif obj.source in self.ALLOWED_SOURCES:
            return mkt.LOGIN_SOURCE_LOOKUP[value]
        else:
            return mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_SOURCE_BROWSERID]


class FeedbackSerializer(PotatoCaptchaSerializer):
    """Feedback submission payload, captcha-protected via the base class."""

    feedback = fields.CharField()
    platform = fields.CharField(required=False)
    chromeless = fields.CharField(required=False)
    from_url = fields.CharField(required=False)
    user = fields.Field()

    def validate(self, attrs):
        attrs = super(FeedbackSerializer, self).validate(attrs)
        if not attrs.get('platform'):
            attrs['platform'] = self.request.GET.get('dev', '')
        # Anonymous feedback is allowed; user is None in that case.
        if self.request.user.is_authenticated():
            attrs['user'] = self.request.user
        else:
            attrs['user'] = None
        return attrs

    def validate_feedback(self, attrs, source):
        # ensure feedback is not submitted with only white spaces
        if not attrs[source].strip():
            raise serializers.ValidationError('Feedback can\'t be blank')
        return attrs


class LoginSerializer(serializers.Serializer):
    # NOTE(review): assertion/audience fields suggest a BrowserID/Persona
    # login payload — confirm against the consuming view.
    assertion = fields.CharField(required=True)
    audience = fields.CharField(required=False)
    is_mobile = fields.BooleanField(required=False, default=False)


class FxALoginSerializer(serializers.Serializer):
    # Firefox Accounts OAuth login payload.
    auth_response = fields.CharField(required=True)
    state = fields.CharField(required=True)


class NewsletterSerializer(serializers.Serializer):
    NEWSLETTER_CHOICES_API = {
        # string passed to the API : actual string passed to basket.
        'about:apps': 'mozilla-and-you,marketplace-desktop',
        'marketplace-firefoxos': 'marketplace',
        'marketplace-desktop': 'mozilla-and-you',
        'marketplace-android': 'mozilla-and-you'
    }
    email = fields.EmailField()
    newsletter = fields.ChoiceField(
        default='marketplace-firefoxos',
        required=False,
        choices=NEWSLETTER_CHOICES_API.items())
    lang = fields.CharField()

    def transform_newsletter(self, obj, value):
        # Transform from the string the API receives to the one we need to pass
        # to basket.
        default = self.fields['newsletter'].default
        return self.NEWSLETTER_CHOICES_API.get(value, default)


class PermissionsSerializer(serializers.Serializer):
    """Exposes the ACL permission flags of the requesting user."""

    permissions = fields.SerializerMethodField('get_permissions')

    def get_permissions(self, obj):
        request = self.context['request']
        # Bind the request once; each entry below is an ACL check.
        allowed = partial(acl.action_allowed, request)
        permissions = {
            'admin': allowed('Admin', '%'),
            'developer': request.user.is_developer,
            'localizer': allowed('Localizers', '%'),
            'lookup': allowed('AccountLookup', '%'),
            'curator': (
                allowed('Collections', 'Curate') or
                allowed('Feed', 'Curate')
            ),
            'reviewer': allowed('Apps', 'Review'),
            'webpay': (allowed('Transaction', 'NotifyFailure') and
                       allowed('ProductIcon', 'Create')),
            'website_submitter': allowed('Websites', 'Submit'),
            'stats': allowed('Stats', 'View'),
            'revenue_stats': allowed('RevenueStats', 'View'),
            'content_tools_login': allowed('ContentTools', 'Login'),
            'content_tools_addon_submit': allowed('ContentTools',
                                                  'AddonSubmit'),
            'content_tools_addon_review': allowed('ContentTools',
                                                  'AddonReview'),
        }
        return permissions


class UserSerializer(AccountSerializer):
    """
    A wacky serializer type that unserializes PK numbers and
    serializes user fields.
    """
    resource_uri = serializers.HyperlinkedRelatedField(
        view_name='account-settings', source='pk',
        read_only=True)

    class Meta:
        model = UserProfile
        fields = ('display_name', 'resource_uri')


class GroupsSerializer(serializers.ModelSerializer):
    """Read-only serializer for access-control groups."""

    class Meta:
        model = Group
        fields = ('id', 'name', 'restricted')
        read_only_fields = ('id', 'name', 'restricted')


class TOSSerializer(serializers.Serializer):
    """Reports whether the requesting user signed the developer agreement."""

    has_signed = fields.SerializerMethodField('get_has_signed')

    def get_has_signed(self, obj):
        return (self.context['request'].user.read_dev_agreement is not None)
| {
"repo_name": "elysium001/zamboni",
"path": "mkt/account/serializers.py",
"copies": "1",
"size": "5781",
"license": "bsd-3-clause",
"hash": 6681262059321466000,
"line_mean": 33.6167664671,
"line_max": 79,
"alpha_frac": 0.6402006573,
"autogenerated": false,
"ratio": 4.36631419939577,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 167
} |
from functools import partial
from rest_framework import fields, serializers
import mkt
from mkt.access import acl
from mkt.api.serializers import PotatoCaptchaSerializer
from mkt.users.models import UserProfile
class AccountSerializer(serializers.ModelSerializer):
    """Serializer for a user's own account settings."""

    class Meta:
        model = UserProfile
        fields = ['display_name', 'enable_recommendations']

    # DRF 2.x field-level hook: validate_<field_name>.
    def validate_display_name(self, attrs, source):
        """Validate that display_name is not empty"""
        value = attrs.get(source)
        if value is None or not value.strip():
            raise serializers.ValidationError("This field is required")
        return attrs

    def transform_display_name(self, obj, value):
        """Return obj.name instead of display_name to handle users without
        a valid display_name."""
        return obj.name


class AccountInfoSerializer(serializers.ModelSerializer):
    """Read-only login source / verification info for a user."""

    # Sources that are allowed to be reported verbatim.
    ALLOWED_SOURCES = [mkt.LOGIN_SOURCE_FXA]
    source = serializers.CharField(read_only=True)
    verified = serializers.BooleanField(source='is_verified', read_only=True)

    class Meta:
        model = UserProfile
        fields = ['source', 'verified']

    def transform_source(self, obj, value):
        """Return the sources slug instead of the id."""
        if obj.pk is None:
            # Unsaved/unknown users report the "unknown" source.
            return mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_SOURCE_UNKNOWN]
        elif obj.source in self.ALLOWED_SOURCES:
            return mkt.LOGIN_SOURCE_LOOKUP[value]
        else:
            return mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_SOURCE_BROWSERID]


class FeedbackSerializer(PotatoCaptchaSerializer):
    """Feedback submission payload, captcha-protected via the base class."""

    feedback = fields.CharField()
    platform = fields.CharField(required=False)
    chromeless = fields.CharField(required=False)
    from_url = fields.CharField(required=False)
    user = fields.Field()

    def validate(self, attrs):
        attrs = super(FeedbackSerializer, self).validate(attrs)
        if not attrs.get('platform'):
            attrs['platform'] = self.request.GET.get('dev', '')
        # Anonymous feedback is allowed; user is None in that case.
        if self.request.user.is_authenticated():
            attrs['user'] = self.request.user
        else:
            attrs['user'] = None
        return attrs


class LoginSerializer(serializers.Serializer):
    # NOTE(review): assertion/audience fields suggest a BrowserID/Persona
    # login payload — confirm against the consuming view.
    assertion = fields.CharField(required=True)
    audience = fields.CharField(required=False)
    is_mobile = fields.BooleanField(required=False, default=False)


class FxALoginSerializer(serializers.Serializer):
    # Firefox Accounts OAuth login payload.
    auth_response = fields.CharField(required=True)
    state = fields.CharField(required=True)


class NewsletterSerializer(serializers.Serializer):
    NEWSLETTER_CHOICES_API = {
        # string passed to the API : actual string passed to basket.
        'about:apps': 'mozilla-and-you,marketplace-desktop',
        'marketplace-firefoxos': 'marketplace',
        'marketplace-desktop': 'mozilla-and-you',
        'marketplace-android': 'mozilla-and-you'
    }
    email = fields.EmailField()
    newsletter = fields.ChoiceField(
        default='marketplace-firefoxos',
        required=False,
        choices=NEWSLETTER_CHOICES_API.items())
    lang = fields.CharField()

    def transform_newsletter(self, obj, value):
        # Transform from the string the API receives to the one we need to pass
        # to basket.
        default = self.fields['newsletter'].default
        return self.NEWSLETTER_CHOICES_API.get(value, default)


class PermissionsSerializer(serializers.Serializer):
    """Exposes the ACL permission flags of the requesting user."""

    permissions = fields.SerializerMethodField('get_permissions')

    def get_permissions(self, obj):
        request = self.context['request']
        # Bind the request once; each entry below is an ACL check.
        allowed = partial(acl.action_allowed, request)
        permissions = {
            'admin': allowed('Admin', '%'),
            'developer': request.user.is_developer,
            'localizer': allowed('Localizers', '%'),
            'lookup': allowed('AccountLookup', '%'),
            'curator': (
                allowed('Collections', 'Curate') or
                allowed('Feed', 'Curate')
            ),
            'reviewer': allowed('Apps', 'Review'),
            'webpay': (allowed('Transaction', 'NotifyFailure') and
                       allowed('ProductIcon', 'Create')),
            'website_submitter': allowed('Websites', 'Submit'),
            'stats': allowed('Stats', 'View'),
            'revenue_stats': allowed('RevenueStats', 'View'),
        }
        return permissions


class UserSerializer(AccountSerializer):
    """
    A wacky serializer type that unserializes PK numbers and
    serializes user fields.
    """
    resource_uri = serializers.HyperlinkedRelatedField(
        view_name='account-settings', source='pk',
        read_only=True)

    class Meta:
        model = UserProfile
        fields = ('display_name', 'resource_uri')
| {
"repo_name": "jamesthechamp/zamboni",
"path": "mkt/account/serializers.py",
"copies": "6",
"size": "4743",
"license": "bsd-3-clause",
"hash": 4410085933117730300,
"line_mean": 33.1223021583,
"line_max": 79,
"alpha_frac": 0.6495888678,
"autogenerated": false,
"ratio": 4.296195652173913,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7945784519973913,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from rest_framework import status
from rest_framework.test import APITestCase
from pprint import pformat
from collections import OrderedDict
from rest_framework.utils.serializer_helpers import ReturnList
def compare_lists(data, expected_data):
data_gen = (item for item in data)
expected_data_gen = (item for item in expected_data)
for value in expected_data_gen:
if value is ...:
try:
next_value = expected_data_gen.send(None)
except StopIteration:
# last item is ellipsis
return True
if next_value is ...:
raise TypeError('Consecutively usage of ... (Ellipsis) is not allowed in list.')
try:
while not compare(data_gen.send(None), next_value):
pass
except StopIteration:
# next expected item is not in data
return False
else:
try:
data_item = data_gen.send(None)
except StopIteration:
# there are more expected items
return False
if not compare(data_item, value):
# expected item is not in data
return False
try:
data_gen.send(None)
except StopIteration:
return True
else:
# More items in data
return False
def compare_dicts(data, expected_data):
subset = False
# subset
if ... in expected_data:
if expected_data[...] is ...:
subset = True
else:
raise TypeError('Bad usage of ... (Ellipsis).')
compared_keys = []
for key, value in expected_data.items():
if key is ...:
continue
if value is ...:
if key not in data:
# Key is not found in data
return False
else:
compared_keys.append(key)
else:
if key in data:
if compare(data[key], expected_data[key]):
compared_keys.append(key)
else:
# values are not the same
return False
else:
# Key is not found in data
return False
if not subset:
if len(compared_keys) != len(data):
# More items in data
return False
return True
def compare(data, expected_data):
    """Recursively check *data* against *expected_data*.

    If *expected_data* is a type, only the exact type of *data* is checked.
    Lists and dicts are delegated to compare_lists/compare_dicts (which
    understand the Ellipsis wildcard); everything else uses equality.
    """
    if isinstance(expected_data, type):
        # Exact type match requested (deliberately not isinstance: a bool
        # must not satisfy an expected int, for example).
        if type(data) == expected_data:
            return True
    actual_type = type(data)
    if actual_type != type(expected_data):
        # different types can never match
        return False
    if actual_type == list:
        return compare_lists(data, expected_data)
    if actual_type == dict:
        return compare_dicts(data, expected_data)
    return data == expected_data
def convert_data(data):
    """Recursively convert response containers to plain ``list``/``dict``.

    :param data: arbitrarily nested response data
    :returns: the same structure with every list-like and dict-like
        container rebuilt as a builtin ``list``/``dict``; scalars unchanged

    BUG FIX: the original used exact ``type(...) ==`` checks plus special
    cases for exactly ``ReturnList`` and ``OrderedDict``, so any other
    ``list``/``dict`` subclass (e.g. DRF's ReturnDict) fell through
    unconverted. ``isinstance`` covers all subclasses, since ReturnList
    subclasses ``list`` and OrderedDict subclasses ``dict``.
    """
    if isinstance(data, list):
        return [convert_data(item) for item in data]
    if isinstance(data, dict):
        return {key: convert_data(value) for key, value in data.items()}
    return data
class BaseAPITestCase(APITestCase):
    """APITestCase with JSON request helpers and comparison assertions used
    by the generated REST permission tests."""

    def _request(self, method, url, data=None):
        # Dispatch to the matching APIClient method, always in JSON format.
        # TODO add URL to assert message
        # print("Tested url: '{url}'".format(url=url))
        response = getattr(self.client, method)(url, data=data, format='json')
        return response

    def _get(self, url, data=None):
        return self._request('get', url, data=data)

    def _post(self, url, data=None):
        return self._request('post', url, data=data)

    def _put(self, url, data=None):
        return self._request('put', url, data=data)

    def _delete(self, url, data=None):
        return self._request('delete', url, data=data)

    def _patch(self, url, data=None):
        return self._request('patch', url, data=data)

    # assert methods
    def assert_disabled(self, status_code, msg):
        """Assert that *status_code* is one of the 'operation refused' codes."""
        expected_status_codes = (
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_404_NOT_FOUND,
            status.HTTP_405_METHOD_NOT_ALLOWED,
            status.HTTP_403_FORBIDDEN
        )
        # TODO refactor - reverse msg composing
        msg = "{msg}\n{info}".format(
            msg=msg,
            info=pformat(dict(
                response_status_code=status_code,
                expected_status_codes=expected_status_codes
            ))
        )
        assert status_code in expected_status_codes, msg

    def assert_compare(self, data, expected_data, msg):
        """Assert that response *data* matches *expected_data* (with the
        Ellipsis wildcard semantics of compare())."""
        data = convert_data(data)
        # TODO refactor - reverse msg composing
        msg = "{msg}\n{info}".format(
            msg=msg,
            info=pformat(dict(
                response_data=data,
                expected_data=expected_data
            ))
        )
        assert compare(data, expected_data), msg

    def assert_status_code(self, response_status_code, expected_status_code, msg):
        """Assert an exact response status code, with a readable message."""
        # TODO refactor - reverse msg composing
        msg = """{msg}
Response output data is empty.
Expected response status code was '{expected_status_code}' but got '{response_status_code}'.""".format(
            msg=msg,
            response_status_code=response_status_code,
            expected_status_code=expected_status_code
        )
        assert response_status_code == expected_status_code, msg

    # URLs of the collection and detail endpoints; set by subclasses.
    url = ''
    url_detail = ''

    def create(self, input_data=None):
        return self._post(self.url, data=input_data)

    def retrieve(self, input_data=None):
        return self._get(self.url_detail, data=input_data)

    def update(self, input_data=None):
        return self._put(self.url_detail, data=input_data)

    def delete(self, input_data=None):
        return self._delete(self.url_detail, data=input_data)

    def list(self, input_data=None):
        return self._get(self.url, data=input_data)

    def patch(self, input_data=None):
        return self._patch(self.url_detail, data=input_data)

    def login(self, user):
        # force_authenticate skips the login flow entirely.
        self.client.force_authenticate(user=user)
# Names of the REST operations a RestUser may be granted; also used to derive
# the generated test names (``test_<operation>_by_<user>``).
OPERATIONS = ('create', 'retrieve', 'update', 'delete', 'patch', 'list')
class AllRestUsers():
    """Proxy used as ``RestTestCase.all_users``: ``@all_users.can_<op>``
    decorates a test-case class and grants <op> to every one of its
    rest users at once."""

    def _decorator(self, operation):
        def class_wrapper(cls):
            # Grant the operation to each RestUser collected on the class.
            for rest_user in cls.rest_users:
                rest_user.allowed_operations.add(operation)
            return cls
        return class_wrapper

    def __getattr__(self, name):
        # Expose can_<operation> attributes as grant decorators.
        for operation in OPERATIONS:
            if name == 'can_{operation}'.format(operation=operation):
                return self._decorator(operation)
        raise AttributeError
class MetaRestTestCase(type):
    """Metaclass for RestTestCase: collects RestUser declarations from the
    class body and bases, and exposes the generated
    ``test_<operation>_by_<user>`` names on the class."""

    @property
    def rest_users(self):
        # Iterate over the RestUser instances collected by __init__.
        yield from self._rest_users

    @property
    def test_names(self):
        """Yield (test name, rest_user, operation) triples for this class."""
        if not self.__test__:
            # BUG FIX: this used to ``raise StopIteration()``, which PEP 479
            # (Python 3.7+) converts into a RuntimeError inside a generator;
            # a plain return ends the generator cleanly.
            return
        for rest_user in self.rest_users:
            for operation in OPERATIONS:
                yield 'test_{operation}_by_{rest_user.name}'.format(
                    operation=operation, rest_user=rest_user
                ), rest_user, operation

    def __getattr__(self, attr_name):
        # Make the generated test names resolvable on the class itself so
        # test collectors see them; the real work happens on instances.
        for test_name, rest_user, operation in self.test_names:
            if test_name == attr_name:
                return lambda s: True
        raise AttributeError()

    def __init__(cls, name, bases, attrs):
        rest_users = set()
        rest_users_names = set()
        # Inherit user declarations from base test cases.
        for base in bases:
            rest_users_names |= getattr(base, '_rest_users_names', set())
        for attr, value in attrs.items():
            if isinstance(value, type) and issubclass(value, RestUser):
                # Declared as a class: instantiate it under this name below.
                rest_users_names.add(attr)
            elif isinstance(value, RestUser):
                if value.name is None:
                    value.name = attr
                rest_users.add(value)
        for rest_user_name in rest_users_names:
            rest_user = RestUser(name=rest_user_name)
            rest_users.add(rest_user)
            setattr(cls, rest_user_name, rest_user)
        # pytest compatible support for excluding whole TestCase - ie. for inheritance and for some test suits
        if '__test__' not in attrs:
            # ``__test = False`` in a class body arrives name-mangled.
            cls.__test__ = attrs.get('_{}__test'.format(name), True)
        cls._rest_users_names = rest_users_names
        cls._rest_users = rest_users
        super().__init__(name, bases, attrs)

    def __dir__(self):
        return [test_name for test_name, rest_user, operation in self.test_names] + super().__dir__()
class RestUser(object):
    """A named API user together with the set of REST operations it may
    perform. Instances compare (and deduplicate in sets) by name."""

    def __init__(self, name=None, user=None, **kwargs):
        self.name = name
        # The actual Django user object; usually bound later via bind_user().
        self.bound_user = user
        self.allowed_operations = set()
        # can_<operation>=True kwargs pre-grant operations.
        for operation in OPERATIONS:
            kwarg = 'can_{}'.format(operation)
            if kwargs.get(kwarg, False):
                self.allowed_operations.add(operation)

    def __eq__(self, other):
        return self.name == other.name

    def __hash__(self):
        # BUG FIX: this used to be ``id(self.name)``, so two users with
        # equal but distinct name strings hashed differently, violating the
        # __eq__/__hash__ contract that set deduplication relies on.
        return hash(self.name)

    def bind_user(self, user):
        self.bound_user = user

    def _decorator(self, operation):
        # Class decorator granting *operation* to this named user on the
        # decorated test-case class.
        def class_wrapper(cls):
            getattr(cls, self.name).allowed_operations.add(operation)
            return cls
        return class_wrapper

    def __getattr__(self, name):
        # Expose can_<operation> attributes as grant decorators.
        for operation in OPERATIONS:
            if name == 'can_{operation}'.format(operation=operation):
                return self._decorator(operation)
        raise AttributeError

    def can(self, operation):
        """Tell whether *operation* has been granted to this user."""
        return operation in self.allowed_operations
class RestTestCase(BaseAPITestCase, metaclass=MetaRestTestCase):
    """Base class for generated REST permission tests.

    The metaclass synthesizes one test per (rest user, operation) pair; each
    test either checks the operation's response (when the user was granted
    it via the can_* decorators) or checks that the API refuses it.
    """
    all_users = AllRestUsers()
    # The metaclass replaces this class attribute with a RestUser instance
    # named 'anonymous_user' (it collects RestUser classes by name).
    anonymous_user = RestUser
    output_status_create = status.HTTP_201_CREATED
    # Name-mangled to _RestTestCase__test; the metaclass reads it to exclude
    # this base class itself from test collection.
    __test = False

    def _get_input_data(self, rest_user, operation):
        # Per-user input (input_<op>_<user>) wins over the generic input_<op>.
        return getattr(
            self,
            'input_{operation}_{rest_user.name}'.format(operation=operation, rest_user=rest_user),
            getattr(self, 'input_{operation}'.format(operation=operation), None)
        )

    def _get_output_data(self, rest_user, operation):
        # Per-user expected output wins over the generic output_<op>.
        return getattr(
            self,
            'output_{operation}_{rest_user.name}'.format(operation=operation, rest_user=rest_user),
            getattr(self, 'output_{operation}'.format(operation=operation), None)
        )

    def _get_output_status(self, rest_user, operation):
        # Per-user expected status wins over output_status_<op>; default 200.
        return getattr(
            self,
            'output_status_{operation}_{rest_user.name}'.format(operation=operation, rest_user=rest_user),
            getattr(self, 'output_status_{operation}'.format(operation=operation), status.HTTP_200_OK)
        )

    def _test(self, rest_user=None, operation=''):
        """Run *operation* as *rest_user* and check status code and payload."""
        msg = "Operation '{operation}' for '{rest_user.name}' is enabled.".format(
            operation=operation, rest_user=rest_user
        )
        if rest_user is not None:
            self.login(rest_user.bound_user)
        input_data = self._get_input_data(rest_user, operation)
        expected_output_data = self._get_output_data(rest_user, operation)
        response = getattr(self, operation)(input_data)
        response_status_code = response.status_code
        if expected_output_data is None:
            # No payload expected: the operation should answer 204 No Content.
            # TODO assert if it is defined some expected response status code
            self.assert_status_code(response_status_code, status.HTTP_204_NO_CONTENT, msg)
        else:
            expected_status_code = self._get_output_status(rest_user, operation)
            self.assert_status_code(response_status_code, expected_status_code, msg)
            # TODO - maybe: if hasattr(response, 'data') else None
            self.assert_compare(response.data, expected_output_data, msg)

    def _get_test(self, rest_user, operation):
        # Dispatch to the enabled or the disabled variant of the check.
        if rest_user.can(operation):
            return partial(self._test, rest_user=rest_user, operation=operation)
        else:
            return partial(self._test_disabled, rest_user=rest_user, operation=operation)

    def _test_disabled(self, rest_user=None, operation=''):
        """Check that *operation* is refused for *rest_user*, including with
        payloads that are valid for other (allowed) users."""
        msg = "Operation '{operation}' for '{rest_user.name}' is disabled.".format(
            operation=operation, rest_user=rest_user
        )
        if rest_user.bound_user is not None:
            self.login(rest_user.bound_user)
        # no input data
        input_data_list = [None]
        # input data for allowed users
        for another_rest_user in self.__class__.rest_users:
            if rest_user != another_rest_user and another_rest_user.can(operation):
                another_input_data = self._get_input_data(another_rest_user, operation)
                if another_input_data not in input_data_list:
                    input_data_list.append(another_input_data)
        for input_data in input_data_list:
            response = getattr(self, operation)(input_data)
            self.assert_disabled(response.status_code, msg)

    def __getattr__(self, attr_name):
        # Instance-level counterpart of the metaclass __getattr__: resolves
        # the generated test_<op>_by_<user> names to bound test callables.
        for test_name, rest_user, operation in self.__class__.test_names:
            if test_name == attr_name:
                return self._get_test(rest_user, operation)
        raise AttributeError()
"repo_name": "baseclue/django-rest-test",
"path": "rest_test/__init__.py",
"copies": "1",
"size": "13329",
"license": "apache-2.0",
"hash": -7768518745897162000,
"line_mean": 31.512195122,
"line_max": 111,
"alpha_frac": 0.5884912597,
"autogenerated": false,
"ratio": 4.151043288695111,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006168443627651909,
"num_lines": 410
} |
from functools import partial
from rlkit.envs.contextual import ContextualEnv
from rlkit.policies.base import Policy
from rlkit.samplers.data_collector import MdpPathCollector
from rlkit.samplers.rollout_functions import contextual_rollout
class ContextualPathCollector(MdpPathCollector):
    """MdpPathCollector variant for contextual environments: rollouts are
    produced by ``contextual_rollout`` pre-bound with the observation and
    context keys the policy should read."""

    def __init__(
            self,
            env: ContextualEnv,
            policy: Policy,
            max_num_epoch_paths_saved=None,
            observation_key='observation',
            context_keys_for_policy='context',
            render=False,
            render_kwargs=None,
            **kwargs
    ):
        self._observation_key = observation_key
        self._context_keys_for_policy = context_keys_for_policy
        # Pre-bind the contextual keys so the base collector can invoke the
        # rollout function with its usual signature.
        bound_rollout = partial(
            contextual_rollout,
            context_keys_for_policy=context_keys_for_policy,
            observation_key=observation_key,
        )
        super().__init__(
            env, policy, max_num_epoch_paths_saved, render, render_kwargs,
            rollout_fn=bound_rollout,
            **kwargs
        )

    def get_snapshot(self):
        """Extend the base snapshot with the contextual bookkeeping keys."""
        snapshot = super().get_snapshot()
        snapshot['observation_key'] = self._observation_key
        snapshot['context_keys_for_policy'] = self._context_keys_for_policy
        return snapshot
| {
"repo_name": "vitchyr/rlkit",
"path": "rlkit/samplers/data_collector/contextual_path_collector.py",
"copies": "1",
"size": "1313",
"license": "mit",
"hash": -4706289876643138000,
"line_mean": 31.825,
"line_max": 74,
"alpha_frac": 0.6138613861,
"autogenerated": false,
"ratio": 4.181528662420382,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5295390048520382,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from scapy.all import ARP, Ether, sendp
from sleepproxy.sniff import SnifferThread
_HOSTS = {}
def handle(othermac, addresses, mymac, iface):
print 'Pretending to handle arp for %s on %s' % (addresses, iface)
if othermac in _HOSTS:
print "I already seem to be managing %s, ignoring"
return
for address in addresses:
if ':' in address:
# TODO: Handle IP6
continue
thread = SnifferThread(
filterexp="arp host %s" % (address, ),
prn=partial(_handle_packet, address, mymac),
iface=iface,
)
_HOSTS[othermac] = thread
thread.start()
def forget(mac):
print "Pretending to forget %s in ARP" % (mac, )
if mac not in _HOSTS:
print "I don't seem to be managing %s" % (mac, )
return
_HOSTS[mac].stop()
del _HOSTS[mac]
def _handle_packet(address, mac, packet):
    """Reply to a sniffed ARP who-has for *address*, advertising *mac*."""
    if ARP not in packet:
        # I don't know how this happens, but I've seen it
        return
    if packet[ARP].op != ARP.who_has:
        return
    # TODO: Should probably handle is-at by deregistering!
    if packet[ARP].pdst != address:
        print "Skipping packet with pdst %s != %s" % (packet[ARP].pdst, address, )
        return
    ether = packet[Ether]
    arp = packet[ARP]
    # Forge an is-at answer: swap the src/dst addresses of the request and
    # advertise our MAC as the hardware address for the queried IP.
    reply = Ether(
        dst=ether.src, src=mac) / ARP(
        op="is-at",
        psrc=arp.pdst,
        pdst=arp.psrc,
        hwsrc=mac,
        hwdst=packet[ARP].hwsrc)
    print "Sending ARP response for %s" % (arp.pdst, )
    sendp(reply)
| {
"repo_name": "rcloran/SleepProxyServer",
"path": "sleepproxy/arp.py",
"copies": "1",
"size": "1609",
"license": "bsd-2-clause",
"hash": 6672050711540156000,
"line_mean": 26.7413793103,
"line_max": 82,
"alpha_frac": 0.5748912368,
"autogenerated": false,
"ratio": 3.4527896995708156,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4527680936370816,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from scapy.all import IP, TCP
import sleepproxy.manager
from sleepproxy.sniff import SnifferThread
from sleepproxy.wol import wake
_HOSTS = {}
def handle(mac, addresses, iface):
print "Pretending to handle incoming SYN for %s: %s" % (mac, addresses, )
if mac in _HOSTS:
print "Ignoring already managed host %s" % (mac, )
for address in addresses:
if ':' in address:
# TODO: Handle IP6
continue
print 'Starting TCP sniffer for %s' % (address, )
thread = SnifferThread(
filterexp="tcp[tcpflags] & tcp-syn != 0 and tcp[tcpflags] & tcp-ack = 0 and dst host %s" % (address, ),
prn=partial(_handle_packet, mac, address),
iface=iface,
)
_HOSTS[mac] = thread
thread.start()
def forget(mac):
print "Pretending to forget host %s in TCP handler" % (mac, )
if mac not in _HOSTS:
print "I don't seem to know about %s, ignoring" % (mac, )
return
_HOSTS[mac].stop()
del _HOSTS[mac]
def _handle_packet(mac, address, packet):
    """Do something with a SYN for the other machine!"""
    if not (IP in packet and TCP in packet):
        return
    if packet[IP].dst != address:
        print "Sniffed a TCP SYN for the wrong address!?"
        print packet.show()
        return
    # A real connection attempt arrived: wake the sleeping host and stop
    # proxying for it.
    wake(mac)
    # TODO: Check if it awoke?
    sleepproxy.manager.forget_host(mac)
| {
"repo_name": "rcloran/SleepProxyServer",
"path": "sleepproxy/tcp.py",
"copies": "1",
"size": "1445",
"license": "bsd-2-clause",
"hash": -5440300622273619000,
"line_mean": 28.4897959184,
"line_max": 115,
"alpha_frac": 0.6069204152,
"autogenerated": false,
"ratio": 3.5591133004926108,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46660337156926107,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from six import iteritems
from bravado_core.docstring import docstring_property
from bravado_core.schema import (
is_dict_like,
is_list_like,
SWAGGER_PRIMITIVES
)
# Models in #/definitions are tagged with this key so that they can be
# differentiated from 'object' types.
MODEL_MARKER = 'x-model'
def build_models(definitions_spec):
    """Builds the models contained in a #/definitions dict. This applies
    to more than just definitions - generalize later.

    :param definitions_spec: spec['definitions'] in dict form
    :returns: dict where (name,value) = (model name, model type); each model
        is registered under both its simple name and its $ref-style name
    """
    models = {}
    for model_name, model_spec in iteritems(definitions_spec):
        model_type = create_model_type(model_name, model_spec)
        # make models available under both simple name and $ref style name
        # - Pet <-- TODO: remove eventually
        # - #/definitions/Pet
        models[model_name] = model_type
        models['#/definitions/{0}'.format(model_name)] = model_type
    return models
def create_model_type(model_name, model_spec):
    """Create a dynamic class from the model data defined in the swagger
    spec.

    The docstring for this class is dynamically generated because generating
    the docstring is relatively expensive, and would only be used in rare
    cases for interactive debugging in a REPL.

    :param model_name: model name
    :param model_spec: json-like dict that describes a model.
    :returns: dynamic type created with attributes, docstrings attached
    :rtype: type
    """
    # Each dunder delegates to the module-level helpers, closing over
    # model_spec so every generated type knows its own schema.
    methods = dict(
        __doc__=docstring_property(partial(create_model_docstring, model_spec)),
        __eq__=lambda self, other: compare(self, other),
        __init__=lambda self, **kwargs: model_constructor(self, model_spec,
                                                          kwargs),
        __repr__=lambda self: create_model_repr(self, model_spec),
        __dir__=lambda self: model_dir(self, model_spec),
    )
    return type(str(model_name), (object,), methods)
def model_dir(model, model_spec):
    """Return the valid attribute names for *model*: the properties declared
    in its spec plus any ``additionalProperties`` stored on the instance.

    :param model: instance of a model
    :param model_spec: spec of the passed-in model in dict form
    :returns: list of str
    """
    declared = list(model_spec['properties'])
    return declared + model._additional_props
def compare(first, second):
    """Compares two model instances for equivalence, ignoring the internal
    '_raw' attribute.

    TODO: If a type composes another model type, .__dict__ recurse on those
    and compare again on those dict values.

    :param first: generated model type
    :type first: type
    :param second: generated model type
    :type second: type
    :returns: True if equivalent, False otherwise
    """
    if not (hasattr(first, '__dict__') and hasattr(second, '__dict__')):
        return False

    def visible_attrs(obj):
        # '_raw' is bookkeeping, not model data, so it is excluded.
        return {k: v for k, v in obj.__dict__.items() if k != '_raw'}

    return visible_attrs(first) == visible_attrs(second)
def model_constructor(model, model_spec, constructor_kwargs):
    """Constructor for the given model instance. Just assigns kwargs as attrs
    on the model based on the 'properties' in the model specification.

    :param model: Instance of a model type
    :type model: type
    :param model_spec: model specification
    :type model_spec: dict
    :param constructor_kwargs: kwargs sent in to the constructor invocation
    :type constructor_kwargs: dict
    :raises: AttributeError on constructor_kwargs that don't exist in the
        model specification's list of properties
    """
    arg_names = list(constructor_kwargs.keys())
    # Every declared property becomes an attribute; kwargs not supplied
    # default to None.
    for attr_name, attr_spec in iteritems(model_spec['properties']):
        if attr_name in arg_names:
            attr_value = constructor_kwargs[attr_name]
            arg_names.remove(attr_name)
        else:
            attr_value = None
        setattr(model, attr_name, attr_value)
    # Leftover kwargs are only legal when the spec allows
    # additionalProperties (the default when the key is absent).
    if arg_names and not model_spec.get('additionalProperties', True):
        raise AttributeError(
            "Model {0} does not have attributes for: {1}"
            .format(type(model), arg_names))
    # we've got additionalProperties to set on the model
    for arg_name in arg_names:
        setattr(model, arg_name, constructor_kwargs[arg_name])
    # stash so that dir(model) works
    model._additional_props = arg_names
def create_model_repr(model, model_spec):
    """Build the repr string ``ClassName(attr=value, ...)`` for *model*,
    with the declared attributes sorted by name.

    :param model: Instance of a model
    :param model_spec: model specification
    :type model_spec: dict
    :returns: repr string for the model
    """
    parts = []
    for attr_name in sorted(model_spec['properties']):
        parts.append("{0}={1!r}".format(attr_name, getattr(model, attr_name)))
    return "{0}({1})".format(model.__class__.__name__, ', '.join(parts))
def tag_models(spec_dict):
    """Tag #/definitions as being models with a 'x-model' key so that they can
    be recognized after jsonref inlines $refs.

    :param spec_dict: swagger spec in dict form
    """
    # TODO: unit test + docstring
    # TODO: Also Tag models defined via external referencing (read #45)
    models_dict = spec_dict.get('definitions', {})
    for model_name, model_spec in iteritems(models_dict):
        model_type = model_spec.get('type')
        # default type type to 'object' since most swagger specs don't bother
        # to specify this
        if model_type is None:
            model_type = model_spec['type'] = 'object'
        # only tag objects. Not all #/definitions map to a Model type - can
        # be primitive or array, for example
        if model_type == 'object':
            model_spec[MODEL_MARKER] = model_name
def fix_malformed_model_refs(spec):
    """jsonref doesn't understand { $ref: Category } so just fix it up to
    { $ref: #/definitions/Category } when the ref name matches a #/definitions
    name. Yes, this is hacky!

    :param spec: Swagger spec in dict form
    """
    # TODO: fix this in a sustainable way in a fork of jsonref and try to
    # upstream
    # TODO: unit test
    model_names = [model_name for model_name in spec.get('definitions', {})]

    # Walk the whole spec, rewriting bare model-name $refs in place.
    def descend(fragment):
        if is_dict_like(fragment):
            for k, v in iteritems(fragment):
                if k == '$ref' and v in model_names:
                    fragment[k] = "#/definitions/{0}".format(v)
                descend(v)
        elif is_list_like(fragment):
            for element in fragment:
                descend(element)

    descend(spec)
def is_model(spec):
    """Tell whether *spec* was tagged as a model type (see ``tag_models``).

    :param spec: specification for a swagger object
    :type spec: dict
    :return: True if the spec has been "marked" as a model type.
    """
    return MODEL_MARKER in spec.keys()
def create_model_docstring(model_spec):
    """Generate an 'Attributes:' docstring from a model's property specs.

    :param model_spec: specification for a model in dict form
    :rtype: string or unicode
    """
    s = 'Attributes:\n\n\t'
    attr_iter = iter(sorted(iteritems(model_spec['properties'])))
    # TODO: Add more stuff available in the spec - 'required', 'example', etc
    for attr_name, attr_spec in attr_iter:
        schema_type = attr_spec['type']
        if schema_type in SWAGGER_PRIMITIVES:
            # TODO: update to python types and take 'format' into account
            attr_type = schema_type
        elif schema_type == 'array':
            array_spec = attr_spec['items']
            if is_model(array_spec):
                array_type = array_spec[MODEL_MARKER]
            else:
                array_type = array_spec['type']
            attr_type = u'list of {0}'.format(array_type)
        elif is_model(attr_spec):
            attr_type = attr_spec[MODEL_MARKER]
        elif schema_type == 'object':
            attr_type = 'dict'
        else:
            # BUG FIX: attr_type used to be left unbound for schema types
            # matching none of the branches (e.g. 'file'), which raised
            # NameError on the format() line below. Fall back to the raw
            # schema type name.
            attr_type = schema_type
        s += u'{0}: {1}'.format(attr_name, attr_type)
        if attr_spec.get('description'):
            s += u' - {0}'.format(attr_spec['description'])
        s += '\n\t'
    return s
| {
"repo_name": "admetricks/bravado-core",
"path": "bravado_core/model.py",
"copies": "1",
"size": "8127",
"license": "bsd-3-clause",
"hash": 7683172662376320000,
"line_mean": 33.436440678,
"line_max": 80,
"alpha_frac": 0.6339362618,
"autogenerated": false,
"ratio": 3.9760273972602738,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00005231219920485457,
"num_lines": 236
} |
from functools import partial
from sklearn.base import ClassifierMixin, RegressorMixin
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.tree._tree import Tree, TREE_LEAF
import numpy as np
from sklearn_pmml.convert.model import EstimatorConverter, ModelMode, Schema
from sklearn_pmml.convert.features import Feature, CategoricalFeature, NumericFeature
import sklearn_pmml.pmml as pmml
from sklearn_pmml.convert.utils import estimator_to_converter
class DecisionTreeConverter(EstimatorConverter):
    """Converts a fitted sklearn decision tree (classifier or regressor)
    into a PMML TreeModel element."""

    # PMML attribute values / constants used while serializing the tree.
    SPLIT_BINARY = 'binarySplit'
    OPERATOR_LE = 'lessOrEqual'
    NODE_ROOT = 0
    OUTPUT_PROBABILITY = 'proba'
    # NOTE(review): OUTPUT_LABEL shares the value 'proba' with
    # OUTPUT_PROBABILITY; it looks like 'label' was intended -- confirm
    # before relying on prediction_output to distinguish the two cases.
    OUTPUT_LABEL = 'proba'

    def __init__(self, estimator, context, mode):
        """
        :param estimator: fitted sklearn tree estimator (must expose tree_)
        :param context: transformation context holding the schemas
        :param mode: ModelMode.CLASSIFICATION or ModelMode.REGRESSION
        """
        super(DecisionTreeConverter, self).__init__(estimator, context, mode)
        assert len(self.context.schemas[Schema.OUTPUT]) == 1, 'Only one-label trees are supported'
        assert hasattr(estimator, 'tree_'), 'Estimator has no tree_ attribute'
        if mode == ModelMode.CLASSIFICATION:
            # Categorical outputs are predicted as labels; other outputs of a
            # classifier are treated as probability predictions.
            if isinstance(self.context.schemas[Schema.OUTPUT][0], CategoricalFeature):
                self.prediction_output = self.OUTPUT_LABEL
            else:
                self.prediction_output = self.OUTPUT_PROBABILITY
            assert isinstance(self.estimator, ClassifierMixin), \
                'Only a classifier can be serialized in classification mode'
        if mode == ModelMode.REGRESSION:
            assert isinstance(self.context.schemas[Schema.OUTPUT][0], NumericFeature), \
                'Only a numeric feature can be an output of regression'
            assert isinstance(self.estimator, RegressorMixin), \
                'Only a regressor can be serialized in regression mode'
        assert estimator.tree_.value.shape[1] == len(self.context.schemas[Schema.OUTPUT]), \
            'Tree outputs {} results while the schema specifies {} output fields'.format(
                estimator.tree_.value.shape[1], len(self.context.schemas[Schema.OUTPUT]))
        # create hidden variables for each categorical output
        # TODO: this code is copied from the ClassifierConverter. To make things right, we need an abstract tree
        # TODO: converter and subclasses for classifier and regression converters
        internal_schema = list(filter(lambda x: isinstance(x, CategoricalFeature), self.context.schemas[Schema.OUTPUT]))
        self.context.schemas[Schema.INTERNAL] = internal_schema

    def _model(self):
        """Serialize the estimator into a pmml.TreeModel (no verification)."""
        assert Schema.NUMERIC in self.context.schemas, \
            'Either build transformation dictionary or provide {} schema in context'.format(Schema.NUMERIC)
        tm = pmml.TreeModel(functionName=self.model_function.value, splitCharacteristic=self.SPLIT_BINARY)
        tm.append(self.mining_schema())
        tm.append(self.output())
        # Recursively convert the sklearn tree starting from the root node.
        tm.Node = self._transform_node(
            self.estimator.tree_,
            self.NODE_ROOT,
            self.context.schemas[Schema.NUMERIC],
            self.context.schemas[Schema.OUTPUT][0]
        )
        return tm

    def model(self, verification_data=None):
        """Serialize the estimator, optionally attaching a ModelVerification
        element built from *verification_data*."""
        assert Schema.NUMERIC in self.context.schemas, \
            'Either build transformation dictionary or provide {} schema in context'.format(Schema.NUMERIC)
        tm = self._model()
        if verification_data is not None:
            tm.ModelVerification = self.model_verification(verification_data)
        return tm

    def _transform_node(self, tree, index, input_schema, output_feature, enter_condition=None):
        """
        Recursive mapping of sklearn Tree into PMML Node tree
        :return: Node element
        """
        assert isinstance(tree, Tree)
        assert isinstance(input_schema, list)
        assert isinstance(output_feature, Feature)
        node = pmml.Node()
        # The root enters unconditionally; child nodes carry their split
        # predicate as the entry condition.
        if enter_condition is None:
            node.append(pmml.True_())
        else:
            node.append(enter_condition)
        node.recordCount = tree.n_node_samples[index]
        if tree.children_left[index] != TREE_LEAF:
            # Internal node: recurse into both children. The left child gets
            # the 'lessOrEqual threshold' predicate; the right child is the
            # implicit complement.
            feature = input_schema[tree.feature[index]]
            assert isinstance(feature, Feature)
            left_child = self._transform_node(
                tree,
                tree.children_left[index],
                input_schema,
                output_feature,
                enter_condition=pmml.SimplePredicate(
                    field=feature.full_name, operator=DecisionTreeConverter.OPERATOR_LE, value_=tree.threshold[index]
                )
            )
            right_child = self._transform_node(tree, tree.children_right[index], input_schema, output_feature)
            if self.model_function == ModelMode.CLASSIFICATION:
                # Merge the children's score distributions and pick the class
                # with the highest resulting confidence as this node's score.
                score, score_prob = None, 0.0
                for i in range(len(tree.value[index][0])):
                    left_score = left_child.ScoreDistribution[i]
                    right_score = right_child.ScoreDistribution[i]
                    prob = float(left_score.recordCount + right_score.recordCount) / node.recordCount
                    node.append(pmml.ScoreDistribution(
                        recordCount=left_score.recordCount + right_score.recordCount,
                        value_=left_score.value_,
                        confidence=prob
                    ))
                    if score_prob < prob:
                        score, score_prob = left_score.value_, prob
                node.score = score
            node.append(left_child).append(right_child)
        else:
            # Leaf node: emit the score (and, for classification, per-class
            # confidences derived from the sample counts).
            node_value = np.array(tree.value[index][0])
            if self.model_function == ModelMode.CLASSIFICATION:
                probs = node_value / float(node_value.sum())
                for i in range(len(probs)):
                    node.append(pmml.ScoreDistribution(
                        confidence=probs[i],
                        recordCount=node_value[i],
                        value_=output_feature.from_number(i)
                    ))
                node.score = output_feature.from_number(probs.argmax())
            elif self.model_function == ModelMode.REGRESSION:
                node.score = node_value[0]
        return node

    def output(self):
        """
        Output section of PMML contains all model outputs.
        Classification tree output contains output variable as a label,
        and <variable>#<value> as a probability of a value for a variable
        :return: pmml.Output
        """
        output = pmml.Output()
        # the response variables
        for feature in self.context.schemas[Schema.OUTPUT]:
            output_field = pmml.OutputField(
                name=Schema.OUTPUT.extract_feature_name(feature),
                feature='predictedValue',
                optype=feature.optype.value,
                dataType=feature.data_type.value
            )
            output.append(output_field)
        # the probabilities for categories; should only be populated for classification jobs
        for feature in self.context.schemas[Schema.CATEGORIES]:
            output_field = pmml.OutputField(
                name=Schema.CATEGORIES.extract_feature_name(feature),
                optype=feature.optype.value,
                dataType=feature.data_type.value,
                feature='probability',
                targetField=Schema.INTERNAL.extract_feature_name(feature.namespace),
                value_=feature.name
            )
            output.append(output_field)
        return output
# Register the converter for both sklearn tree estimators, pre-binding the
# appropriate serialization mode.
estimator_to_converter[DecisionTreeClassifier] = partial(
    DecisionTreeConverter, mode=ModelMode.CLASSIFICATION
)
estimator_to_converter[DecisionTreeRegressor] = partial(
    DecisionTreeConverter, mode=ModelMode.REGRESSION
)
"repo_name": "YuHuaCheng/sklearn-pmml",
"path": "sklearn_pmml/convert/tree.py",
"copies": "2",
"size": "7712",
"license": "mit",
"hash": 8650804406269311000,
"line_mean": 44.3705882353,
"line_max": 120,
"alpha_frac": 0.625129668,
"autogenerated": false,
"ratio": 4.442396313364055,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6067525981364055,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from sklearn.tree import DecisionTreeClassifier
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raise_message
import pytest
from skopt import gbrt_minimize
from skopt import forest_minimize
from skopt.benchmarks import bench1
from skopt.benchmarks import bench2
from skopt.benchmarks import bench3
from skopt.benchmarks import bench4
# (name, minimizer) pairs exercised by the parametrized tests below; the
# forest minimizers are pre-bound to their base estimator type.
MINIMIZERS = [("ET", partial(forest_minimize, base_estimator='ET')),
              ("RF", partial(forest_minimize, base_estimator='RF')),
              ("gbrt", gbrt_minimize)]
@pytest.mark.fast_test
@pytest.mark.parametrize("base_estimator", [42, DecisionTreeClassifier()])
def test_forest_minimize_api(base_estimator):
    """forest_minimize must reject base_estimator values that are neither a
    recognized string nor a regressor."""
    # invalid string value
    assert_raise_message(ValueError,
                         "Valid strings for the base_estimator parameter",
                         forest_minimize, lambda x: 0., [],
                         base_estimator='abc')
    # not a string nor a regressor
    assert_raise_message(ValueError,
                         "has to be a regressor",
                         forest_minimize, lambda x: 0., [],
                         base_estimator=base_estimator)
def check_minimize(minimizer, func, y_opt, dimensions, margin,
                   n_calls, n_random_starts=10, x0=None):
    """Run *minimizer* on *func* for three random seeds and check that each
    found minimum is within *margin* of the known optimum *y_opt*.

    :param minimizer: skopt-style minimizer callable
    :param func: objective function
    :param y_opt: known optimal objective value
    :param dimensions: search-space specification
    :param margin: allowed slack above y_opt
    """
    for n in range(3):
        r = minimizer(
            func, dimensions, n_calls=n_calls, random_state=n,
            n_random_starts=n_random_starts, x0=x0)
        # Plain assert replaces sklearn.utils.testing.assert_less, which is
        # deprecated/removed in modern scikit-learn; pytest reports it fine.
        assert r.fun < y_opt + margin
@pytest.mark.slow_test
@pytest.mark.parametrize("name, minimizer", MINIMIZERS)
def test_tree_based_minimize(name, minimizer):
    """Each tree-based minimizer must find the known optimum of the benchmark
    functions within a small margin."""
    check_minimize(minimizer, bench1, 0.,
                   [(-2.0, 2.0)], 0.05, 25, 5)
    # XXX: We supply points at the edge of the search
    # space as an initial point to the minimizer.
    # This makes sure that the RF model can find the minimum even
    # if all the randomly sampled points are one side of the
    # the minimum, since for a decision tree any point greater than
    # max(sampled_points) would give a constant value.
    X0 = [[-5.6], [-5.8], [5.8], [5.6]]
    check_minimize(minimizer, bench2, -5,
                   [(-6.0, 6.0)], 0.1, 100, 10, X0)
    check_minimize(minimizer, bench3, -0.9,
                   [(-2.0, 2.0)], 0.05, 25)
    check_minimize(minimizer, bench4, 0.0,
                   [("-2", "-1", "0", "1", "2")], 0.05, 10, 1)
| {
"repo_name": "ccauet/scikit-optimize",
"path": "skopt/tests/test_forest_opt.py",
"copies": "1",
"size": "2442",
"license": "bsd-3-clause",
"hash": -9098783385870677000,
"line_mean": 37.15625,
"line_max": 74,
"alpha_frac": 0.6203931204,
"autogenerated": false,
"ratio": 3.5911764705882354,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47115695909882355,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from sqlalchemy.ext.associationproxy import association_proxy, AssociationProxy
from sqlalchemy.orm import Query, aliased, mapper, relationship, synonym
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.orm.scoping import ScopedSession
from sqlalchemy.orm.session import Session, object_session
from sqlalchemy.schema import Column, ForeignKey, Table
from sqlalchemy.sql.expression import and_, bindparam, select, exists
from sqlalchemy.sql.operators import ColumnOperators
from sqlalchemy.types import Integer
from pokedex.db import markdown
class LocalAssociationProxy(AssociationProxy, ColumnOperators):
    """An association proxy for names in the default language

    Over the regular association_proxy, this provides sorting and filtering
    capabilities, implemented via SQL subqueries.
    """
    def __clause_element__(self):
        # Scalar subquery selecting the translated value for this row in the
        # default language (the language id is a bind parameter supplied at
        # execution time).
        q = select([self.remote_attr])
        q = q.where(self.target_class.foreign_id == self.owning_class.id)
        q = q.where(self.target_class.local_language_id == bindparam('_default_language_id'))
        return q

    def operate(self, op, *other, **kwargs):
        # Same correlated subquery, but with the requested operator applied
        # and wrapped in EXISTS so the proxy works in filter() expressions.
        q = select([self.remote_attr])
        q = q.where(self.target_class.foreign_id == self.owning_class.id)
        q = q.where(self.target_class.local_language_id == bindparam('_default_language_id'))
        q = q.where(op(self.remote_attr, *other))
        return exists(q)
def _getset_factory_factory(column_name, string_getter):
"""Hello! I am a factory for creating getset_factory functions for SQLA.
I exist to avoid the closure-in-a-loop problem.
"""
def getset_factory(underlying_type, instance):
def getter(translations):
if translations is None:
return None
text = getattr(translations, column_name)
if text is None:
return text
session = object_session(translations)
language = translations.local_language
return string_getter(text, session, language)
def setter(translations, value):
# The string must be set on the Translation directly.
raise AttributeError("Cannot set %s" % column_name)
return getter, setter
return getset_factory
def create_translation_table(_table_name, foreign_class, relation_name,
                             language_class, relation_lazy='select', **kwargs):
    """Creates a table that represents some kind of data attached to the given
    foreign class, but translated across several languages.  Returns the new
    table's mapped class.  It won't be declarative, but it will have a
    `__table__` attribute so you can retrieve the Table object.

    `foreign_class` must have a `__singlename__`, currently only used to create
    the name of the foreign key column.

    Also supports the notion of a default language, which is attached to the
    session.  This is English by default, for historical and practical reasons.

    Usage looks like this:

        class Foo(Base): ...

        create_translation_table('foo_bars', Foo, 'bars',
            name = Column(...),
        )

        # Now you can do the following:
        foo.name
        foo.name_map['en']
        foo.foo_bars['en']
        foo.name_map['en'] = "new name"
        del foo.name_map['en']
        q.options(joinedload(Foo.bars_local))
        q.options(joinedload(Foo.bars))

    The following properties are added to the passed class:

    - `(relation_name)`, a relation to the new table.  It uses a dict-based
      collection class, where the keys are language identifiers and the values
      are rows in the created tables.
    - `(relation_name)_local`, a relation to the row in the new table that
      matches the current default language.
    - `(relation_name)_table`, the class created by this function.

    Note that these are distinct relations.  Even though the former necessarily
    includes the latter, SQLAlchemy doesn't treat them as linked; loading one
    will not load the other.  Modifying both within the same transaction has
    undefined behavior.

    For each column provided, the following additional attributes are added to
    Foo:

    - `(column)_map`, an association proxy onto `foo_bars`.
    - `(column)`, an association proxy onto `foo_bars_local`.

    Pardon the naming disparity, but the grammar suffers otherwise.

    Modifying these directly is not likely to be a good idea.

    For Markdown-formatted columns, `(column)_map` and `(column)` will give
    Markdown objects.
    """
    # n.b.: language_class only exists for the sake of tests, which sometimes
    # want to create tables entirely separate from the pokedex metadata
    foreign_key_name = foreign_class.__singlename__ + '_id'
    Translations = type(_table_name, (object,), {
        '_language_identifier': association_proxy('local_language', 'identifier'),
        'relation_name': relation_name,
        '__tablename__': _table_name,
    })
    # Create the table object
    table = Table(_table_name, foreign_class.__table__.metadata,
        Column(foreign_key_name, Integer, ForeignKey(foreign_class.id),
            primary_key=True, nullable=False,
            doc=u"ID of the %s these texts relate to" % foreign_class.__singlename__),
        Column('local_language_id', Integer, ForeignKey(language_class.id),
            primary_key=True, nullable=False,
            doc=u"Language these texts are in"),
    )
    Translations.__table__ = table
    # Add ye columns
    # Column objects have a _creation_order attribute in ascending order; use
    # this to get the (unordered) kwargs sorted correctly.
    # Bug fix: use sorted() rather than list.sort() on dict.items() -- on
    # Python 3, dict.items() returns a view with no .sort() method.
    kwitems = sorted(kwargs.items(), key=lambda kv: kv[1]._creation_order)
    for name, column in kwitems:
        column.name = name
        table.append_column(column)
    # Construct ye mapper
    mapper(Translations, table, properties={
        'foreign_id': synonym(foreign_key_name),
        'local_language': relationship(language_class,
            primaryjoin=table.c.local_language_id == language_class.id,
            innerjoin=True),
    })
    # Add full-table relations to the original class
    # Foo.bars_table
    setattr(foreign_class, relation_name + '_table', Translations)
    # Foo.bars
    setattr(foreign_class, relation_name, relationship(Translations,
        primaryjoin=foreign_class.id == Translations.foreign_id,
        collection_class=attribute_mapped_collection('local_language'),
    ))
    # Foo.bars_local
    # This is a bit clever; it uses bindparam() to make the join clause
    # modifiable on the fly. db sessions know the current language and
    # populate the bindparam.
    # The 'dummy' value is to trick SQLA; without it, SQLA thinks this
    # bindparam is just its own auto-generated clause and everything gets
    # fucked up.
    local_relation_name = relation_name + '_local'
    setattr(foreign_class, local_relation_name, relationship(Translations,
        primaryjoin=and_(
            Translations.foreign_id == foreign_class.id,
            Translations.local_language_id == bindparam('_default_language_id',
                value='dummy', type_=Integer, required=True),
        ),
        foreign_keys=[Translations.foreign_id, Translations.local_language_id],
        uselist=False,
        lazy=relation_lazy,
    ))
    # Add per-column proxies to the original class
    for name, column in kwitems:
        getset_factory = None
        string_getter = column.info.get('string_getter')
        if string_getter:
            getset_factory = _getset_factory_factory(
                column.name, string_getter)
        # Class.(column) -- accessor for the default language's value
        setattr(foreign_class, name,
            LocalAssociationProxy(local_relation_name, name,
                    getset_factory=getset_factory))
        # Class.(column)_map -- accessor for the language dict
        # Need a custom creator since Translations doesn't have an init, and
        # these are passed as *args anyway.
        # Bug fix: bind `name` as a default argument -- a plain closure would
        # late-bind to the loop's final value, so every column's creator
        # would have written to the *last* column.
        def creator(language, value, name=name):
            row = Translations()
            row.local_language = language
            setattr(row, name, value)
            return row
        setattr(foreign_class, name + '_map',
            association_proxy(relation_name, name, creator=creator,
                    getset_factory=getset_factory))
    # Add to the list of translation classes
    foreign_class.translation_classes.append(Translations)
    # Done
    return Translations
class MultilangQuery(Query):
    """Query subclass that fills in the session's default language id."""

    def __iter__(self):
        # Replace a missing or placeholder '_default_language_id' bind
        # parameter with the session's real default just before execution.
        params = self._params
        if params.get('_default_language_id', 'dummy') == 'dummy':
            params = params.copy()
            params['_default_language_id'] = self.session.default_language_id
            self._params = params
        return super(MultilangQuery, self).__iter__()
class MultilangSession(Session):
    """A tiny Session subclass that adds support for a default language.

    Needs to be used with `MultilangScopedSession`, below.
    """
    # Class-level defaults; may be overridden per instance via __init__ kwargs.
    default_language_id = None
    markdown_extension_class = markdown.PokedexLinkExtension

    def __init__(self, *args, **kwargs):
        # Only shadow the class attribute when the caller supplied a value.
        try:
            self.default_language_id = kwargs.pop('default_language_id')
        except KeyError:
            pass
        extension_cls = kwargs.pop('markdown_extension_class',
                                   self.markdown_extension_class)
        self.markdown_extension = extension_cls(self)
        kwargs.setdefault('query_cls', MultilangQuery)
        super(MultilangSession, self).__init__(*args, **kwargs)
class MultilangScopedSession(ScopedSession):
    """Dispatches language selection to the attached Session."""

    @property
    def default_language_id(self):
        """Read the default language id from the current session."""
        return self.registry().default_language_id

    @default_language_id.setter
    def default_language_id(self, language_id):
        """Pass a new default language id through to the current session."""
        self.registry().default_language_id = language_id

    @property
    def markdown_extension(self):
        """Expose the current session's Markdown extension."""
        return self.registry().markdown_extension
| {
"repo_name": "DaMouse404/pokedex",
"path": "pokedex/db/multilang.py",
"copies": "3",
"size": "10159",
"license": "mit",
"hash": -4088000991344343600,
"line_mean": 38.9960629921,
"line_max": 105,
"alpha_frac": 0.6685697411,
"autogenerated": false,
"ratio": 4.231153685964181,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00218809564346956,
"num_lines": 254
} |
from functools import partial
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import DDLElement
__all__ = ('CreateUpdateAtTrigger', 'DropUpdateAtTrigger',
'UPDATE_AT_DDL_STATEMENT')
# Name of the PL/pgSQL procedure the trigger executes.
UPDATE_AT_PROCEDURE = 'set_updated_at_timestamp()'
# Trigger name shared by every per-table updated_at trigger.
UPDATE_AT_TRIGGER = 'trigger_column_updated_at'
# DDL to create set_updated_at_timestamp function
# (stamps NEW.updated_at with the current UTC time on every UPDATE).
UPDATE_AT_DDL_STATEMENT = """\
CREATE OR REPLACE FUNCTION {procedure}
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW() AT TIME ZONE 'utc';
RETURN NEW;
END;
$$ LANGUAGE 'plpgsql';""".format(procedure=UPDATE_AT_PROCEDURE)
class DropTrigger(DDLElement):
    """DDL element that drops a named trigger from a table."""

    def __init__(self, name, trigger_name):
        # `name` is the (possibly schema-qualified) table name.
        self.name = name
        self.trigger_name = trigger_name
class CreateBeforeUpdateTrigger(DDLElement):
    """DDL element that creates a BEFORE UPDATE row trigger on a table."""

    def __init__(self, name, trigger_name, procedure):
        # `name` is the table name; `procedure` the procedure call to run.
        self.name = name
        self.trigger_name = trigger_name
        self.procedure = procedure
@compiles(CreateBeforeUpdateTrigger)
def _compile_create_before_update_trigger(element, compiler, **kw):
    """Render the CREATE TRIGGER ... BEFORE UPDATE DDL for `element`.

    Renamed from `compile`: that name shadowed the builtin and was silently
    rebound by the next `@compiles` function in this module.  Registration
    happens through the decorator, so the function name is incidental.
    """
    statement = """\
CREATE TRIGGER {trigger_name}
BEFORE UPDATE
ON {qualified_name}
FOR EACH ROW
EXECUTE PROCEDURE {procedure}""".format(
        trigger_name=element.trigger_name, qualified_name=element.name,
        procedure=element.procedure
    )
    return statement
@compiles(DropTrigger)
def _compile_drop_trigger(element, compiler, **kw):
    """Render the DROP TRIGGER DDL for `element`.

    Renamed from `compile`: the original name shadowed the builtin and
    clobbered the sibling compile function's module-level binding.
    """
    statement = 'DROP TRIGGER {trigger_name} ON {table_name}'.format(
        table_name=element.name, trigger_name=element.trigger_name
    )
    return statement
# Ready-made factories pre-bound to the shared updated_at procedure and
# trigger names; callers only supply the table name.
CreateUpdateAtTrigger = partial(
    CreateBeforeUpdateTrigger,
    procedure=UPDATE_AT_PROCEDURE,
    trigger_name=UPDATE_AT_TRIGGER,
)
DropUpdateAtTrigger = partial(DropTrigger, trigger_name=UPDATE_AT_TRIGGER)
| {
"repo_name": "portfoliome/pgawedge",
"path": "pgawedge/triggers.py",
"copies": "1",
"size": "1820",
"license": "mit",
"hash": -3821061356292938000,
"line_mean": 26.5757575758,
"line_max": 74,
"alpha_frac": 0.6835164835,
"autogenerated": false,
"ratio": 3.7142857142857144,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48978021977857145,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from sqlalchemy.orm import joinedload_all
from pyramid.config import Configurator
from clld.web.app import CtxFactoryQuery, menu_item
from clld.db.models.common import Contribution, Unit
from clld.interfaces import ICtxFactoryQuery
# we must make sure custom models are known at database initialization!
from tsezacp import models
# Dummy gettext marker: at runtime `_` is the identity function; the bare
# calls below exist only so the string-extraction tooling picks these
# labels up for translation.
_ = lambda s: s
_('Contributions')
_('Contribution')
_('Units')
_('Unit')
_('Sentence')
_('Sentences')
class AcpCtxFactoryQuery(CtxFactoryQuery):
    def refined_query(self, query, model, req):
        """Derived classes may override this method to add model-specific query
        refinements of their own.
        """
        # Eager-loading chains per model, to avoid N+1 queries when rendering.
        eager_chains = {
            Unit: (
                models.Morpheme.occurrences, models.MorphemeInWord.word,
                models.WordInLine.line, models.Line.text),
            Contribution: (
                models.Text.lines, models.Line.words,
                models.WordInLine.morphemes, models.MorphemeInWord.morpheme),
        }
        chain = eager_chains.get(model)
        if chain is not None:
            return query.options(joinedload_all(*chain))
        return query
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    config = Configurator(settings=settings)
    config.include('clldmpg')
    config.registry.registerUtility(AcpCtxFactoryQuery(), ICtxFactoryQuery)
    # Top navigation: (route name, menu item factory) pairs, in display order.
    menu_entries = [
        ('dataset', partial(menu_item, 'dataset', label='Home')),
        ('contributions', partial(menu_item, 'contributions')),
        ('units', partial(menu_item, 'units')),
        ('examples', partial(menu_item, 'sentences')),
    ]
    config.register_menu(*menu_entries)
    return config.make_wsgi_app()
| {
"repo_name": "clld/tsezacp",
"path": "tsezacp/__init__.py",
"copies": "1",
"size": "1654",
"license": "apache-2.0",
"hash": 312555078288269800,
"line_mean": 32.7551020408,
"line_max": 115,
"alpha_frac": 0.692261185,
"autogenerated": false,
"ratio": 3.742081447963801,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9926205757523969,
"avg_score": 0.001627375087966221,
"num_lines": 49
} |
from functools import partial
from src.agent.decisions import *
class Agent(object):
    """
    This is the core class that other intelligent agents inherit from.

    Agents are the first step towards reification -
        1. They have 'needs' they are weighted higher via their biases
        2. They have 'wants' that can sometimes weigh higher than their needs
        3. These 'needs' and 'wants' fluctuate with time AND state

    Agents follow the same procedure:
        OBSERVE - Get a perception vector (Note this requires interacting with the 'world')
        ORIENT  - Consider/calculate biases (Note this doesn't get evaluated until right before deciding)
        DECIDE  - Pick the maximal utility out of the created decision vector
        ACT     - Translate the picked symbol into the real world (Should cause side effects of some sort)
    """
    # Vector Factories
    # NOTE: These can be overriden with variations of vectors
    OBS = PerceptionVector
    DEC = DecisionVector
    BIA = BiasVector

    # NOTE: This also should be overwritten
    CLASS_MODIFIERS = {}

    def __init__(self):
        ###########
        # PRIVATE #
        ###########
        self._selected_action = None
        # Vectors.  Bug fix: instantiate via `self.OBS()` etc. rather than
        # `Agent.OBS()`, otherwise the documented subclass overrides of the
        # factory attributes would be ignored.
        self._obs_v = self.OBS()
        self._dec_v = self.DEC()
        self._bia_v = self.BIA()
        self._state = {}  # Internal variables relevant to calculating biases
        ##########
        # PUBLIC #
        ##########
        # NOTE: This is an object that the entity gets context from, not
        # necessarily the globe or a location or whatever.
        self.world = None
        self.needs = None
        self.wants = None

    def update_func(self):
        """
        Generates or updates internal state.
        """
        raise NotImplementedError

    def orient_func(self):
        """
        Generates the bias vector given internal state.
        """
        raise NotImplementedError

    def update(self):
        """
        This is a strictly internal updating function, meaning this is just a
        recalculation; no new data should have been added. These are things
        that will ultimately affect the bias vector, e.g. hunger/thirst/etc.

        To reflect changes in the 'world', use observe.
        """
        # The return value was previously bound to an unused local; the hook
        # is called purely for its side effects on internal state.
        self.update_func()

    def observe(self, obs, refresh=False):
        """
        This adds an observed entity into the perception vector.

        An Agent should never call this internally.
        """
        if refresh:
            # Bug fix: this previously assigned to `self.__obs_v`, which
            # name-mangles to `_Agent__obs_v` -- a different attribute from
            # the `_obs_v` read below, so `refresh=True` never actually
            # cleared the perception vector.
            self._obs_v = self.OBS()
        self._obs_v.add(obs)

    def orient(self):
        """
        Transforms needs/wants into a bias vector
        """
        self.orient_func()

    def decide(self):
        """
        This both sets the next action and informs whoever is asking that
        the agent has decided and wants to act.
        """
        decision = self._dec_v.decide()
        symbolic_reference = decision[1]
        # Bug fix: previously wrote to `self.__selected_action` (mangled),
        # leaving the `_selected_action` slot from __init__ untouched.
        self._selected_action = symbolic_reference
        return symbolic_reference

    def act(self):
        """
        At the end of the day, the agent has a symbol that decides what the
        next action is. How that symbol gets interpreted into code is at this
        point something I haven't fully planned out. So yolo.
        """
        # TODO: Ideally the selected action is a composed function, roughly:
        #     if self._selected_action:
        #         self._selected_action()
        # For now, just clear the pending action (the original body had this
        # code unreachable after an early return, using mangled names).
        self._selected_action = None
| {
"repo_name": "lexwraith/ThisIsBob",
"path": "src/agent/agent.py",
"copies": "1",
"size": "3586",
"license": "apache-2.0",
"hash": -2926316878970876400,
"line_mean": 29.6495726496,
"line_max": 100,
"alpha_frac": 0.6081985499,
"autogenerated": false,
"ratio": 4.522068095838588,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0010745730451307068,
"num_lines": 117
} |
from functools import partial
from stdnet.utils import zip, JSPLITTER, EMPTYJSON, iteritems
from stdnet.utils.exceptions import *
from .base import ModelType, ModelBase, raise_kwargs
from .session import Manager
__all__ = ['StdModel', 'create_model', 'model_to_dict']
class StdModel(ModelBase):
    '''A :class:`Model` which contains data in :class:`Field`. This represents
    the main class of :mod:`stdnet.odm` module.'''
    _model_type = 'object'
    abstract = True
    # Names of fields loaded from the backend, or None when the instance
    # carries all of its fields.
    _loadedfields = None

    def __init__(self, *args, **kwargs):
        meta = self._meta
        pkname = meta.pk.name
        # Primary key first (note: a second pop of the same key was removed
        # here -- `pkname` *is* `meta.pk.name`, so it was a no-op).
        setattr(self, pkname, kwargs.pop(pkname, None))
        for field in meta.scalarfields:
            field.set_value(self, kwargs.pop(field.name, None))
        attributes = meta.attributes
        if args:
            # Positional arguments map onto the leading declared attributes.
            N = len(args)
            if N > len(attributes):
                raise ValueError('Too many attributes')
            attrs, attributes = attributes[:N], attributes[N:]
            for name, value in zip(attrs, args):
                setattr(self, name, value)
        for name in attributes:
            setattr(self, name, kwargs.pop(name, None))
        if kwargs:
            # Anything left over is an unknown keyword.
            raise_kwargs(self, kwargs)

    @property
    def has_all_data(self):
        '''``True`` if this :class:`StdModel` instance has all back-end data
        loaded. This applies to persistent instances only. This property is
        used when committing changes. If all data is available, the commit
        will replace the previous object data entirely, otherwise it will
        only update it.'''
        return self.get_state().persistent and self._loadedfields is None

    def set(self, name, value):
        '''Set the field or attribute ``name`` to ``value``.'''
        meta = self._meta
        if name in meta.dfields:
            meta.dfields[name].set_value(self, value)
        elif name in meta.attributes:
            setattr(self, name, value)
        else:
            raise AttributeError('Model has no field/attribute %s' % name)

    def loadedfields(self):
        '''Generator of fields loaded from database'''
        if self._loadedfields is None:
            # Everything is loaded.
            for field in self._meta.scalarfields:
                yield field
        else:
            fields = self._meta.dfields
            processed = set()
            for name in self._loadedfields:
                if name in processed:
                    continue
                if name in fields:
                    processed.add(name)
                    yield fields[name]
                else:
                    # A nested name (e.g. data__key) maps back onto its
                    # JSON-object parent field.
                    name = name.split(JSPLITTER)[0]
                    if name in fields and name not in processed:
                        field = fields[name]
                        if field.type == 'json object':
                            processed.add(name)
                            yield field

    def fieldvalue_pairs(self, exclude_cache=False):
        '''Generator of fields,values pairs. Fields correspond to
        the ones which have been loaded (usually all of them) or
        not loaded but modified.

        Check the :ref:`load_only <performance-loadonly>` query function for
        more details.

        If *exclude_cache* evaluates to ``True``, fields with
        :attr:`Field.as_cache` attribute set to ``True`` won't be included.

        :rtype: a generator of two-elements tuples'''
        for field in self._meta.scalarfields:
            if exclude_cache and field.as_cache:
                continue
            name = field.attname
            if hasattr(self, name):
                yield field, getattr(self, name)

    def clear_cache_fields(self):
        '''Set cache fields to ``None``. Check :attr:`Field.as_cache`
        for information regarding fields which are considered cache.'''
        for field in self._meta.scalarfields:
            if field.as_cache:
                setattr(self, field.name, None)

    def get_attr_value(self, name):
        '''Retrieve the ``value`` for the attribute ``name``. The ``name``
        can be nested following the :ref:`double underscore
        <tutorial-underscore>` notation, for example ``group__name``. If the
        attribute is not available it raises :class:`AttributeError`.'''
        if name in self._meta.dfields:
            return self._meta.dfields[name].get_value(self)
        elif not name.startswith('__') and JSPLITTER in name:
            bits = name.split(JSPLITTER)
            fname = bits[0]
            if fname in self._meta.dfields:
                return self._meta.dfields[fname].get_value(self, *bits[1:])
            else:
                return getattr(self, name)
        else:
            return getattr(self, name)

    def clone(self, **data):
        '''Utility method for cloning the instance as a new object.

        :parameter data: additional data which overrides field data.
        :rtype: a new instance of this class.
        '''
        meta = self._meta
        session = self.session
        pkname = meta.pkname()
        pkvalue = data.pop(pkname, None)
        fields = self.todict(exclude_cache=True)
        fields.update(data)
        fields.pop('__dbdata__', None)
        obj = self._meta.make_object((pkvalue, None, fields))
        obj.session = session
        return obj

    def is_valid(self):
        '''Kick off the validation algorithm by checking all
        :attr:`StdModel.loadedfields` against their respective validation
        algorithm.

        :rtype: Boolean indicating if the model validates.'''
        return self._meta.is_valid(self)

    def todict(self, exclude_cache=False):
        '''Return a dictionary of serialised scalar field for pickling.

        If the *exclude_cache* flag is ``True``, fields with
        :attr:`Field.as_cache` attribute set to ``True`` will be excluded.'''
        odict = {}
        for field, value in self.fieldvalue_pairs(exclude_cache=exclude_cache):
            value = field.serialise(value)
            if value:
                odict[field.name] = value
        if self._dbdata and 'id' in self._dbdata:
            odict['__dbdata__'] = {'id': self._dbdata['id']}
        return odict

    def _to_json(self, exclude_cache):
        # Yield (name, json-value) pairs, primary key first.
        pk = self.pkvalue()
        if pk:
            yield self._meta.pkname(), pk
            for field, value in self.fieldvalue_pairs(exclude_cache=
                                                      exclude_cache):
                value = field.json_serialise(value)
                if value not in EMPTYJSON:
                    yield field.name, value

    def tojson(self, exclude_cache=True):
        '''Return a JSON serialisable dictionary representation.'''
        return dict(self._to_json(exclude_cache))

    def load_fields(self, *fields):
        '''Load extra fields to this :class:`StdModel`.'''
        if self._loadedfields is not None:
            if self.session is None:
                raise SessionNotAvailable('No session available')
            meta = self._meta
            kwargs = {meta.pkname(): self.pkvalue()}
            # Bug fix: `session` was an undefined name here (NameError when
            # the method actually ran); the instance's session must be used.
            # NOTE(review): load_only receives the whole tuple as a single
            # positional argument -- confirm the query API flattens it.
            obj = self.session.query(self).load_only(fields).get(**kwargs)
            for name in fields:
                field = meta.dfields.get(name)
                if field is not None:
                    setattr(self, field.attname,
                            getattr(obj, field.attname, None))

    def get_state_action(self):
        # Partially-loaded instances must merge, not replace, on commit.
        return 'override' if self._loadedfields is None else 'update'

    def load_related_model(self, name, load_only=None, dont_load=None):
        '''Load a the :class:`ForeignKey` field ``name`` if this is part of
        the fields of this model and if the related object is not already
        loaded. It is used by the lazy loading mechanism of
        :ref:`one-to-many <one-to-many>` relationships.

        :parameter name: the :attr:`Field.name` of the :class:`ForeignKey`
            to load.
        :parameter load_only: Optional parameters which specify the fields
            to load.
        :parameter dont_load: Optional parameters which specify the fields
            not to load.
        :return: the related :class:`StdModel` instance.
        '''
        field = self._meta.dfields.get(name)
        if not field:
            raise ValueError('Field "%s" not available' % name)
        elif not field.type == 'related object':
            raise ValueError('Field "%s" not a foreign key' % name)
        return self._load_related_model(field, load_only, dont_load)

    @classmethod
    def get_field(cls, name):
        '''Returns the :class:`Field` instance at ``name`` if available,
        otherwise it returns ``None``.'''
        return cls._meta.dfields.get(name)

    @classmethod
    def from_base64_data(cls, **kwargs):
        '''Load a :class:`StdModel` from possibly base64encoded data.

        This method is used to load models from data obtained from the
        :meth:`tojson` method.'''
        o = cls()
        meta = cls._meta
        pkname = meta.pkname()
        for name, value in iteritems(kwargs):
            if name == pkname:
                field = meta.pk
            elif name in meta.dfields:
                field = meta.dfields[name]
            else:
                # Silently skip unknown keys: the payload may contain data
                # for fields removed from the model.
                continue
            value = field.to_python(value)
            setattr(o, field.attname, value)
        return o

    @classmethod
    def pk(cls):
        '''Returns the primary key :class:`Field` for this model. This is a
        proxy for the :attr:`Metaclass.pk` attribute.'''
        return cls._meta.pk

    @classmethod
    def get_unique_instance(cls, items):
        # Exactly one result is required; zero raises DoesNotExist,
        # more than one raises QuerySetError.
        if items:
            if len(items) == 1:
                return items[0]
            else:
                raise QuerySetError('Non unique results')
        else:
            raise cls.DoesNotExist()

    # PICKLING SUPPORT
    def __getstate__(self):
        return (self.id, self._loadedfields, self.todict())

    def __setstate__(self, state):
        self._meta.load_state(self, state)

    # INTERNALS
    def _load_related_model(self, field, load_only=None, dont_load=None):
        # Return the cached related instance if present, otherwise query for
        # it (possibly asynchronously via the callback).
        cache_name = field.get_cache_name()
        if hasattr(self, cache_name):
            return getattr(self, cache_name)
        else:
            val = getattr(self, field.attname)
            if val is None:
                return self.__set_related_value(field)
            else:
                pkname = field.relmodel._meta.pkname()
                qs = self.session.query(field.relmodel)
                if load_only:
                    qs = qs.load_only(*load_only)
                if dont_load:
                    qs = qs.dont_load(*dont_load)
                callback = partial(self.__set_related_value, field)
                return qs.filter(**{pkname: val}).items(callback=callback)

    def __set_related_value(self, field, items=None):
        # Cache the (unique) related object; tolerate a missing object only
        # when the foreign key is optional.
        try:
            rel_obj = self.get_unique_instance(items)
        except self.DoesNotExist:
            if field.required:
                raise
            else:
                rel_obj = None
                setattr(self, field.attname, None)
        setattr(self, field.get_cache_name(), rel_obj)
        return rel_obj
def create_model(name, *attributes, **params):
    '''Create a :class:`Model` class for objects requiring
    and interface similar to :class:`StdModel`. We refers to this type
    of models as :ref:`local models <local-models>` since instances of such
    models are not persistent on a :class:`stdnet.BackendDataServer`.

    :param name: Name of the model class.
    :param attributes: positiona attribute names. These are the only attribute
        available to the model during the default constructor.
    :param params: key-valued parameter to pass to the :class:`ModelMeta`
        constructor.
    :return: a local :class:`Model` class.
    '''
    # Local models are never registered with a backend.
    params['register'] = False
    params['attributes'] = attributes
    manager_class = params.pop('manager_class', Manager)
    return ModelType(name, (StdModel,), {'manager_class': manager_class,
                                         'Meta': params})
def model_to_dict(instance, fields=None, exclude=None):
    '''Serialise ``instance`` into a plain dictionary.

    A :class:`StdModel` delegates to :meth:`StdModel.todict`; any other
    model-like object yields its fields' truthy defaults.
    (``fields`` and ``exclude`` are accepted for API compatibility but are
    currently unused.)'''
    if isinstance(instance, StdModel):
        return instance.todict()
    data = {}
    for field in instance._meta.fields:
        default = field.get_default()
        if default:
            data[field.name] = default
    return data
| {
"repo_name": "lsbardel/python-stdnet",
"path": "stdnet/odm/models.py",
"copies": "1",
"size": "11993",
"license": "bsd-3-clause",
"hash": 2181175628786513000,
"line_mean": 36.5956112853,
"line_max": 79,
"alpha_frac": 0.5959309597,
"autogenerated": false,
"ratio": 4.1875,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00004543182953977557,
"num_lines": 319
} |
from functools import partial
from torch import nn
from .myronenko import MyronenkoLayer, MyronenkoResidualBlock
from . import resnet
class BasicDecoder(nn.Module):
    """Decoder that alternates 1x1x1 feature reduction, interpolation
    upsampling, and residual refinement blocks."""

    def __init__(self, in_planes, layers, block=resnet.BasicBlock, plane_dilation=2, upsampling_mode="trilinear",
                 upsampling_scale=2):
        super(BasicDecoder, self).__init__()
        self.layers = nn.ModuleList()
        self.conv1s = nn.ModuleList()
        self.upsampling_mode = upsampling_mode
        self.upsampling_scale = upsampling_scale
        width = in_planes
        for n_blocks in layers:
            # Shrink the channel count before each upsampling stage.
            narrowed = int(width / plane_dilation)
            self.conv1s.append(resnet.conv1x1x1(in_planes=width,
                                                out_planes=narrowed))
            self.layers.append(nn.ModuleList(
                [block(narrowed, narrowed) for _ in range(n_blocks)]))
            width = narrowed

    def forward(self, x):
        for reduce_conv, blocks in zip(self.conv1s, self.layers):
            x = reduce_conv(x)
            x = nn.functional.interpolate(x, scale_factor=self.upsampling_scale,
                                          mode=self.upsampling_mode)
            for blk in blocks:
                x = blk(x)
        return x
class MyronenkoDecoder(nn.Module):
    """Decoder stack of (pre-conv, upsample, residual layer) stages.

    Bug fix: the ``use_transposed_convolutions`` branches were inverted
    relative to the sibling ``MirroredDecoder`` -- ``True`` selected fixed
    interpolation and ``False`` selected ``ConvTranspose3d``.  They now agree:
    ``True`` means learned upsampling via transposed convolutions.  The
    transposed convolutions are also kept in an ``nn.ModuleList`` so their
    parameters are registered with the module.

    (The ``kernal_size`` spelling is kept for interface compatibility.)
    """
    def __init__(self, base_width=32, layer_blocks=None, layer=MyronenkoLayer, block=MyronenkoResidualBlock,
                 upsampling_scale=2, feature_reduction_scale=2, upsampling_mode="trilinear", align_corners=False,
                 layer_widths=None, use_transposed_convolutions=False, kernal_size=3):
        super(MyronenkoDecoder, self).__init__()
        if layer_blocks is None:
            layer_blocks = [1, 1, 1]
        self.layers = nn.ModuleList()
        self.pre_upsampling_blocks = nn.ModuleList()
        # ConvTranspose3d modules carry parameters and must live in an
        # nn.ModuleList; interpolation partials are plain callables.
        if use_transposed_convolutions:
            self.upsampling_blocks = nn.ModuleList()
        else:
            self.upsampling_blocks = list()
        for i, n_blocks in enumerate(layer_blocks):
            # Depth counts down: the first stage is the deepest level.
            depth = len(layer_blocks) - (i + 1)
            if layer_widths is not None:
                out_width = layer_widths[depth]
                in_width = layer_widths[depth + 1]
            else:
                out_width = base_width * (feature_reduction_scale ** depth)
                in_width = out_width * feature_reduction_scale
            if use_transposed_convolutions:
                # Learned upsampling: the transposed conv reduces the width.
                self.pre_upsampling_blocks.append(nn.Sequential())
                self.upsampling_blocks.append(nn.ConvTranspose3d(in_width, out_width, kernel_size=kernal_size,
                                                                 stride=upsampling_scale, padding=1))
            else:
                # 1x1x1 conv reduces features, then fixed interpolation upsamples.
                self.pre_upsampling_blocks.append(resnet.conv1x1x1(in_width, out_width, stride=1))
                self.upsampling_blocks.append(partial(nn.functional.interpolate, scale_factor=upsampling_scale,
                                                      mode=upsampling_mode, align_corners=align_corners))
            self.layers.append(layer(n_blocks=n_blocks, block=block, in_planes=out_width, planes=out_width,
                                     kernal_size=kernal_size))

    def forward(self, x):
        # Each stage: optional feature reduction, upsample, then refine.
        for pre, up, lay in zip(self.pre_upsampling_blocks, self.upsampling_blocks, self.layers):
            x = pre(x)
            x = up(x)
            x = lay(x)
        return x
class MirroredDecoder(nn.Module):
    """Decoder whose per-depth widths mirror the matching encoder.

    Unlike ``MyronenkoDecoder``, each stage runs its residual layer *before*
    upsampling, and the final (depth 0) stage only maps in_width -> out_width
    with no upsampling at all.
    """
    def __init__(self, base_width=32, layer_blocks=None, layer=MyronenkoLayer, block=MyronenkoResidualBlock,
                 upsampling_scale=2, feature_reduction_scale=2, upsampling_mode="trilinear", align_corners=False,
                 layer_widths=None, use_transposed_convolutions=False, kernel_size=3):
        super(MirroredDecoder, self).__init__()
        self.use_transposed_convolutions = use_transposed_convolutions
        if layer_blocks is None:
            self.layer_blocks = [1, 1, 1, 1]
        else:
            self.layer_blocks = layer_blocks
        self.layers = nn.ModuleList()
        self.pre_upsampling_blocks = nn.ModuleList()
        # ConvTranspose3d modules carry parameters, so they need an
        # nn.ModuleList; interpolation partials are plain callables.
        if use_transposed_convolutions:
            self.upsampling_blocks = nn.ModuleList()
        else:
            self.upsampling_blocks = list()
        self.base_width = base_width
        self.feature_reduction_scale = feature_reduction_scale
        self.layer_widths = layer_widths
        for i, n_blocks in enumerate(self.layer_blocks):
            # Depth counts down: the first stage is the deepest level.
            depth = len(self.layer_blocks) - (i + 1)
            in_width, out_width = self.calculate_layer_widths(depth)
            if depth != 0:
                # Refine at in_width, then upsample while reducing features.
                self.layers.append(layer(n_blocks=n_blocks, block=block, in_planes=in_width, planes=in_width,
                                         kernel_size=kernel_size))
                if self.use_transposed_convolutions:
                    self.pre_upsampling_blocks.append(nn.Sequential())
                    self.upsampling_blocks.append(nn.ConvTranspose3d(in_width, out_width, kernel_size=kernel_size,
                                                                     stride=upsampling_scale, padding=1))
                else:
                    self.pre_upsampling_blocks.append(resnet.conv1x1x1(in_width, out_width, stride=1))
                    self.upsampling_blocks.append(partial(nn.functional.interpolate, scale_factor=upsampling_scale,
                                                          mode=upsampling_mode, align_corners=align_corners))
            else:
                # Final stage: map to the output width, no upsampling.
                self.layers.append(layer(n_blocks=n_blocks, block=block, in_planes=in_width, planes=out_width,
                                         kernel_size=kernel_size))

    def calculate_layer_widths(self, depth):
        """Return (in_width, out_width) for the stage at ``depth``."""
        if self.layer_widths is not None:
            out_width = self.layer_widths[depth]
            in_width = self.layer_widths[depth + 1]
        else:
            if depth > 0:
                out_width = int(self.base_width * (self.feature_reduction_scale ** (depth - 1)))
                in_width = out_width * self.feature_reduction_scale
            else:
                out_width = self.base_width
                in_width = self.base_width
        return in_width, out_width

    def forward(self, x):
        # The last layer has no matching upsampling blocks, hence [:-1].
        for pre, up, lay in zip(self.pre_upsampling_blocks, self.upsampling_blocks, self.layers[:-1]):
            x = lay(x)
            x = pre(x)
            x = up(x)
        x = self.layers[-1](x)
        return x
class Decoder1D(nn.Module):
    """1D decoder: interpolation upsampling followed by a 1x1 channel
    projection and residual refinement blocks, per stage."""

    def __init__(self, input_features, output_features, layer_blocks, layer_channels, block=resnet.BasicBlock1D,
                 kernel_size=3, upsample_factor=2, interpolation_mode="linear", interpolation_align_corners=True):
        super(Decoder1D, self).__init__()
        self.layers = nn.ModuleList()
        self.conv1s = nn.ModuleList()
        self.output_features = output_features
        self.interpolation_mode = interpolation_mode
        self.interpolation_align_corners = interpolation_align_corners
        self.upsample_factor = upsample_factor
        channels = input_features
        for n_blocks, stage_channels in zip(layer_blocks, layer_channels):
            # 1x1 conv adjusts the channel count for this stage.
            self.conv1s.append(nn.Conv1d(in_channels=channels, out_channels=stage_channels, kernel_size=1,
                                         stride=1, bias=False))
            self.layers.append(nn.ModuleList(
                [block(in_channels=stage_channels, channels=stage_channels,
                       kernel_size=kernel_size, stride=1)
                 for _ in range(n_blocks)]))
            channels = stage_channels

    def forward(self, x):
        for blocks, project in zip(self.layers, self.conv1s):
            # Stretch the sequence dimension, then project and refine.
            x = nn.functional.interpolate(x,
                                          size=(x.shape[-1] * self.upsample_factor),
                                          mode=self.interpolation_mode,
                                          align_corners=self.interpolation_align_corners)
            x = project(x)
            for blk in blocks:
                x = blk(x)
        return x
| {
"repo_name": "ellisdg/3DUnetCNN",
"path": "unet3d/models/pytorch/classification/decoder.py",
"copies": "1",
"size": "8019",
"license": "mit",
"hash": 8371905371162251000,
"line_mean": 48.5,
"line_max": 119,
"alpha_frac": 0.5771293179,
"autogenerated": false,
"ratio": 3.8645783132530123,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9932572699174194,
"avg_score": 0.001826986395763769,
"num_lines": 162
} |
from functools import partial
from .translation import _
class OriginalQuerysetMixin(object):
    """Admin mixin that bypasses the model's default ``objects`` manager."""

    def get_queryset(self, request):
        """Returns a QuerySet of all model instances without default
        manager 'objects' that can be edited by the
        admin site. This is used by changelist_view."""
        queryset = self.model._base_manager.get_queryset()
        ordering = self.get_ordering(request)
        # Apply the admin's ordering only when one is configured.
        return queryset.order_by(*ordering) if ordering else queryset
class CopyActionMixin(object):
    """Admin action that duplicates settings objects along with their
    related field and context rows."""

    def copy_object(self, request, queryset):
        # TODO: rewrite for all kind instances
        for original in queryset:
            # Grab the related managers' querysets before re-saving; they are
            # evaluated lazily below (original behaviour preserved).
            related_fields = original.fields.all()
            related_contexts = original.contexts.all()
            # Clearing the pk and saving inserts a brand new row.
            original.id = None
            original.save()
            for related in related_fields:
                related.id = None
                related.settings_id = original.id
                related.save()
            for related in related_contexts:
                related.id = None
                related.settings_id = original.id
                related.save()
        self.message_user(request, _('Copies successfully created'))

    copy_object.short_description = _('Create a copy of settings')
class ObjectInlineMixin(object):
    """Inline admin mixin that forwards the parent object to inline forms."""

    def get_formset(self, request, obj=None, **kwargs):
        """Pass parent object to inline form via ``formfield_for_dbfield``."""
        callback = partial(self.formfield_for_dbfield, request=request, obj=obj)
        kwargs['formfield_callback'] = callback
        parent = super(ObjectInlineMixin, self)
        return parent.get_formset(request, obj, **kwargs)
| {
"repo_name": "mtrgroup/django-mtr-utils",
"path": "mtr/utils/admin.py",
"copies": "1",
"size": "1560",
"license": "mit",
"hash": -5061417589292999000,
"line_mean": 27.8888888889,
"line_max": 68,
"alpha_frac": 0.5955128205,
"autogenerated": false,
"ratio": 4.521739130434782,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 54
} |
from functools import partial
from twisted.internet.defer import inlineCallbacks, succeed, fail
from vumi.tests.helpers import VumiTestCase
from vumi.transports.failures import PermanentFailure
from vumi.transports.parlayx_ussd import ParlayXUSSDTransport
from vumi.transports.parlayx_ussd.client import PolicyException, ServiceException
from vumi.transports.parlayx.soaputil import perform_soap_request
from vumi.transports.parlayx_ussd.tests.utils import (
create_ussd_reception_element)
from vumi.transports.tests.helpers import TransportHelper
class MockParlayXClient(object):
    """
    A mock ``ParlayXClient`` that doesn't involve real HTTP requests but
    instead uses canned responses.
    """
    def __init__(self, start_ussd_notification=None, stop_ussd_notification=None,
                 send_ussd=None):
        """Store one zero-argument canned response per client method;
        unspecified ones default to an immediately-succeeding Deferred."""
        overrides = {
            'start_ussd_notification': start_ussd_notification,
            'stop_ussd_notification': stop_ussd_notification,
            'send_ussd': send_ussd,
        }
        self.responses = {}
        for method_name, response in overrides.items():
            if response is None:
                default_result = (
                    'request_message_id' if method_name == 'send_ussd' else None)
                response = partial(succeed, default_result)
            self.responses[method_name] = response
        self.calls = []

    def _invoke_response(self, name, args):
        """
        Invoke the canned response for the method name ``name`` and log the
        invocation.
        """
        self.calls.append((name, args))
        return self.responses[name]()

    def start_ussd_notification(self):
        return self._invoke_response('start_ussd_notification', [])

    def stop_ussd_notification(self):
        return self._invoke_response('stop_ussd_notification', [])

    def send_ussd(self, to_addr, content, linkid, message_id):
        return self._invoke_response(
            'send_ussd', [to_addr, content, linkid, message_id])
class TestParlayXUSSDTransport(VumiTestCase):
    """
    Tests for `vumi.transports.parlayx_ussd.ParlayXUSSDTransport`.
    """
    @inlineCallbacks
    def setUp(self):
        # TODO: Get rid of this hardcoded port number.
        self.port = 19999
        config = {
            'web_notification_path': '/hello',
            'web_notification_port': self.port,
            'notification_endpoint_uri': 'endpoint_uri',
            'short_code': '54321',
            'remote_send_uri': 'send_uri',
            'remote_notification_uri': 'notification_uri',
        }
        self.tx_helper = self.add_helper(TransportHelper(ParlayXUSSDTransport))
        # URI of the transport's local SOAP notification endpoint.
        self.uri = 'http://127.0.0.1:%s%s' % (
            self.port, config['web_notification_path'])

        # Replace the real ParlayX client with the canned-response mock.
        def _create_client(transport, config):
            return MockParlayXClient()
        self.patch(
            self.tx_helper.transport_class, '_create_client',
            _create_client)
        self.transport = yield self.tx_helper.get_transport(
            config, start=False)

    @inlineCallbacks
    def test_ack(self):
        """
        Basic message delivery.
        """
        yield self.transport.startWorker()
        msg = yield self.tx_helper.make_dispatch_outbound("hi")
        [event] = self.tx_helper.get_dispatched_events()
        self.assertEqual(event['event_type'], 'ack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
        # The mock client records each call as (name, args); linkid is the
        # 4th positional argument of send_ussd.
        client = self.transport._parlayx_client
        self.assertEqual(1, len(client.calls))
        linkid = client.calls[0][1][3]
        self.assertIdentical(None, linkid)

    @inlineCallbacks
    def test_ack_linkid(self):
        """
        Basic message delivery uses stored ``linkid`` from transport metadata
        if available.
        """
        yield self.transport.startWorker()
        msg = yield self.tx_helper.make_dispatch_outbound(
            "hi", transport_metadata={'linkid': 'linkid'})
        [event] = self.tx_helper.get_dispatched_events()
        self.assertEqual(event['event_type'], 'ack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
        client = self.transport._parlayx_client
        self.assertEqual(1, len(client.calls))
        linkid = client.calls[0][1][3]
        self.assertEqual('linkid', linkid)

    @inlineCallbacks
    def test_nack(self):
        """
        Exceptions raised in an outbound message handler result in the message
        delivery failing, and a failure event being logged.
        """
        # Re-patch the client factory so send_ussd fails with ValueError.
        def _create_client(transport, config):
            return MockParlayXClient(
                send_ussd=partial(fail, ValueError('failed')))
        self.patch(
            self.tx_helper.transport_class, '_create_client',
            _create_client)
        yield self.transport.startWorker()
        msg = yield self.tx_helper.make_dispatch_outbound("hi")
        [event] = self.tx_helper.get_dispatched_events()
        self.assertEqual(event['event_type'], 'nack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
        self.assertEqual(event['nack_reason'], 'failed')
        failures = self.flushLoggedErrors(ValueError)
        # Logged once by the transport and once by Twisted for being unhandled.
        self.assertEqual(2, len(failures))

    @inlineCallbacks
    def _test_nack_permanent(self, expected_exception):
        """
        The expected exception, when raised in an outbound message handler,
        results in a `PermanentFailure` and is logged along with the original
        exception.
        """
        def _create_client(transport, config):
            return MockParlayXClient(
                send_ussd=partial(
                    fail, expected_exception('soapenv:Client', 'failed')))
        self.patch(
            self.tx_helper.transport_class, '_create_client',
            _create_client)
        yield self.transport.startWorker()
        msg = yield self.tx_helper.make_dispatch_outbound("hi")
        [event] = self.tx_helper.get_dispatched_events()
        self.assertEqual(event['event_type'], 'nack')
        self.assertEqual(event['user_message_id'], msg['message_id'])
        self.assertEqual(event['nack_reason'], 'failed')
        # Both the original exception and the PermanentFailure are logged.
        failures = self.flushLoggedErrors(expected_exception, PermanentFailure)
        self.assertEqual(2, len(failures))

    def test_nack_service_exception(self):
        """
        When `ServiceException` is raised in an outbound message handler, it
        results in a `PermanentFailure` exception.
        """
        return self._test_nack_permanent(ServiceException)

    def test_nack_policy_exception(self):
        """
        When `PolicyException` is raised in an outbound message handler, it
        results in a `PermanentFailure` exception.
        """
        return self._test_nack_permanent(PolicyException)

    @inlineCallbacks
    def test_receive_ussd(self):
        """
        When a text message is submitted to the Vumi ParlayX
        ``notifyUssdReception`` SOAP endpoint, a message is
        published containing the message identifier, message content, from
        address and to address that accurately match what was submitted.
        """
        yield self.transport.startWorker()
        body = create_ussd_reception_element(
            '0', '123456', '1', '*909*100#', '27117654321', '909')
        yield perform_soap_request(self.uri, '', body)
        [msg] = self.tx_helper.get_dispatched_inbound()
        self.assertEqual(
            ( 'message', '27117654321', '54321'),
            (msg['content'], msg['from_addr'],
             msg['to_addr']))
| {
"repo_name": "TouK/vumi",
"path": "vumi/transports/parlayx_ussd/tests/test_parlayx.py",
"copies": "1",
"size": "7652",
"license": "bsd-3-clause",
"hash": 1654363236144027000,
"line_mean": 37.4522613065,
"line_max": 81,
"alpha_frac": 0.6301620491,
"autogenerated": false,
"ratio": 3.960662525879917,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0052481921804142665,
"num_lines": 199
} |
from functools import partial
from twisted.internet import defer, task
from twisted.internet.error import ConnectionDone
from twisted.internet.protocol import DatagramProtocol
from twisted.names import dns
from twisted.python.failure import Failure
from twisted.trial import unittest
from dnsagent.app import App
from dnsagent.resolver.bugfix import BugFixResolver, BugFixDNSDatagramProtocol
from dnsagent.resolver.extended import (
ExtendedResolver, TCPExtendedResolver, ExtendedDNSDatagramProtocol,
)
from dnsagent.server import BugFixDNSServerFactory
from dnsagent.tests import BaseTestResolver, FakeResolver, iplist, FakeTransport
from dnsagent.utils import get_reactor, sequence_deferred_call, async_sleep
class LoseConnectionDNSServerFactory(BugFixDNSServerFactory):
    """DNS server factory that drops the connection after a fixed number of
    replies, for exercising client connection-loss handling."""

    countdown = 100  # replies remaining before the connection is dropped

    def sendReply(self, protocol, message, address):
        self.countdown -= 1
        if self.countdown > 0:
            super().sendReply(protocol, message, address)
        else:
            protocol.transport.loseConnection()
class TCPOnlyBugFixResolver(BugFixResolver):
    """Resolver variant that forces every UDP query over TCP."""

    def queryUDP(self, queries, timeout=None):
        # The UDP timeout is intentionally ignored; delegate straight to TCP.
        return self.queryTCP(queries)
class TestTCPBugFixResolver(BaseTestResolver):
    """TCP resolution tests against a local fake DNS server whose
    connection can be deliberately dropped."""

    resolver_cls = TCPOnlyBugFixResolver
    server_addr = ('127.0.0.53', 5353)

    # Shared fake upstream with two canned answers and a small delay so
    # concurrent queries can be observed while still in flight.
    fake_resolver = FakeResolver()
    fake_resolver.set_answer('asdf', '1.2.3.4')
    fake_resolver.set_answer('fdsa', '4.3.2.1')
    fake_resolver.delay = 0.01

    def setUp(self):
        super().setUp()
        self.server = LoseConnectionDNSServerFactory(resolver=self.fake_resolver)
        self.app = App()
        self.app.start((self.server, [self.server_addr]))
        self.resolver = self.resolver_cls(servers=[self.server_addr])
        self.reactor = get_reactor()

    @defer.inlineCallbacks
    def tearDown(self):
        # Always stop the app, even if the base tearDown fails.
        try:
            return (yield super().tearDown())
        finally:
            yield self.app.stop()

    def test_success(self):
        """Two concurrent queries resolve; resolver state is cleaned up and
        the TCP connection eventually goes away."""
        def check_finished_state():
            assert not self.resolver.pending
            assert not self.resolver.tcp_waiting

        def check_disconnected():
            assert not self.resolver.tcp_protocol

        def check_waiting_state():
            assert not self.resolver.pending
            assert len(self.resolver.tcp_waiting) == 2

        # Thread 1: run both queries, then verify cleanup and disconnect.
        thread1 = sequence_deferred_call([
            partial(defer.DeferredList, [
                self.check_a('asdf', iplist('1.2.3.4')),
                self.check_a('fdsa', iplist('4.3.2.1')),
            ], fireOnOneErrback=True),
            check_finished_state,
            partial(async_sleep, 0.002),
            check_disconnected,
        ])
        # Thread 2: peek at the in-flight waiting state part-way through.
        thread2 = sequence_deferred_call([
            partial(async_sleep, 0.005),
            check_waiting_state,
        ])
        return defer.DeferredList([thread1, thread2], fireOnOneErrback=True)

    def test_connection_lost(self):
        """Dropping the connection after the first reply fails the second
        outstanding query with ConnectionDone."""
        self.server.countdown = 2
        self.check_a('asdf', iplist('1.2.3.4'))
        self.check_a('fdsa', fail=ConnectionDone)

    def test_connection_failed_reconnect(self):
        """A failed connection attempt fails waiting queries, but later
        queries trigger a reconnect and succeed."""
        class MyException(Exception):
            pass
        self.check_a('asdf', fail=MyException)
        self.resolver.factory.clientConnectionFailed(None, Failure(MyException('asdf')))
        # reconnect
        self.check_a('fdsa', iplist('4.3.2.1'))
class TestTCPBugFixResolverWithExtended(TestTCPBugFixResolver):
    """Re-run the TCP bug-fix tests against TCPExtendedResolver."""
    resolver_cls = TCPExtendedResolver
def swallow(ignore):
    """Errback that silently discards its argument and returns None."""
    return None
class TestDNSDatagramProtocolResendsExpiration(unittest.TestCase):
    """Verify that entries in the datagram protocol's ``resends`` table
    expire on schedule and are cleaned up on protocol shutdown."""

    protocol_cls = BugFixDNSDatagramProtocol

    # Local UDP socket that simply swallows the queries it receives.
    discard_host = '127.0.3.3'
    discard_port = 3456
    discard_addr = (discard_host, discard_port)
    query = dns.Query(b'asdf', dns.A, dns.IN)

    def setUp(self):
        # Drive timeouts manually with a fake clock.
        self.clock = task.Clock()
        self.protocol = self.protocol_cls(FakeResolver(), reactor=self.clock)
        self.protocol.makeConnection(FakeTransport())
        self.discard = get_reactor().listenUDP(
            self.discard_port, DatagramProtocol(), interface=self.discard_host,
        )

    def tearDown(self):
        # Each test must have consumed every scheduled call.
        assert not self.clock.calls
        return defer.maybeDeferred(self.discard.stopListening)

    def make_query(self):
        # Fixed id=123 so tests can inspect liveMessages/resends directly;
        # the query will time out, so the failure is discarded.
        d = self.protocol.query(self.discard_addr, [self.query], timeout=5, id=123)
        return d.addErrback(swallow)

    def test_expire(self):
        """A timed-out query leaves a resend entry that expires later."""
        self.make_query()
        assert 123 in self.protocol.liveMessages
        assert 123 in self.protocol.resends
        self.clock.advance(6)  # past the 5s query timeout
        assert 123 not in self.protocol.liveMessages
        assert 123 in self.protocol.resends
        self.clock.advance(60)  # after a further 60s the entry is gone
        assert 123 not in self.protocol.resends
        self.protocol.doStop()

    def test_protocol_stop(self):
        """Stopping the protocol clears pending resend entries."""
        self.make_query()
        self.clock.advance(6)
        self.protocol.doStop()
        assert not self.protocol.resends

    def test_reissue(self):
        """Re-issuing the same query id leaves no stray scheduled calls."""
        self.make_query()
        self.clock.advance(6)
        self.make_query()
        self.protocol.doStop()
        assert not self.clock.calls
class TestDNSDatagramProtocolResendsExpirationWithExtended(
    TestDNSDatagramProtocolResendsExpiration
):
    """Re-run the resend-expiration tests with the extended protocol."""
    protocol_cls = ExtendedDNSDatagramProtocol
class DropRequestDNSServerFactory(BugFixDNSServerFactory):
    """DNS server factory that silently drops the first ``drops`` replies,
    to exercise client retry/timeout behaviour."""

    drops = 1  # number of replies still to be dropped

    def sendReply(self, protocol, message, address):
        if self.drops > 0:
            # Swallow this reply instead of sending it.
            self.drops -= 1
        else:
            super().sendReply(protocol, message, address)
class TestReissue(unittest.TestCase):
    """Query reissue/timeout tests against a server that drops the first
    request."""

    resolver_cls = BugFixResolver
    server_addr = ('127.0.0.54', 5454)
    fake_resolver = FakeResolver()
    fake_resolver.set_answer('asdf', '1.2.3.4')
    query = dns.Query(b'asdf', dns.A, dns.IN)

    def setUp(self):
        server = DropRequestDNSServerFactory(resolver=self.fake_resolver)
        self.app = App()
        self.app.start((server, [self.server_addr]))
        self.resolver = self.resolver_cls(servers=[self.server_addr])

    def tearDown(self):
        return self.app.stop()

    def test_timeout(self):
        """With a single short timeout the query fails: the only request
        gets dropped by the server."""
        def check_failure(result: Failure):
            assert isinstance(result, Failure)
            result.trap(defer.TimeoutError)
        d = self.resolver.query(self.query, timeout=[0.1])
        return d.addBoth(check_failure)

    def test_reissue(self):
        """With two timeout slots the reissued query succeeds."""
        return self.resolver.query(self.query, timeout=[0.01, 0.02])
class TestReissueWithExtended(TestReissue):
    """Re-run the reissue tests against ExtendedResolver."""
    resolver_cls = ExtendedResolver
# TODO: TestUDPBugFixResolver
# TODO: test dnsagent.resolver.bugfix.BugFixDNSProtocol#dataReceived

# Remove the imported base class from the module namespace so the test
# runner does not collect and run its tests directly from this module.
del BaseTestResolver
| {
"repo_name": "account-login/dnsagent",
"path": "dnsagent/tests/test_bugfix_resolver.py",
"copies": "1",
"size": "6622",
"license": "mit",
"hash": -1528304451844030700,
"line_mean": 28.3008849558,
"line_max": 88,
"alpha_frac": 0.6650558744,
"autogenerated": false,
"ratio": 3.838840579710145,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5003896454110145,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from twisted.python import log
from bl.debug import DEBUG
class PageWidget(object):
    """Base OSC widget bound to one page of numbered controls.

    ``addressPattern`` (supplied by subclasses, e.g. '/%d/fader%d') is
    formatted with (page, index) to build the OSC address for each callback.
    """

    addressPattern = None  # subclasses supply an OSC address format string

    def __init__(self, receiver, callbacks=None, page=1):
        """
        :param receiver: OSC receiver exposing addCallback/removeCallback.
        :param callbacks: sequence of one-argument callables, one per
            control (1-based index in the OSC address). Defaults to empty.
        :param page: page number substituted into ``addressPattern``.
        """
        self.receiver = receiver
        self.page = page
        # Fix: the original used a mutable default argument (callbacks=[]),
        # sharing one list across all instances; use a None sentinel instead.
        self.callbacks = callbacks if callbacks is not None else []

    def _callback(self, cb, node, message, address):
        # These widgets deliver a single float OSC argument.
        (v,) = message.arguments
        v = float(v)
        if DEBUG:
            cn = self.__class__.__name__
            log.msg('[%s] %s %s' % (cn, node, v))
        cb(v)

    def attach(self):
        """Register one receiver callback per configured control.

        :returns: self, to allow ``widget = Cls(...).attach()`` chaining.
        """
        self._recv_callbacks = []
        for (idx, cb) in enumerate(self.callbacks):
            node = self.addressPattern % (self.page, idx + 1)
            recvcb = partial(self._callback, cb, node)
            self._recv_callbacks.append((node, recvcb))
            self.receiver.addCallback(node, recvcb)
        return self

    def detach(self):
        """Unregister every callback added by attach()."""
        for (node, recvcb) in self._recv_callbacks:
            self.receiver.removeCallback(node, recvcb)
class GridWidget(PageWidget):
    """PageWidget whose callbacks form a 2-D grid (rows of columns)."""

    def attach(self):
        """Register one receiver callback per grid cell; returns self."""
        self._recv_callbacks = []
        for (row_idx, row) in enumerate(self.callbacks):
            for (col_idx, cell_cb) in enumerate(row):
                # Address pattern takes (page, row, column), all 1-based.
                node = self.addressPattern % (self.page, row_idx + 1, col_idx + 1)
                handler = partial(self._callback, cell_cb, node)
                self._recv_callbacks.append((node, handler))
                self.receiver.addCallback(node, handler)
        return self
class Push(PageWidget):
    """Momentary push button: '/<page>/push<n>'."""
    addressPattern = '/%d/push%d'


class Fader(PageWidget):
    """Single fader: '/<page>/fader<n>'."""
    addressPattern = '/%d/fader%d'


class MultiFader(PageWidget):
    """Bank of faders addressed as '/<page>/multifader/<n>'."""
    addressPattern = '/%d/multifader/%d'


class Rotary(PageWidget):
    """Rotary knob: '/<page>/rotary<n>'."""
    addressPattern = '/%d/rotary%d'


class Toggle(PageWidget):
    """On/off toggle: '/<page>/toggle<n>'."""
    addressPattern = '/%d/toggle%d'


class MultiToggle(GridWidget):
    """Toggle grid addressed as '/<page>/multitoggle/<row>/<col>'."""
    addressPattern = '/%d/multitoggle/%d/%d'


class MultiFaderGrid(GridWidget):
    """Fader grid addressed as '/<page>/multifader<row>/<col>'."""
    addressPattern = '/%d/multifader%d/%d'
class XY(object):
    """Two-argument (x, y) OSC widget, e.g. an XY pad."""

    def __init__(self, receiver, callback=lambda x, y: None, callbacks=None,
                 page=1):
        """Use ``callbacks`` for one handler per numbered pad, or the
        single ``callback`` for an unnumbered '/<page>/xy' pad."""
        self.receiver = receiver
        self.callbacks = callbacks
        self.callback = callback
        self.page = page

    def attach(self):
        """Register pad handlers with the receiver; returns self."""
        self._recv_callbacks = []
        if not self.callbacks:
            # Single unnumbered pad for this page.
            node = '/%d/xy' % self.page
            handler = partial(self._callback, self.callback, node)
            self._recv_callbacks.append((node, handler))
            self.receiver.addCallback(node, handler)
        else:
            # One numbered pad per callback, 1-based.
            for (idx, pad_cb) in enumerate(self.callbacks):
                node = '/%d/xy%d' % (self.page, idx + 1)
                handler = partial(self._callback, pad_cb, node)
                self._recv_callbacks.append((node, handler))
                self.receiver.addCallback(node, handler)
        return self

    def _callback(self, cb, node, message, address):
        # XY messages carry a pair of float arguments.
        (x, y) = message.arguments
        x = float(x)
        y = float(y)
        if DEBUG:
            cn = self.__class__.__name__
            log.msg('[%s] %s %s %s' % (cn, node, x, y))
        cb(x, y)
class TouchOSCStepSequencer:
    """Bridge between a TouchOSC layout and a step sequencer.

    Mirrors the sequencer's on/off grid and velocities onto multitoggle
    and multifader widgets, and advances LEDs on the sequencer's clock.
    """

    ledAddressPattern = '/%d/led%d'

    def __init__(self, receiver, send, stepSequencer, page=1):
        """
        :param receiver: OSC receiver the widgets listen on.
        :param send: callable used to send OSC messages back to the UI.
        :param stepSequencer: sequencer exposing beats, notes, on_off,
            velocity, step, setStep, setVelocity and a clock.
        :param page: TouchOSC page hosting the widgets.
        """
        self.stepSequencer = ss = stepSequencer
        self.receiver = receiver
        self.send = send
        self.page = page
        # Listeners for multifader and toggle widgets
        self._multifader = MultiFader(receiver,
            [partial(self.setVelocity, idx) for idx in range(ss.beats)],
            page)
        self._multitoggle = MultiToggle(receiver,
            [[partial(ss.setStep, c, r) for c in range(ss.beats)]
             for r in range(len(ss.notes))], page)

    def attach(self):
        """Push current sequencer state to the UI, attach widget listeners,
        and start the LED/UI refresh schedules. Returns self."""
        ss = self.stepSequencer
        clock = ss.clock
        beats = ss.beats
        ss.step = 0
        # Clear out the multifader and toggle widgets
        for beat in range(1, beats + 1):
            for index in range(1, len(ss.notes) + 1):
                self.send(MultiToggle.addressPattern % (
                    self.page, index, beat),
                    ss.on_off[beat - 1][index - 1])
        for beat in range(1, beats + 1):
            self.send(MultiFader.addressPattern % (self.page, beat),
                      ss.velocity[beat - 1] / 127.)
        self._multifader.attach()
        self._multitoggle.attach()
        # One LED tick per beat, starting on the next measure.
        self._led_schedule = clock.schedule(self.updateLEDs
            ).startLater(1, 1. / beats)
        self._refresh_col = 0
        # Refresh one UI column per tick (16 ticks per measure).
        self._refresh_ui_schedule = clock.schedule(self.refreshUI
            ).startLater(1, 0.0625)
        return self

    def detach(self):
        """Stop the schedules and detach widget listeners."""
        self._led_schedule.stopLater(0)
        self._refresh_ui_schedule.stopLater(0)
        self._multifader.detach()
        self._multitoggle.detach()

    def updateLEDs(self):
        """Light the LED for the current step and clear the previous one."""
        ss = self.stepSequencer
        on = ss.step + 1
        off = on - 1
        off = off or ss.beats  # wrap: step 1 turns off the last LED
        self.send(self.ledAddressPattern % (self.page, on), 1.0)
        self.send(self.ledAddressPattern % (self.page, off), 0.0)
        # really only want this for udp, but alas
        self.send(self.ledAddressPattern % (self.page, on), 1.0)
        self.send(self.ledAddressPattern % (self.page, off), 0.0)

    def setVelocity(self, idx, v):
        """Scale a 0..1 fader value to a 0..127 velocity for step *idx*."""
        nv = int(v * 127)
        self.stepSequencer.setVelocity(idx, nv)

    def refreshUI(self):
        """Push one column of on/off state plus all fader values to the UI,
        advancing to the next column each call."""
        ss = self.stepSequencer
        col = ss.on_off[self._refresh_col]
        for (index, on_off) in enumerate(col):
            self.send(MultiToggle.addressPattern %
                      (self.page, index + 1, self._refresh_col + 1), on_off)
        for beat in range(ss.beats):
            self.send(MultiFader.addressPattern % (self.page, beat + 1),
                      ss.velocity[beat] / 127.)
        self._refresh_col = (self._refresh_col + 1) % ss.beats
| {
"repo_name": "djfroofy/beatlounge",
"path": "bl/osc/touchosc.py",
"copies": "1",
"size": "5917",
"license": "mit",
"hash": -9140468278677423000,
"line_mean": 30.3068783069,
"line_max": 76,
"alpha_frac": 0.5653202636,
"autogenerated": false,
"ratio": 3.6278356836296752,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4693155947229675,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from twtxt.mentions import format_mentions
def mock_mention_format(name, url, expected_name, expected_url):
    """Stand-in mention formatter that also verifies its inputs.

    Asserts that the parsed (name, url) pair matches the expected pair,
    then renders the mention as '@name', echoing a falsy name unchanged.
    """
    assert name == expected_name
    assert url == expected_url
    return '@' + name if name else name
def test_format_mentions():
    """Exercise format_mentions() on parseable and unparseable mentions."""
    cases = {'No Mention': 'No Mention',
             '@<SomeName http://some.url/twtxt.txt>': ('SomeName', 'http://some.url/twtxt.txt'),
             '@<Some>Shitty<Name http://some.url/twtxt.txt>': ('Some>Shitty<Name', 'http://some.url/twtxt.txt'),
             '@<http://some.url/twtxt.txt>': (None, 'http://some.url/twtxt.txt'),
             '@<SomeName>': '@<SomeName>',
             '@SomeName': '@SomeName'}
    for text, expected in cases.items():
        if isinstance(expected, tuple):
            # Parseable mention: the mock formatter asserts the parse result.
            format_mentions(text, partial(mock_mention_format,
                                          expected_name=expected[0],
                                          expected_url=expected[1]))
        else:
            # Unparseable: the text must come back unchanged.
            assert expected == format_mentions(
                text, partial(mock_mention_format,
                              expected_name=None, expected_url=None))
def test_format_multi_mentions():
    """format_mentions() must invoke the callback once per mention, in order.

    Fix: the original built the expected ``mentions`` tuple but never used
    it; the callback now asserts each parsed (name, url) pair against it.
    """
    text = '@<SomeName http://url> and another @<AnotherName http://another/url> end'
    expected = iter((('SomeName', 'http://url'),
                     ('AnotherName', 'http://another/url')))

    def mock_multi_mention_format(name, url):
        assert (name, url) == next(expected)
        return '@' + name

    format_mentions(text, mock_multi_mention_format)
def test_format_multi_mentions_incomplete():
    """format_mentions() handles mentions whose name part is missing.

    Fix: the original prepared a second text/mentions pair but never called
    format_mentions() on it (dead code); both cases now run, and the
    callback asserts each parsed pair against the expected values.
    """
    def make_formatter(expected_pairs):
        # Formatter that checks each (name, url) against the next expected
        # pair and falls back to the URL when the name is missing.
        expected = iter(expected_pairs)

        def fmt(name, url):
            assert (name, url) == next(expected)
            if name:
                return '@' + name
            else:
                return '@' + url
        return fmt

    text = '@<http://url> and another @<AnotherName http://another/url> end'
    format_mentions(text, make_formatter(((None, 'http://url'),
                                          ('AnotherName', 'http://another/url'))))

    text = '@<SomeName http://url> and another @<http://another/url> end'
    format_mentions(text, make_formatter((('SomeName', 'http://url'),
                                          (None, 'http://another/url'))))
| {
"repo_name": "buckket/twtxt",
"path": "tests/test_mentions.py",
"copies": "1",
"size": "2024",
"license": "mit",
"hash": 8221774495463207000,
"line_mean": 35.1428571429,
"line_max": 117,
"alpha_frac": 0.5839920949,
"autogenerated": false,
"ratio": 3.6600361663652805,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.474402826126528,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from typing import Callable, Any, TypeVar, Generic
from .typing import Functor
from .typing import Monad
from .typing import Applicative
TEnv = TypeVar("TEnv")
TSource = TypeVar("TSource")
TResult = TypeVar("TResult")


class Reader(Generic[TEnv, TSource]):
    """The Reader monad: a wrapped function from environment to value.

    A Reader shares immutable state between computations: each wrapped
    function may read the environment but never modify it. Since a Reader
    is just a fancy name for a wrapped function, composing Readers is
    composing functions -- it could equally be called the Function (or
    Callable) monad.
    """

    def __init__(self, fn: Callable[[TEnv], TSource]) -> None:
        """Wrap *fn*, a function of the environment."""
        self.fn = fn

    @classmethod
    def unit(cls, value: TSource) -> "Reader[TEnv, TSource]":
        r"""Lift *value* into a Reader that ignores its environment.

        Haskell: return a = Reader $ \_ -> a
        """
        def ignore_env(_: TEnv) -> TSource:
            return value
        return cls(ignore_env)

    def map(self, fn: Callable[[TSource], TResult]) -> "Reader[TEnv, TResult]":
        r"""Post-compose *fn* with the wrapped function (Functor fmap).

        Haskell:
            fmap f m = Reader $ \r -> f (runReader m r)
        """
        def mapped(env: Any) -> Any:
            return fn(self.run(env))
        return Reader(mapped)

    def bind(self, fn: "Callable[[TSource], Reader[TEnv, TResult]]") -> "Reader[TEnv, TResult]":
        r"""Monadic bind: feed this Reader's result into *fn* and run the
        Reader it returns in the same environment.

        Haskell:
            m >>= k = Reader $ \r -> runReader (k (runReader m r)) r
        """
        def bound(env: Any) -> Any:
            return fn(self.run(env)).run(env)
        return Reader(bound)

    @classmethod
    def pure(cls, fn: Callable[[TSource], TResult]) -> "Reader[TEnv, Callable[[TSource], TResult]]":
        """Applicative pure: wrap a function as the Reader's value."""
        return Reader.unit(fn)

    def apply(
        self: "Reader[TEnv, Callable[[TSource], TResult]]", something: "Reader[TEnv, TSource]"
    ) -> "Reader[TEnv, TResult]":
        r"""Applicative apply (<*>): run both Readers in the same
        environment, then apply the first result to the second.

        If the wrapped function expects more arguments, a partially
        applied function is returned instead of raising, which enables
        currying through chained apply() calls.

        Haskell: f <*> g = \x -> f x (g x)
        """
        def applied(env: TEnv):
            fn = self.run(env)
            value = something.run(env)
            try:
                return fn(value)
            except TypeError:
                return partial(fn, value)
        return Reader(applied)

    def run(self, env: TEnv) -> TSource:
        """Apply the wrapped function to *env*.

        Haskell: runReader :: Reader r a -> r -> a
        """
        return self.fn(env)

    def __call__(self, env: TEnv) -> TSource:
        """A Reader is itself callable; alias for run()."""
        return self.run(env)

    def __str__(self) -> str:
        return "Reader(%s)" % repr(self.fn)

    def __repr__(self) -> str:
        return str(self)
class MonadReader(Reader[TEnv, TSource]):
    """Convenience helpers layered on top of the Reader monad."""

    @classmethod
    def ask(cls) -> Reader[TEnv, TEnv]:
        r"""Return a Reader that yields the environment itself.

        Haskell: Reader $ \x -> x
        """
        def identity(env):
            return env
        return Reader(identity)

    @classmethod
    def asks(cls, fn: Callable[[TEnv], TSource]) -> Reader[TEnv, TSource]:
        r"""Wrap *fn* so it is evaluated against the environment.

        Haskell:
            asks :: (e -> a) -> R e a
            asks sel = ask >>= return . sel
        """
        return cls.ask().bind(lambda env: cls.unit(fn(env)))

    def local(self, fn: Callable[[TEnv], TEnv]) -> Reader[TEnv, TSource]:
        r"""Run this Reader in an environment transformed by *fn*.

        Haskell: local f c = Reader $ \e -> runReader c (f e)
        """
        def transformed(env):
            return self.run(fn(env))
        return Reader(transformed)
# Import-time sanity checks: Reader must satisfy the Functor, Applicative
# and Monad protocols defined in .typing.
assert isinstance(Reader, Functor)
assert isinstance(Reader, Applicative)
assert isinstance(Reader, Monad)
| {
"repo_name": "dbrattli/OSlash",
"path": "oslash/reader.py",
"copies": "1",
"size": "4527",
"license": "apache-2.0",
"hash": 4073795968712686000,
"line_mean": 28.3961038961,
"line_max": 100,
"alpha_frac": 0.5875855975,
"autogenerated": false,
"ratio": 3.781954887218045,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9868542543671324,
"avg_score": 0.00019958820934414965,
"num_lines": 154
} |
from functools import partial
from ubuntui.utils import Color, Padding
from ubuntui.widgets.hr import HR
from ubuntui.widgets.input import Selector
from urwid import (
BoxAdapter,
CheckBox,
Columns,
Edit,
Filler,
Frame,
Pile,
Text,
WidgetWrap
)
from conjureup.app_config import app
from conjureup.telemetry import track_screen
from conjureup.ui.widgets.base import Scrollable
from conjureup.ui.widgets.buttons import (
FooterButton,
SecondaryButton,
SubmitButton
)
from conjureup.ui.widgets.selectors import RadioList
# Symbolic command names used in BaseView's urwid command map.
SWAP_FOCUS = 'swap focus'
NEXT_FIELD = 'next field'
PREV_FIELD = 'prev field'
SUBMIT_FIELD = 'submit field'
NEXT_SCREEN = 'next screen'
PREV_SCREEN = 'prev screen'
SHOW_HELP = 'show help'
SCROLL_UP = 'scroll up'
SCROLL_DOWN = 'scroll down'
SCROLL_PAGE_UP = 'scroll page up'
SCROLL_PAGE_DOWN = 'scroll page down'

# Field-traversal directions (see BaseView._select_next_field).
FORWARD = +1
BACKWARD = -1
class BaseView(WidgetWrap):
    """Base class for conjure-up screens: a Frame with a header rule, a
    scrollable body from build_widget(), and a footer with buttons."""

    title = 'Base View'
    subtitle = None
    footer = ''  # initial footer message text
    footer_height = 'auto'  # 'auto' or an integer number of rows
    footer_align = 'center'
    show_back_button = True
    body_valign = 'top'
    metrics_title = None  # in case we want to track screen w/ different name

    # Widget types that count as selectable input fields for
    # next_field()/prev_field() traversal (see _check_field).
    focusable_widget_types = (
        Edit,
        CheckBox,
        Selector,
        RadioList,
        SubmitButton,
        SecondaryButton,
    )
def __init__(self):
    """Create a new instance of this view.

    Builds the urwid Frame (header rule, scrollable body, footer),
    installs the default key-to-command mappings, and wires each
    command name to its handler.
    """
    self.frame = Frame(header=Padding.center_90(HR()),
                       body=self._build_body(),
                       footer=self._build_footer())
    # NOTE: extend_command_map runs before _command_handlers exists;
    # that is safe here because every value below is a command-name
    # string (callable values would be stored in _command_handlers).
    self.extend_command_map({
        'meta s': SWAP_FOCUS,
        'tab': NEXT_FIELD,
        'shift tab': PREV_FIELD,
        'enter': SUBMIT_FIELD,
        'n': NEXT_SCREEN,
        'meta n': NEXT_SCREEN,
        'meta right': NEXT_SCREEN,
        'meta left': PREV_SCREEN,
        'meta b': PREV_SCREEN,
        'b': PREV_SCREEN,
        'meta h': SHOW_HELP,
        'h': SHOW_HELP,
        '?': SHOW_HELP,
        'up': SCROLL_UP,
        'down': SCROLL_DOWN,
        'page up': SCROLL_PAGE_UP,
        'page down': SCROLL_PAGE_DOWN,
    })
    self._command_handlers = {
        SWAP_FOCUS: self._swap_focus,
        NEXT_FIELD: self.next_field,
        PREV_FIELD: self.prev_field,
        SUBMIT_FIELD: self.submit_field,
        NEXT_SCREEN: self.submit,
        PREV_SCREEN: self.prev_screen,
        SHOW_HELP: self.show_help,
        SCROLL_UP: partial(self.scroll, -1),
        SCROLL_DOWN: partial(self.scroll, +1),
        SCROLL_PAGE_UP: partial(self.scroll, -10),
        SCROLL_PAGE_DOWN: partial(self.scroll, +10),
    }
    super().__init__(self.frame)
def extend_command_map(self, command_mappings):
    """
    Extend the command mapping table, which maps keys to command names.

    :param dict command_mappings: A mapping of key names to command names.
        The command names will be looked up in the command handlers table.
        (See :meth:`extend_command_handlers`) As a convenience,
        ``command_mappings`` can also provide handlers directly instead of
        a command name, in which case a placeholder command name of the
        form ``key: {key name}`` will be created.
    """
    # Copy-on-write: avoid mutating the class-level urwid command map.
    self._command_map = self._command_map.copy()
    for key, target in command_mappings.items():
        if not callable(target):
            self._command_map[key] = target
            continue
        placeholder = 'key: {}'.format(key)
        self._command_map[key] = placeholder
        self._command_handlers[placeholder] = target
def extend_command_handlers(self, command_handlers):
    """
    Extend the command handlers table, which maps command names to
    handler functions.

    :param dict command_handlers: A mapping of command names to the
        functions that are called when that command is invoked.
    """
    for command_name, handler in command_handlers.items():
        self._command_handlers[command_name] = handler
def show(self):
    """Make this view current: record the screen view for telemetry, then
    install the title, subtitle, and this view's body into the app UI."""
    track_screen(self.metrics_title or self.title)
    app.ui.set_header(title=self.title,
                      excerpt=self.subtitle)
    app.ui.set_body(self)
@property
def widget(self):
    """ The widget returned by ``self.build_widget()``.

    Cached in ``self._widget`` by :meth:`_build_body` during __init__.
    """
    return self._widget
def build_widget(self):
    """ Build the main widget(s) for the view.

    Return a widget, or a list of widgets to be rendered in a Pile,
    which will be used as the main body of the view.

    This **must** be implemented by a subclass.

    :raises NotImplementedError: always, on this base class.
    """
    raise NotImplementedError()
def build_buttons(self):
    """ Build any buttons for the footer.

    Should call `self.button(label, callback)` to construct each button,
    and return a list of such buttons. The base view has none.
    """
    return []
def button(self, label, callback):
    """ Build a footer button with the given label that invokes
    *callback* (with no arguments) when pressed.
    """
    def _pressed(_btn):
        # urwid passes the button widget; the callback takes no args.
        return callback()
    return FooterButton(label, _pressed)
def _build_body(self):
    """Wrap the view's main widget for display and cache it on self."""
    built = self.build_widget()
    # A list of widgets is stacked into a single Pile.
    self._widget = Pile(built) if isinstance(built, list) else built
    # position widget indented slightly, filled around, and scrollable
    return Scrollable(Padding.center_80(self._widget),
                      valign=self.body_valign)
def set_footer(self, message):
    """Replace the footer message text."""
    self.footer_msg.set_text(message)
def _build_footer(self):
    """Build the footer: a message area above a row of buttons.

    The button row contains QUIT, optionally BACK, then any view-specific
    buttons from build_buttons(), pushed toward the right edge.
    """
    def _pack(btn):
        # Size each button to its label plus decoration padding.
        return ('fixed', len(btn.label) + 4, btn)

    buttons = [('fixed', 2, Text("")), _pack(
        self.button('QUIT', app.ui.quit))]
    if self.show_back_button:
        buttons.append(_pack(self.button('BACK', self.prev_screen)))
    buttons.append(('weight', 2, Text("")))
    buttons.extend([_pack(btn) for btn in self.build_buttons()])
    buttons.append(('fixed', 2, Text("")))
    self.button_row = Columns(buttons, 2)

    self.footer_msg = Text(self.footer, align=self.footer_align)
    # Fix: removed a dead Columns([...]) assignment that was immediately
    # overwritten by the line below in the original.
    footer_widget = Padding.center_90(self.footer_msg)
    if self.footer_height != 'auto':
        # Pin the message area to a fixed height, bottom-aligned.
        footer_widget = BoxAdapter(Filler(footer_widget, valign='bottom'),
                                   self.footer_height)
    footer = Pile([
        Padding.center_90(HR()),
        Color.body(footer_widget),
        Padding.line_break(""),
        Color.frame_footer(Pile([
            Padding.line_break(""),
            self.button_row,
        ])),
    ])
    return footer
def prev_screen(self):
    """
    Shut down the current view, and move to the previous screen.

    This should be implemented by a subclass; the base implementation
    only logs an error.
    """
    app.log.error('%s: prev_screen not implemented', type(self).__name__)
def next_screen(self):
    """
    Shut down the current view, and move to the next screen.

    This **must** be implemented by a subclass; the base implementation
    is a no-op.
    """
    pass
def _check_field(self, field):
    """
    Check if a field is acceptable for selecting with :meth:`.next_field`
    or :meth:`.prev_field`.

    Accepts a selectable widget whose base widget is one of
    ``focusable_widget_types``, or a container/wrapper holding one.
    """
    widget = field.base_widget  # strip any decoration
    if not widget.selectable():
        return False
    if isinstance(widget, self.focusable_widget_types):
        # acceptable to the defense, your honor
        return True
    if hasattr(widget, 'contents'):
        # List-style container: acceptable when any child is acceptable.
        return any(self._check_field(entry[0]) for entry in widget.contents)
    if hasattr(widget, '_w') and widget._w is not widget:
        # Wrapper widget: recurse into what it wraps.
        return self._check_field(widget._w)
    return False
def _select_next_field(self, direction):
"""Move focus to the adjacent acceptable field in *direction*.

Walks up the current focus path; at each container level it scans the
siblings after (FORWARD) or before (BACKWARD) the focused child and
focuses the first one accepted by ``_check_field``.

:returns: ``True`` if a field was selected, ``False`` otherwise.
"""
if not hasattr(self.widget, 'get_focus_widgets'):
# top-level widget is not a container, nothing to do
return False
focus_path = [self.widget] + self.widget.get_focus_widgets()
while len(focus_path) > 1:
# use -2 to get the selected parent of the leaf widget
container = focus_path[-2]
if hasattr(container, 'contents'):
# widget is in fact a container, try to find a field in it
if direction == FORWARD:
start = container.focus_position + 1
end = len(container.contents)
else:
start = container.focus_position - 1
end = -1  # going backward, range doesn't include end
for new_position in range(start, end, direction):
new_field = container.contents[new_position][0]
if not self._check_field(new_field):
# not a field we want to select
continue
container.focus_position = new_position
return True
# nothing acceptable at this level; climb one level up
focus_path.pop()
else:
# while/else: every level exhausted -- no more fields
return False
def _first_field(self):
    """Focus the first acceptable field by stepping backward until no
    earlier field exists."""
    moved = True
    while moved:
        moved = self._select_next_field(direction=BACKWARD)
def _last_field(self):
    """Focus the last acceptable field by stepping forward until no
    later field exists."""
    moved = True
    while moved:
        moved = self._select_next_field(direction=FORWARD)
def next_field(self, _leave_body=True):
    """
    Find and focus the next non-button selectable.

    :param bool _leave_body: For internal use only.
    :returns: ``True`` if another field was selected, or ``False``.
    """
    if self.frame.focus_position == 'footer':
        # coming from the button bar: re-enter the body at its first field
        self.frame.focus_position = 'body'
        return self._first_field()
    moved = self._select_next_field(FORWARD)
    if not moved and _leave_body:
        self._swap_focus()
    return moved
def prev_field(self):
    """
    Find and focus the previous non-button selectable, wrapping from the
    top to the bottom of the body.

    :returns: ``True`` if another field was selected, or ``False``.
    """
    if self.frame.focus_position == 'footer':
        # leaving the button bar: land on the last field in the body
        self.frame.focus_position = 'body'
        return self._last_field()
    moved = self._select_next_field(BACKWARD)
    if not moved:
        self._swap_focus()
    return moved
def submit_field(self):
"""
Submit the current field or view.
By default, this calls ``self.next_field()`` to select the next
input field, and if there are no more input fields,
``self.submit()`` is called.
"""
if self.frame.focus_position == 'footer':
# activate selected button
super().keypress((1, 1), 'enter')
return
# check if current field is submit button
focus_widgets = self.widget.get_focus_widgets()
if focus_widgets:
field = focus_widgets[-1].base_widget
else:
field = None
if isinstance(field, (SubmitButton, SecondaryButton)):
# activate the selected button
# NOTE(review): size is passed as ``1`` here but ``(1, 1)`` in the
# footer branch above -- confirm both forms are accepted.
field.keypress(1, 'enter')
return
# move to next field, or submit view
if not self.next_field(_leave_body=False):
self.submit()
return
# check if next field is submit button
field = (self.widget.get_focus_widgets() or [None])[-1].base_widget
if isinstance(field, SubmitButton):
# activate the selected button
field.keypress(1, 'enter')
return
def submit(self):
    """
    Submit the current form.

    The default behaviour simply advances via ``self.next_screen()``;
    views that need validation, post-processing, or multiple forms to
    submit should override this method.
    """
    self.next_screen()
def scroll(self, amount):
    """Scroll the body by *amount* lines (negative scrolls up)."""
    body = self.frame.body
    body.scroll_top += amount
def _swap_focus(self):
    """Toggle keyboard focus between the body and the footer button bar.

    When moving to the footer, focus lands on the right-most selectable
    button.
    """
    if self.frame.focus_position != 'body':
        self.frame.focus_position = 'body'
        return
    self.frame.focus_position = 'footer'
    # pick the last selectable column in the button row
    for idx in range(len(self.button_row.contents) - 1, -1, -1):
        widget = self.button_row.contents[idx][0]
        if widget.selectable():
            self.button_row.focus_position = idx
            break
def keypress(self, size, key):
"""Dispatch *key*: default widget handling first, then the command map."""
# try passing through the key first
# (unless enter, as SUBMIT_FIELD should override default behavior)
if key != 'enter':
result = super().keypress(size, key)
if result != key:
self.after_keypress()
return result
# not handled, so dispatch via _command_handlers (see __init__)
command = self._command_map[key]
if command in self._command_handlers:
result = self._command_handlers[command]()
self.after_keypress()
return result
# NOTE(review): if the mapped command has no registered handler, this
# falls through and implicitly returns None, i.e. the key is treated as
# handled rather than returned as unhandled -- confirm this is intended.
def after_keypress(self):
    """Hook invoked after a keypress has been handled; no-op by default."""
def show_help(self):
    """Open the keyboard-navigation help screen; closing it returns here."""
    HelpView(close=self.show).show()
class HelpView(BaseView):
"""Static help screen listing the keyboard navigation bindings.

``close`` is called when the user leaves the screen (it is installed
as this view's ``prev_screen``).
"""
title = 'Keyboard Navigation'
# (key, description) pairs rendered as a two-column table
help_defs = (
("q or Q", "If not in a text entry field, these will quit "
"conjure-up."),
("tab", "This will switch to the next field. If there are no "
"more fields, it will change focus to the button bar at "
"the bottom of the screen."),
("shift tab", "This will switch to the previous field. If on "
"the first field, it will move to the button bar "
"at the bottom. If on the button bar, it will "
"move to the last field."),
("down arrow", "This will move to the next line of a multi-line "
"field, or the next field of a multi-field form. "
"Otherwise, this will scroll the screen if there "
"is more content than will fit on the screen."),
("up arrow", "This will move to the previous line of a "
"multi-line field, or the previous field of a "
"multi-field form. Otherwise, this will scroll the "
"screen if there is more content than will fit on the "
"screen."),
("page down", "This will scroll by 10 lines."),
("page up", "This will scroll by 10 lines."),
("enter", "This will submit the current field and move to the "
"next one, or submit the current form if there are no "
"more input fields."),
("b or meta/alt b", "Go to the previous screen, if any."),
("n or meta/alt n", "Submit the current screen and continue, if "
"possible (some screens may require interaction "
"before continuing)."),
("meta/alt s", "Switch between the button bar and the main "
"window input area."),
("h or ? or meta/alt h", "Show this help screen"),
)
def __init__(self, close):
# instance attribute shadows BaseView.prev_screen, so BACK invokes
# the supplied close callback
self.prev_screen = close
super().__init__()
def build_widget(self):
"""Return the help table as a list of two-column rows."""
key_col_width = max(len(k) for k, _ in self.help_defs) + 2
lines = []
for key_def, help_text in self.help_defs:
lines.append(Columns([(key_col_width, Text(key_def)),
Text(help_text)]))
lines.append(Text(""))
return lines
class SchemaFormView(BaseView):
    """Editable form for the current provider's schema fields.

    ``submit_cb`` runs when the form validates on SAVE; ``back_cb`` runs
    when the user navigates back.
    """

    def __init__(self, submit_cb, back_cb, *args, **kwargs):
        self.submit_cb = submit_cb
        self.back_cb = back_cb
        super().__init__(*args, **kwargs)

    def build_widget(self):
        """Return input row, error row, and spacer for each form field."""
        # right-aligned label column sized to the longest label
        label_width = 2 + max(len(f.label or f.key)
                              for f in app.provider.form.fields())
        rows = []
        for field in app.provider.form.fields():
            caption = field.label or field.key
            input_row = Columns(
                [
                    (label_width, Text(caption, align='right')),
                    Color.string_input(
                        field.widget,
                        focus_map='string_input focus')
                ], dividechars=1
            )
            error_row = Columns(
                [
                    (label_width, Text("")),
                    Color.error_major(field.error)
                ], dividechars=1
            )
            rows.extend([input_row, error_row, Padding.line_break("")])
        return rows

    def build_buttons(self):
        return [self.button('SAVE', self.submit)]

    def submit(self):
        # only advance when the provider reports a valid form
        if app.provider.is_valid():
            self.submit_cb()

    def prev_screen(self):
        self.back_cb()
| {
"repo_name": "conjure-up/conjure-up",
"path": "conjureup/ui/views/base.py",
"copies": "2",
"size": "17285",
"license": "mit",
"hash": 2075046830455840500,
"line_mean": 34.060851927,
"line_max": 78,
"alpha_frac": 0.5468325137,
"autogenerated": false,
"ratio": 4.2742334322453015,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 493
} |
from functools import partial
from ..utils import isort_test
# Shortcut: isort_test pre-configured for the "attrs" profile.
attrs_isort_test = partial(isort_test, profile="attrs")
def test_attrs_code_snippet_one():
"""attrs profile: snippet with a parenthesised multi-name import."""
attrs_isort_test(
"""from __future__ import absolute_import, division, print_function
import sys
from functools import partial
from . import converters, exceptions, filters, setters, validators
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
NOTHING,
Attribute,
Factory,
attrib,
attrs,
fields,
fields_dict,
make_class,
validate,
)
from ._version_info import VersionInfo
__version__ = "20.2.0.dev0"
"""
)
def test_attrs_code_snippet_two():
"""attrs profile: larger block mixing plain and parenthesised imports."""
attrs_isort_test(
"""from __future__ import absolute_import, division, print_function
import copy
import linecache
import sys
import threading
import uuid
import warnings
from operator import itemgetter
from . import _config, setters
from ._compat import (
PY2,
isclass,
iteritems,
metadata_proxy,
ordered_dict,
set_closure_cell,
)
from .exceptions import (
DefaultAlreadySetError,
FrozenInstanceError,
NotAnAttrsClassError,
PythonTooOldError,
UnannotatedAttributeError,
)
# This is used at least twice, so cache it here.
_obj_setattr = object.__setattr__
"""
)
def test_attrs_code_snippet_three():
"""attrs profile: module docstring precedes the imports."""
attrs_isort_test(
'''
"""
Commonly useful validators.
"""
from __future__ import absolute_import, division, print_function
import re
from ._make import _AndValidator, and_, attrib, attrs
from .exceptions import NotCallableError
__all__ = [
"and_",
"deep_iterable",
"deep_mapping",
"in_",
"instance_of",
"is_callable",
"matches_re",
"optional",
"provides",
]
'''
)
| {
"repo_name": "PyCQA/isort",
"path": "tests/unit/profiles/test_attrs.py",
"copies": "1",
"size": "1845",
"license": "mit",
"hash": -315460365857581700,
"line_mean": 17.0882352941,
"line_max": 75,
"alpha_frac": 0.6764227642,
"autogenerated": false,
"ratio": 3.5755813953488373,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4752004159548837,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from ..utils import isort_test
# Shortcut: isort_test for the "django" profile, with "django" as first-party.
django_isort_test = partial(isort_test, profile="django", known_first_party=["django"])
def test_django_snippet_one():
"""django profile: module-level imports followed by a class definition."""
django_isort_test(
"""import copy
import inspect
import warnings
from functools import partialmethod
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.core import checks
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned,
ObjectDoesNotExist, ValidationError,
)
from django.db import (
DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connection,
connections, router, transaction,
)
from django.db.models import (
NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value,
)
from django.db.models.constants import LOOKUP_SEP
from django.db.models.constraints import CheckConstraint
from django.db.models.deletion import CASCADE, Collector
from django.db.models.fields.related import (
ForeignObjectRel, OneToOneField, lazy_related_operation, resolve_relation,
)
from django.db.models.functions import Coalesce
from django.db.models.manager import Manager
from django.db.models.options import Options
from django.db.models.query import Q
from django.db.models.signals import (
class_prepared, post_init, post_save, pre_init, pre_save,
)
from django.db.models.utils import make_model_tuple
from django.utils.encoding import force_str
from django.utils.hashable import make_hashable
from django.utils.text import capfirst, get_text_list
from django.utils.translation import gettext_lazy as _
from django.utils.version import get_version
class Deferred:
def __repr__(self):
return '<Deferred field>'
def __str__(self):
return '<Deferred field>'"""
)
def test_django_snippet_two():
"""django profile: imports nested inside a function body."""
django_isort_test(
'''from django.utils.version import get_version
VERSION = (3, 2, 0, 'alpha', 0)
__version__ = get_version(VERSION)
def setup(set_prefix=True):
"""
Configure the settings (this happens as a side effect of accessing the
first setting), configure logging and populate the app registry.
Set the thread-local urlresolvers script prefix if `set_prefix` is True.
"""
from django.apps import apps
from django.conf import settings
from django.urls import set_script_prefix
from django.utils.log import configure_logging
configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
if set_prefix:
set_script_prefix(
'/' if settings.FORCE_SCRIPT_NAME is None else settings.FORCE_SCRIPT_NAME
)
apps.populate(settings.INSTALLED_APPS)'''
)
def test_django_snippet_three():
"""django profile: stdlib, first-party, and relative imports together."""
django_isort_test(
"""import cgi
import codecs
import copy
import warnings
from io import BytesIO
from itertools import chain
from urllib.parse import quote, urlencode, urljoin, urlsplit
from django.conf import settings
from django.core import signing
from django.core.exceptions import (
DisallowedHost, ImproperlyConfigured, RequestDataTooBig,
)
from django.core.files import uploadhandler
from django.http.multipartparser import MultiPartParser, MultiPartParserError
from django.utils.datastructures import (
CaseInsensitiveMapping, ImmutableList, MultiValueDict,
)
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.encoding import escape_uri_path, iri_to_uri
from django.utils.functional import cached_property
from django.utils.http import is_same_domain, limited_parse_qsl
from django.utils.regex_helper import _lazy_re_compile
from .multipartparser import parse_header
RAISE_ERROR = object()
class UnreadablePostError(OSError):
pass"""
)
| {
"repo_name": "PyCQA/isort",
"path": "tests/unit/profiles/test_django.py",
"copies": "1",
"size": "3741",
"license": "mit",
"hash": -2340259936317739000,
"line_mean": 29.6639344262,
"line_max": 87,
"alpha_frac": 0.7626303128,
"autogenerated": false,
"ratio": 3.996794871794872,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0001884552732818078,
"num_lines": 122
} |
from functools import partial
from ..utils import isort_test
# Shortcut: isort_test pre-configured for the "google" profile.
google_isort_test = partial(isort_test, profile="google")
def test_google_code_snippet_shared_example():
"""Tests snippet examples directly shared with the isort project.
See: https://github.com/PyCQA/isort/issues/1486.
"""
# two independent single-argument checks: each snippet should be stable
google_isort_test(
"""import collections
import cProfile
"""
)
google_isort_test(
"""from a import z
from a.b import c
"""
)
def test_google_code_snippet_one():
"""google profile: grouped multi-name imports are exploded to one name per line."""
google_isort_test(
'''# coding=utf-8
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""JAX user-facing transformations and utilities.
The transformations here mostly wrap internal transformations, providing
convenience flags to control behavior and handling Python containers of
arguments and outputs. The Python containers handled are pytrees (see
tree_util.py), which include nested tuples/lists/dicts, where the leaves are
arrays.
"""
# flake8: noqa: F401
import collections
import functools
import inspect
import itertools as it
import threading
import weakref
from typing import Any, Callable, Iterable, List, NamedTuple, Optional, Sequence, Tuple, TypeVar, Union
from warnings import warn
import numpy as np
from contextlib import contextmanager, ExitStack
from . import core
from . import linear_util as lu
from . import ad_util
from . import dtypes
from .core import eval_jaxpr
from .api_util import (wraps, flatten_fun, apply_flat_fun, flatten_fun_nokwargs,
                       flatten_fun_nokwargs2, argnums_partial, flatten_axes,
                       donation_vector, rebase_donate_argnums)
from .traceback_util import api_boundary
from .tree_util import (tree_map, tree_flatten, tree_unflatten, tree_structure,
                        tree_transpose, tree_leaves, tree_multimap,
                        treedef_is_leaf, Partial)
from .util import (unzip2, curry, partial, safe_map, safe_zip, prod, split_list,
                   extend_name_stack, wrap_name, cache)
from .lib import xla_bridge as xb
from .lib import xla_client as xc
# Unused imports to be exported
from .lib.xla_bridge import (device_count, local_device_count, devices,
                             local_devices, host_id, host_ids, host_count)
from .abstract_arrays import ConcreteArray, ShapedArray, raise_to_shaped
from .interpreters import partial_eval as pe
from .interpreters import xla
from .interpreters import pxla
from .interpreters import ad
from .interpreters import batching
from .interpreters import masking
from .interpreters import invertible_ad as iad
from .interpreters.invertible_ad import custom_ivjp
from .custom_derivatives import custom_jvp, custom_vjp
from .config import flags, config, bool_env
AxisName = Any
# This TypeVar is used below to express the fact that function call signatures
# are invariant under the jit, vmap, and pmap transformations.
# Specifically, we statically assert that the return type is invariant.
# Until PEP-612 is implemented, we cannot express the same invariance for
# function arguments.
# Note that the return type annotations will generally not strictly hold
# in JIT internals, as Tracer values are passed through the function.
# Should this raise any type errors for the tracing code in future, we can disable
# type checking in parts of the tracing code, or remove these annotations.
T = TypeVar("T")
map = safe_map
zip = safe_zip
FLAGS = flags.FLAGS
flags.DEFINE_bool("jax_disable_jit", bool_env("JAX_DISABLE_JIT", False),
                  "Disable JIT compilation and just call original Python.")
''',
# expected output under the google profile: one imported name per line
'''# coding=utf-8
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""JAX user-facing transformations and utilities.
The transformations here mostly wrap internal transformations, providing
convenience flags to control behavior and handling Python containers of
arguments and outputs. The Python containers handled are pytrees (see
tree_util.py), which include nested tuples/lists/dicts, where the leaves are
arrays.
"""
# flake8: noqa: F401
import collections
from contextlib import contextmanager
from contextlib import ExitStack
import functools
import inspect
import itertools as it
import threading
from typing import (Any, Callable, Iterable, List, NamedTuple, Optional,
                    Sequence, Tuple, TypeVar, Union)
from warnings import warn
import weakref
import numpy as np
from . import ad_util
from . import core
from . import dtypes
from . import linear_util as lu
from .abstract_arrays import ConcreteArray
from .abstract_arrays import raise_to_shaped
from .abstract_arrays import ShapedArray
from .api_util import apply_flat_fun
from .api_util import argnums_partial
from .api_util import donation_vector
from .api_util import flatten_axes
from .api_util import flatten_fun
from .api_util import flatten_fun_nokwargs
from .api_util import flatten_fun_nokwargs2
from .api_util import rebase_donate_argnums
from .api_util import wraps
from .config import bool_env
from .config import config
from .config import flags
from .core import eval_jaxpr
from .custom_derivatives import custom_jvp
from .custom_derivatives import custom_vjp
from .interpreters import ad
from .interpreters import batching
from .interpreters import invertible_ad as iad
from .interpreters import masking
from .interpreters import partial_eval as pe
from .interpreters import pxla
from .interpreters import xla
from .interpreters.invertible_ad import custom_ivjp
from .lib import xla_bridge as xb
from .lib import xla_client as xc
# Unused imports to be exported
from .lib.xla_bridge import device_count
from .lib.xla_bridge import devices
from .lib.xla_bridge import host_count
from .lib.xla_bridge import host_id
from .lib.xla_bridge import host_ids
from .lib.xla_bridge import local_device_count
from .lib.xla_bridge import local_devices
from .traceback_util import api_boundary
from .tree_util import Partial
from .tree_util import tree_flatten
from .tree_util import tree_leaves
from .tree_util import tree_map
from .tree_util import tree_multimap
from .tree_util import tree_structure
from .tree_util import tree_transpose
from .tree_util import tree_unflatten
from .tree_util import treedef_is_leaf
from .util import cache
from .util import curry
from .util import extend_name_stack
from .util import partial
from .util import prod
from .util import safe_map
from .util import safe_zip
from .util import split_list
from .util import unzip2
from .util import wrap_name
AxisName = Any
# This TypeVar is used below to express the fact that function call signatures
# are invariant under the jit, vmap, and pmap transformations.
# Specifically, we statically assert that the return type is invariant.
# Until PEP-612 is implemented, we cannot express the same invariance for
# function arguments.
# Note that the return type annotations will generally not strictly hold
# in JIT internals, as Tracer values are passed through the function.
# Should this raise any type errors for the tracing code in future, we can disable
# type checking in parts of the tracing code, or remove these annotations.
T = TypeVar("T")
map = safe_map
zip = safe_zip
FLAGS = flags.FLAGS
flags.DEFINE_bool("jax_disable_jit", bool_env("JAX_DISABLE_JIT", False),
                  "Disable JIT compilation and just call original Python.")
''',
)
def test_google_code_snippet_two():
"""google profile: imports inside conditional branches are sorted in place."""
google_isort_test(
"""#!/usr/bin/env python
# In[ ]:
# coding: utf-8
###### Searching and Downloading Google Images to the local disk ######
# Import Libraries
import sys
version = (3, 0)
cur_version = sys.version_info
if cur_version >= version: # If the Current Version of Python is 3.0 or above
import urllib.request
from urllib.request import Request, urlopen
from urllib.request import URLError, HTTPError
from urllib.parse import quote
import http.client
from http.client import IncompleteRead, BadStatusLine
http.client._MAXHEADERS = 1000
else: # If the Current Version of Python is 2.x
import urllib2
from urllib2 import Request, urlopen
from urllib2 import URLError, HTTPError
from urllib import quote
import httplib
from httplib import IncompleteRead, BadStatusLine
httplib._MAXHEADERS = 1000
import time # Importing the time library to check the time of code execution
import os
import argparse
import ssl
import datetime
import json
import re
import codecs
import socket""",
# expected output: each branch and the trailing block sorted per profile
"""#!/usr/bin/env python
# In[ ]:
# coding: utf-8
###### Searching and Downloading Google Images to the local disk ######
# Import Libraries
import sys
version = (3, 0)
cur_version = sys.version_info
if cur_version >= version: # If the Current Version of Python is 3.0 or above
import http.client
from http.client import BadStatusLine
from http.client import IncompleteRead
from urllib.parse import quote
import urllib.request
from urllib.request import HTTPError
from urllib.request import Request
from urllib.request import URLError
from urllib.request import urlopen
http.client._MAXHEADERS = 1000
else: # If the Current Version of Python is 2.x
from urllib import quote
import httplib
from httplib import BadStatusLine
from httplib import IncompleteRead
import urllib2
from urllib2 import HTTPError
from urllib2 import Request
from urllib2 import URLError
from urllib2 import urlopen
httplib._MAXHEADERS = 1000
import argparse
import codecs
import datetime
import json
import os
import re
import socket
import ssl
import time # Importing the time library to check the time of code execution
""",
)
def test_code_snippet_three():
"""google profile: try/except import block is preserved while others sort."""
# NOTE(review): siblings are named test_google_code_snippet_*; this one
# lacks the "google_" prefix -- possibly an oversight upstream.
google_isort_test(
'''# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Monitoring."""
# pylint: disable=invalid-name
# TODO(ochang): Remove V3 from names once all metrics are migrated to
# stackdriver.
from builtins import object
from builtins import range
from builtins import str
import bisect
import collections
import functools
import itertools
import re
import six
import threading
import time
try:
from google.cloud import monitoring_v3
except (ImportError, RuntimeError):
monitoring_v3 = None
from google.api_core import exceptions
from google.api_core import retry
from base import errors
from base import utils
from config import local_config
from google_cloud_utils import compute_metadata
from google_cloud_utils import credentials
from metrics import logs
from system import environment''',
'''# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Monitoring."""
# pylint: disable=invalid-name
# TODO(ochang): Remove V3 from names once all metrics are migrated to
# stackdriver.
import bisect
from builtins import object
from builtins import range
from builtins import str
import collections
import functools
import itertools
import re
import threading
import time
import six
try:
from google.cloud import monitoring_v3
except (ImportError, RuntimeError):
monitoring_v3 = None
from base import errors
from base import utils
from config import local_config
from google.api_core import exceptions
from google.api_core import retry
from google_cloud_utils import compute_metadata
from google_cloud_utils import credentials
from metrics import logs
from system import environment
''',
)
| {
"repo_name": "PyCQA/isort",
"path": "tests/unit/profiles/test_google.py",
"copies": "1",
"size": "13379",
"license": "mit",
"hash": 2650775435499632000,
"line_mean": 31.3946731235,
"line_max": 103,
"alpha_frac": 0.7600717542,
"autogenerated": false,
"ratio": 3.9806605177030647,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5240732271903065,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from ..utils import isort_test
# Shortcut: isort_test pre-configured for the "plone" profile.
plone_isort_test = partial(isort_test, profile="plone")
def test_plone_code_snippet_one():
"""plone profile: from-imports sorted ahead of plain imports."""
plone_isort_test(
"""# -*- coding: utf-8 -*-
from plone.app.multilingual.testing import PLONE_APP_MULTILINGUAL_PRESET_FIXTURE # noqa
from plone.app.robotframework.testing import REMOTE_LIBRARY_BUNDLE_FIXTURE
from plone.app.testing import FunctionalTesting
from plone.app.testing import IntegrationTesting
from plone.app.testing import PloneWithPackageLayer
from plone.testing import z2
import plone.app.multilingualindexes
PAMI_FIXTURE = PloneWithPackageLayer(
bases=(PLONE_APP_MULTILINGUAL_PRESET_FIXTURE,),
name="PAMILayer:Fixture",
gs_profile_id="plone.app.multilingualindexes:default",
zcml_package=plone.app.multilingualindexes,
zcml_filename="configure.zcml",
additional_z2_products=["plone.app.multilingualindexes"],
)
"""
)
def test_plone_code_snippet_two():
"""plone profile: case-sensitive alphabetical ordering across packages."""
plone_isort_test(
"""# -*- coding: utf-8 -*-
from Acquisition import aq_base
from App.class_init import InitializeClass
from App.special_dtml import DTMLFile
from BTrees.OOBTree import OOTreeSet
from logging import getLogger
from plone import api
from plone.app.multilingual.events import ITranslationRegisteredEvent
from plone.app.multilingual.interfaces import ITG
from plone.app.multilingual.interfaces import ITranslatable
from plone.app.multilingual.interfaces import ITranslationManager
from plone.app.multilingualindexes.utils import get_configuration
from plone.indexer.interfaces import IIndexableObject
from Products.CMFPlone.utils import safe_hasattr
from Products.DateRecurringIndex.index import DateRecurringIndex
from Products.PluginIndexes.common.UnIndex import UnIndex
from Products.ZCatalog.Catalog import Catalog
from ZODB.POSException import ConflictError
from zope.component import getMultiAdapter
from zope.component import queryAdapter
from zope.globalrequest import getRequest
logger = getLogger(__name__)
"""
)
def test_plone_code_snippet_three():
"""plone profile: minimal snippet with one from-import and one import."""
plone_isort_test(
"""# -*- coding: utf-8 -*-
from plone.app.querystring.interfaces import IQueryModifier
from zope.interface import provider
import logging
logger = logging.getLogger(__name__)
"""
)
| {
"repo_name": "PyCQA/isort",
"path": "tests/unit/profiles/test_plone.py",
"copies": "1",
"size": "2280",
"license": "mit",
"hash": 5475636919671588000,
"line_mean": 29.4,
"line_max": 88,
"alpha_frac": 0.7872807018,
"autogenerated": false,
"ratio": 3.449319213313162,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47365999151131616,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from ..utils import isort_test
# Shortcut: isort_test pre-configured for the "pycharm" profile.
pycharm_isort_test = partial(isort_test, profile="pycharm")
def test_pycharm_snippet_one():
"""pycharm profile: hanging-indent parenthesised imports, no trailing comma."""
pycharm_isort_test(
"""import shutil
import sys
from io import StringIO
from pathlib import Path
from typing import (
Optional,
TextIO,
Union,
cast
)
from warnings import warn
from isort import core
from . import io
from .exceptions import (
ExistingSyntaxErrors,
FileSkipComment,
FileSkipSetting,
IntroducedSyntaxErrors
)
from .format import (
ask_whether_to_apply_changes_to_file,
create_terminal_printer,
show_unified_diff
)
from .io import Empty
from .place import module as place_module # noqa: F401
from .place import module_with_reason as place_module_with_reason # noqa: F401
from .settings import (
DEFAULT_CONFIG,
Config
)
def sort_code_string(
code: str,
extension: Optional[str] = None,
config: Config = DEFAULT_CONFIG,
file_path: Optional[Path] = None,
disregard_skip: bool = False,
show_diff: Union[bool, TextIO] = False,
**config_kwargs,
):
"""
)
| {
"repo_name": "PyCQA/isort",
"path": "tests/unit/profiles/test_pycharm.py",
"copies": "1",
"size": "1121",
"license": "mit",
"hash": -8205352681625555000,
"line_mean": 19.3818181818,
"line_max": 79,
"alpha_frac": 0.6966993756,
"autogenerated": false,
"ratio": 3.4281345565749235,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4624833932174923,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from .utils import make_caller, async_partial, Undefined
class BaseMaker:
    """Common descriptor plumbing shared by the concrete makers.

    A maker resolves its per-instance state through a *mediator*,
    produced by the factory installed via :meth:`bind`.
    """

    __slots__ = ('_name', '_mediator_factory')

    def __delete__(self, instance):
        # makers never support ``del instance.attr``
        raise AttributeError('Attribute cannot be deleted')

    def __set_name__(self, owner, name):
        # remember the attribute name this maker was assigned to
        self._name = name

    def get_mediator(self, instance):
        """Return the mediator handling this maker's value on *instance*."""
        return self._mediator_factory(self._name, instance)

    def init(self, instance):
        """Per-instance initialisation hook; no-op by default."""

    def bind(self, mediator_factory):
        """Install the factory used by :meth:`get_mediator`."""
        self._mediator_factory = mediator_factory
class ValueMaker(BaseMaker):
    """Descriptor for a plain, settable value with an optional default.

    The default may be a constant or a zero-argument callable (treated
    as a factory); it is materialised lazily on first access and then
    stored through the mediator. A value may only be set once.
    """

    __slots__ = ('_default', )

    def __init__(self, default=Undefined):
        self._name = None
        self._default = default

    def __get__(self, instance, owner):
        if instance is None:
            # accessed on the class: expose the descriptor itself
            return self
        mediator = self.get_mediator(instance)
        if mediator.has():
            return mediator.get()
        # ``Undefined`` is a sentinel, so identity (``is``) is the correct
        # comparison -- ``==`` could be fooled by a default value with a
        # custom __eq__.
        if self._default is Undefined:
            raise AttributeError(
                'The value `{name}` has not been set'.format(name=self._name)
            )
        if callable(self._default):
            # NOTE: callables are always treated as factories, so a
            # callable cannot itself be stored as the default value.
            value = self._default()
        else:
            value = self._default
        mediator.save(value)
        return value

    def __set__(self, instance, value):
        mediator = self.get_mediator(instance)
        if mediator.has():
            raise AttributeError(
                'The value `{name}` has already been set'.format(
                    name=self._name
                )
            )
        mediator.save(value)
class FactoryMaker(BaseMaker):
"""Descriptor whose value is produced lazily by a factory function.

The factory result can be cached through the mediator, protected from
assignment (``readonly``), exposed for injection (``inject``), and
hooked with a close handler (see :meth:`close`).
"""
__slots__ = (
'_cache', '_readonly', '_inject', '_close_handler',
'_invalidate_after_closed', '_args', '_func'
)
def __init__(
self, func=None, *, cache=True, readonly=False, inject=None, args=None
):
# cache: store the factory result through the mediator on first use
self._cache = cache
# readonly: forbid explicit assignment via __set__
self._readonly = readonly
# inject: optional name under which the value is made injectable
self._inject = inject
self._close_handler = None
self._invalidate_after_closed = False
# names of mediator-provided arguments passed to the factory
self._args = args or ['root']
self._func = func
def __get__(self, instance, owner):
if instance is None:
# accessed on the class: expose the descriptor itself
return self
return self._get(self.get_mediator(instance))
def _get(self, mediator):
"""Return the (possibly cached) value for *mediator*."""
if mediator.has():
return mediator.get()
value = self._get_func(mediator)
return self._process_value(mediator, value)
def _get_func(self, mediator):
"""Invoke the factory with the mediator-resolved arguments."""
return self._func(*mediator.get_args(self._args))
def _process_value(self, mediator, value):
"""Register close handlers and cache *value* as configured."""
if self._invalidate_after_closed:
mediator.add_close_handler(make_caller(mediator.invalidate))
if self._close_handler:
mediator.add_close_handler(self._close_handler, value)
if self._cache:
mediator.save(value)
return value
def __set__(self, instance, value):
mediator = self.get_mediator(instance)
if mediator.has():
raise AttributeError(
'The value `{name}` has already been set'.format(
name=self._name
)
)
if self._readonly:
raise AttributeError(
'The value `{name}` is readonly'.format(
name=self._name
)
)
mediator.save(value)
def close(self, handler=None, *, invalidate=False):
"""Register *handler* to run when the resource is closed.

Usable directly or as a decorator. With ``invalidate=True`` the
cached value is also invalidated after closing.
"""
def inner(f):
self._invalidate_after_closed = invalidate
self._close_handler = f
return f
if handler is None:
return inner
return inner(handler)
def get_injectable(self, mediator):
"""Return a zero-argument callable resolving this maker's value."""
return partial(self._get, mediator)
def init(self, instance):
"""Expose the injectable under the configured inject name, if any."""
super().init(instance)
if self._inject is None:
return
mediator = self.get_mediator(instance)
mediator.set_inject(self._inject, self.get_injectable(mediator))
class AsyncFactoryMaker(FactoryMaker):
    """Asynchronous variant of FactoryMaker: the factory is awaited.

    ``__get__`` is inherited unchanged from FactoryMaker — it returns
    ``self._get(...)``, which here is a coroutine the caller must await.
    (The previous byte-identical ``__get__`` override was redundant and
    has been removed.)
    """

    __slots__ = ()

    async def _get(self, mediator):
        # Same protocol as FactoryMaker._get, but awaits the factory.
        if mediator.has():
            return mediator.get()
        value = await self._get_func(mediator)
        return self._process_value(mediator, value)

    def get_injectable(self, mediator):
        # async_partial keeps the injected accessor awaitable.
        return async_partial(self._get, mediator)
| {
"repo_name": "palankai/baluster",
"path": "src/baluster/makers.py",
"copies": "1",
"size": "4343",
"license": "mit",
"hash": 296506178279214460,
"line_mean": 27.385620915,
"line_max": 78,
"alpha_frac": 0.5650472024,
"autogenerated": false,
"ratio": 4.200193423597679,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5265240625997679,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from .validator import CerberusValidator
from .error import ValidationError
def load(schema, validator, update, request):
    """Validate ``request.json`` against ``schema`` and return the document.

    When ``update`` is None it is derived from the HTTP verb: PUT and
    PATCH imply a partial update.  Raises ValidationError on failure.
    """
    if update is None:
        # Mutating verbs imply a partial update by default.
        update = request.method in ("PUT", "PATCH")
    checker = validator(request=request)
    if not checker.validate(request.json, schema, update=update):
        raise ValidationError(checker.errors)
    return checker.document
def loader(schema, validator=CerberusValidator, update=None):
    """Build a ``load`` callable for the given Cerberus schema.

    :param schema: a Cerberus schema dict.
    :param validator: validator class; must be a subclass of
        more.cerberus.CerberusValidator (the default).
    :param update: forwarded to the validator; ``True`` disables the
        ``required`` rules.  When ``None`` it is derived per request:
        ``True`` for PUT/PATCH, ``False`` otherwise.

    The returned function takes a request, validates its JSON body
    against the schema, and raises
    :class:`more.cerberus.ValidationError` when validation fails.
    It can be plugged into a json view.
    """
    if not issubclass(validator, CerberusValidator):
        raise TypeError(
            "Validator must be a subclass of more.cerberus.CerberusValidator"
        )
    return partial(load, schema, validator, update)
| {
"repo_name": "morepath/more.cerberus",
"path": "more/cerberus/loader.py",
"copies": "1",
"size": "1504",
"license": "bsd-3-clause",
"hash": -6849468244112813000,
"line_mean": 36.6,
"line_max": 77,
"alpha_frac": 0.6914893617,
"autogenerated": false,
"ratio": 4.372093023255814,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5563582384955813,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from validator import decorator
from validator import metadata_helpers
@decorator.register_test(tier=1)
def test_manifest_json_params(err, xpi_manifest=None):
    """Check required manifest.json fields when a manifest is present."""
    if not err.get_resource('has_manifest_json'):
        return
    validate_required_id(err)
    validate_required_field(err, 'name', validate_name)
    validate_required_field(err, 'version', validate_version)
def validate_required_id(err):
    """Validate applications.gecko.id in manifest.json, or report it missing."""
    data = err.get_resource('manifest_json').data
    present = (
        'applications' in data and
        'gecko' in data['applications'] and
        'id' in data['applications']['gecko']
    )
    if not present:
        create_missing_field_error(err, 'id')
        return
    validate_id(err, data['applications']['gecko']['id'])
def validate_required_field(err, field, validate_fn):
    """Run ``validate_fn`` on a manifest.json field, or report it missing."""
    data = err.get_resource('manifest_json').data
    if field not in data:
        create_missing_field_error(err, field)
        return
    validate_fn(err, data[field])
def create_missing_field_error(err, field):
    """Report a required manifest.json field as missing."""
    summary = 'Your manifest.json is missing a required field'
    description = 'Your manifest.json is missing the "{field}" field.'.format(
        field=field)
    err.error(('manifest_json', 'field_required', field), summary, description)
# Shared metadata validators specialised for manifest.json, so error
# reports cite the right source file.
validate_id = partial(metadata_helpers.validate_id, source='manifest.json')
validate_version = partial(
    metadata_helpers.validate_version, source='manifest.json')
validate_name = partial(metadata_helpers.validate_name, source='manifest.json')
| {
"repo_name": "mstriemer/amo-validator",
"path": "validator/testcases/manifestjson.py",
"copies": "1",
"size": "1588",
"license": "bsd-3-clause",
"hash": -2290752387873787100,
"line_mean": 33.5217391304,
"line_max": 79,
"alpha_frac": 0.6826196474,
"autogenerated": false,
"ratio": 3.8265060240963855,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5009125671496385,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from validator import decorator
from validator import metadata_helpers
@decorator.register_test(tier=1)
def test_package_json_params(err, xpi_package=None):
    """Check required package.json fields when a package.json is present."""
    if not err.get_resource('has_package_json'):
        return
    validate_required_field(err, 'id', validate_id)
    validate_required_field(err, 'name', validate_name)
    validate_required_field(err, 'version', validate_version)
def validate_required_field(err, field, validate_fn):
    """Run ``validate_fn`` on a package.json field, or report it missing."""
    package_json = err.get_resource('package_json')
    if field not in package_json:
        err.error(
            ('package_json', 'field_required', field),
            'Your package.json is missing a required field',
            'Your package.json is missing the "{field}" field.'.format(
                field=field))
        return
    validate_fn(err, package_json[field])
# Shared metadata validators specialised for package.json, so error
# reports cite the right source file.
validate_id = partial(metadata_helpers.validate_id, source='package.json')
validate_version = partial(
    metadata_helpers.validate_version, source='package.json')
validate_name = partial(metadata_helpers.validate_name, source='package.json')
| {
"repo_name": "magopian/amo-validator",
"path": "validator/testcases/packagejson.py",
"copies": "5",
"size": "1116",
"license": "bsd-3-clause",
"hash": 7636792244210592000,
"line_mean": 35,
"line_max": 78,
"alpha_frac": 0.6872759857,
"autogenerated": false,
"ratio": 3.86159169550173,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.704886768120173,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from whoosh.qparser import default
from whoosh.query import compound, ranges, terms
from whoosh.query.wrappers import Not
from whoosh.qparser import plugins
from sqlalchemy.sql import and_, or_, not_, sqltypes
from sqlalchemy.sql.expression import bindparam
class ParserError(ValueError):
    """Raised when a search query cannot be translated for SQLAlchemy."""
def get_field(model, field):
    """Return the model attribute for ``field``, honouring ``__aliases__``."""
    try:
        return getattr(model, field)
    except AttributeError:
        # Direct lookup failed: retry through the optional alias mapping,
        # falling back to the original name (which re-raises if absent).
        aliases = getattr(model, '__aliases__', {})
        return getattr(model, aliases.get(field, field))
def to_sqlalchemy(model, query):
    """Translate a parsed Whoosh query tree into a SQLAlchemy clause.

    Recursively maps And/Or/Not, term-range, term, prefix and wildcard
    nodes onto comparisons against the model's columns.
    """
    if isinstance(query, compound.And):
        subqueries = map(
            partial(to_sqlalchemy, model),
            query.subqueries
        )
        return and_(*subqueries)
    elif isinstance(query, compound.Or):
        subqueries = map(
            partial(to_sqlalchemy, model),
            query.subqueries
        )
        return or_(*subqueries)
    elif isinstance(query, ranges.TermRange):
        # Open-ended ranges become one-sided comparisons.
        lower = query.start
        upper = query.end
        field = get_field(model, query.fieldname)
        if lower is None:
            return field <= upper
        elif upper is None:
            return field >= lower
        else:
            return and_(field >= lower, field <= upper)
    elif isinstance(query, terms.Term):
        field = get_field(model, query.fieldname)
        # Integer-typed columns: compare numerically; for non-numeric
        # terms fall back to the term's position in the column's
        # `choices` list, if it has one.
        if isinstance(field.property.columns[0].type, (
            sqltypes.Integer,
            sqltypes.BigInteger,
            sqltypes.SmallInteger,
            sqltypes.INT,
            sqltypes.BIGINT,
            sqltypes.SMALLINT,
        )):
            try:
                return int(query.text) == field
            except ValueError:
                if hasattr(field, 'choices') and query.text in field.choices:
                    return field.choices.index(query.text) == field
                else:
                    raise ParserError(
                        'Field {} must be an integer'.format(query.fieldname)
                    )
        else:
            # '~' is presumably PostgreSQL's regex-match operator —
            # verify against the configured backend.
            return field.op('~')(query.text)
    elif isinstance(query, terms.Prefix):
        # Whoosh keeps the trailing '*' in the text; strip it.
        field = get_field(model, query.fieldname)
        return field.startswith(query.text[:-1])
    elif isinstance(query, terms.Wildcard):
        # Translate Whoosh wildcards to SQL LIKE wildcards.
        field = get_field(model, query.fieldname)
        text = query.text.replace('*', '%').replace('?', '_')
        return field.like(text)
    elif isinstance(query, Not):
        return not_(to_sqlalchemy(model, query.query))
    else:
        # NOTE(review): unsupported node types print a debug message and
        # implicitly return None (Python 2 print statements).
        print 'dno?'
        print '\t', query
        print '\t', type(query)
class QueryParser(default.QueryParser):
    """Whoosh query parser that emits SQLAlchemy clauses.

    Parses the user's search text with Whoosh, then converts the parse
    tree via ``to_sqlalchemy`` against the configured model.
    """

    def __init__(self, model, default_field):
        self.model = model
        self.default_field = default_field
        super(QueryParser, self).__init__(self.default_field, None)
        # We don't want these: boost and phrase syntax are removed from
        # the grammar.
        self.remove_plugin_class(plugins.BoostPlugin)
        self.remove_plugin_class(plugins.PhrasePlugin)

    def parse(self, text, normalize=True, debug=False):
        """Parse ``text`` and return a SQLAlchemy clause.

        Raises ParserError for a query naming an unknown field.
        """
        parsed = super(QueryParser, self).parse(
            text, normalize=normalize, debug=debug,
        )
        try:
            return to_sqlalchemy(self.model, parsed)
        except AttributeError as error:
            # NOTE(review): Python 2 only (`error.message`); matching on
            # message text is fragile and specific to the SystemEvent
            # model.  Any other AttributeError is silently swallowed and
            # the method returns None.
            if "'SystemEvent' has no attribute" in error.message:
                field = error.message.split("'")[3]
                raise ParserError("Invalid field '{}'".format(field))
| {
"repo_name": "tehmaze-labs/mountain",
"path": "mountain/query/parser.py",
"copies": "1",
"size": "3467",
"license": "mit",
"hash": -1435145874483246000,
"line_mean": 29.6814159292,
"line_max": 77,
"alpha_frac": 0.5791750793,
"autogenerated": false,
"ratio": 4.4335038363171355,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5512678915617135,
"avg_score": null,
"num_lines": null
} |
from functools import partial
from wtforms import BooleanField, validators, HiddenField, widgets, StringField, DateField, TextAreaField
from wtforms import SelectField as SelectFieldW
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms_alchemy import ModelFieldList
from dexter.forms import ModelForm, FormField, MultiCheckboxField, IntegerField, SelectField, SelectMultipleField, RadioField, YesNoField, FloatField
from dexter.models import * # noqa
# Radio-button flavour of QuerySelectField: renders the query results as
# a list of radio inputs without prefixed labels.
QueryRadioField = partial(QuerySelectField, widget=widgets.ListWidget(prefix_label=False), option_widget=widgets.RadioInput())
class DocumentSourceForm(ModelForm):
    """ Form for editing a document source. """
    class Meta:
        model = DocumentSource
        only = ['name', 'quoted', 'photographed']
        field_args = {'name': {'label': 'Name'}}

    id = HiddenField('id', [validators.Optional()])
    # '1' marks the sub-form as deleted client-side; see is_deleted().
    deleted = HiddenField('deleted', default='0')
    person_id = IntegerField('person_id', widget=widgets.HiddenInput())
    named = BooleanField('The source is named', default=True)
    source_type = RadioField('Type', default='person', choices=[['person', 'Adult'], ['child', 'Child'], ['secondary', 'Secondary (not a person)']])
    gender = QueryRadioField('Gender', get_label='name', allow_blank=True, blank_text='?', query_factory=Gender.all)
    race = QueryRadioField('Race', get_label='name', allow_blank=True, blank_text='?', query_factory=Race.all)
    function = QuerySelectField('Function', get_label='name', allow_blank=True, blank_text='(none)', query_factory=SourceFunction.all)
    # role/affiliation have no query_factory here: they are bound
    # per-document in __init__.
    role = QuerySelectField('Role', get_label='name', allow_blank=True, blank_text='(none)')
    age = QuerySelectField('Age', get_label='name', allow_blank=True, blank_text='(none)', query_factory=SourceAge.all)
    affiliation = QuerySelectField('Affiliation', get_label='full_name', allow_blank=True, blank_text='(none)')

    def __init__(self, document, *args, **kwargs):
        # NOTE(review): at this point role/affiliation are presumably
        # still unbound field declarations, so mutating .kwargs affects
        # how they are constructed by super().__init__ — confirm.
        self.role.kwargs['query_factory'] = lambda: document.analysis_nature.roles
        self.affiliation.kwargs['query_factory'] = lambda: Affiliation.organisations(document.country)
        super(DocumentSourceForm, self).__init__(*args, **kwargs)

    @property
    def source(self):
        """ the associated source object, if any """
        return self._obj

    def is_new(self):
        # True when this sub-form is not bound to an existing source.
        return self.source is None

    def is_empty(self):
        # A new, named source with no name entered is treated as empty.
        return self.is_new() and self.named.data and not self.name.data

    def is_deleted(self):
        return self.deleted.data == '1'

    def validate(self):
        # Normalise field data per source type before running the
        # standard validators.
        if self.source_type.data == 'person':
            self.role.data = None
            self.age.data = None
            # link to a person
            if self.named.data and self.name.data:
                self.person_id.data = Person.get_or_create(self.name.data).id
        elif self.source_type.data == 'child':
            self.person_id.data = None
            self.function.data = None
            self.affiliation.data = None
        elif self.source_type.data == 'secondary':
            self.person_id.data = None
            self.gender.data = None
            self.race.data = None
            self.role.data = None
            self.age.data = None
            self.named.data = True

        # ignore some data, based on the source type
        if not self.named.data:
            # it's anonymous, so ignore the name field
            self.name.data = None

        return super(DocumentSourceForm, self).validate()

    def populate_obj(self, obj):
        # the form only deals with person_id to make life simpler,
        # so if it's set, also set the person field. Do this
        # before everything else so the race and gender
        # get set on the person directly in the super() call.
        if self.person_id.data:
            obj.person = Person.query.get(self.person_id.data)
            obj.name = None

        super(DocumentSourceForm, self).populate_obj(obj)

        if self.is_new():
            # a newly created source
            obj.manual = True
            obj.id = None

        # the form only deals with person_id to make life simpler,
        # so if it's set, also set the person field
        if obj.person:
            # override the 'quoted' attribute if we know this person has
            # utterances in this document
            if any(obj.person == u.entity.person for u in obj.document.utterances):
                obj.quoted = True

            if not obj.person.gender:
                obj.person.guess_gender_from_doc(obj.document)

        # if it's linked to a person, clear the other crap
        # the form sets
        if obj.person and obj.named:
            obj.unnamed = False
            obj.unnamed_gender = None
            obj.unnamed_race = None
class DocumentAnalysisForm(ModelForm):
    """Top-level analysis form for a document, embedding its source sub-forms."""
    class Meta:
        model = Document
        only = ['flagged', 'notes']

    sources = ModelFieldList(FormField(DocumentSourceForm))

    def __init__(self, *args, **kwargs):
        # pass the document into the DocumentSourcesForm constructor
        self.sources.args[0].kwargs['form_kwargs'] = {'document': kwargs.get('obj')}
        super(DocumentAnalysisForm, self).__init__(*args, **kwargs)

    @property
    def non_new_sources(self):
        # Sub-forms bound to an existing DocumentSource.
        return [s.form for s in self.sources if not s.form.is_new()]

    @property
    def new_sources(self):
        # Sub-forms that would create a new DocumentSource.
        return [s.form for s in self.sources if s.form.is_new()]
class AnchorAnalysisForm(DocumentAnalysisForm):
    """
    Anchor (automated) analysis of a document
    """
    issues = MultiCheckboxField('Issues')
    topic_id = SelectField('Topic')
    origin_location_id = SelectField('Origin')

    quality_basic_context = BooleanField('Basic context')
    quality_causes = BooleanField('Causes are mentioned')
    quality_policies = BooleanField('Relevant policies are mentioned')
    quality_solutions = BooleanField('Solutions are offered')
    quality_consequences = BooleanField('Consequences are mentioned')
    quality_self_help = BooleanField('Self-help offered')

    def __init__(self, *args, **kwargs):
        super(AnchorAnalysisForm, self).__init__(*args, **kwargs)
        # Choices depend on the bound document's country and analysis
        # nature, so they are built per-instance.
        country = self._obj.country
        self.origin_location_id.choices = [['', '(none)']] + [
            [str(loc.id), loc.name] for loc in Location.for_country(country)]
        nature = self._obj.analysis_nature
        # Sort issues alphabetically by name for display.
        self.issues.choices = sorted([(str(issue.id), issue.name) for issue in nature.issues], key=lambda i: i[1])
        self.topic_id.choices = [['', '(none)']] + Topic.for_select_widget(nature.topics)

    @property
    def quality_fields(self):
        # The quality checkboxes, grouped for convenient iteration.
        return [
            self.quality_basic_context,
            self.quality_causes,
            self.quality_consequences,
            self.quality_solutions,
            self.quality_policies,
            self.quality_self_help,
        ]
class FDIAnalysisForm(ModelForm):
    """FDI (Manual) analysis of a document.

    Previously this docstring string sat *after* the Meta class, where it
    was a dead expression statement rather than the class docstring; it
    has been moved here.  The no-op ``validate``/``populate_obj``
    overrides (which only delegated to super) have been removed — the
    inherited implementations are identical in behaviour.
    """

    class Meta:
        model = Investment

    name = StringField('Project name', [validators.Length(max=200)])
    value = FloatField('Investment value (Forex)', [validators.NumberRange(min=0, max=10000)])
    value2 = FloatField('Investment value (Rands)', [validators.NumberRange(min=0, max=10000)])
    temp_opps = IntegerField('Temporary opportunities', [validators.NumberRange(
        min=0, max=1000000, message='Please enter an integer')])
    perm_opps = IntegerField('Job opportunities', [validators.NumberRange(
        min=0, max=1000000, message='Please enter an integer')])
    investment_begin = DateField('Investment start date', [validators.Optional()], format='%Y/%m/%d')
    investment_end = DateField('Investment end date', [validators.Optional()], format='%Y/%m/%d')
    phase_date = DateField('Phase date', [validators.Optional()], format='%Y/%m/%d')
    currency_id = SelectField('Currency')
    phase_id = SelectField('Phase')
    sector_id = SelectField('Sector')
    involvement_id1 = SelectField('Government involvement (Tier 1)')
    # NOTE(review): the integer defaults (73, 19) presumably match
    # Involvements2/Involvements3 row ids — confirm against the data.
    involvement_id2 = SelectField('Government involvement (Tier 2)', default=73)
    involvement_id3 = SelectField('Government involvement (Tier 3)', default=19)
    industry_id = SelectField('Industry')
    target_market = StringField('Target Market')
    invest_origin_id = SelectField('Origin of investment (country)')
    province_id = SelectField('Province')
    invest_origin_city = StringField('Origin of investment (city)')
    invest_type_id = SelectField('Type')
    company = StringField('Company')
    gov_programs = StringField('Government Programmes')
    soc_programs = StringField('Social Benefit Programmes')
    mot_investment = StringField('Motivation for Investment')
    additional_place = StringField('Additional place')
    fdi_notes = TextAreaField('Notes')
    value_unit_id = SelectField('Unit')
    value_unit_id2 = SelectField('Unit 2', [validators.Optional()])

    def __init__(self, *args, **kwargs):
        super(FDIAnalysisForm, self).__init__(*args, **kwargs)
        # Populate all select choices from the reference tables.
        self.currency_id.choices = [[str(c.id), c.name] for c in Currencies.all()]
        self.invest_origin_id.choices = [[str(c.id), c.name] for c in InvestmentOrigins.all()]
        self.phase_id.choices = [[str(c.id), c.name] for c in Phases.all()]
        self.sector_id.choices = [[str(c.id), c.name] for c in Sectors.all()]
        self.invest_type_id.choices = [[str(c.id), c.name] for c in InvestmentType.all()]
        self.value_unit_id.choices = [[str(c.id), c.name] for c in ValueUnits.all()]
        self.value_unit_id2.choices = [[str(c.id), c.name] for c in ValueUnits.all()]
        self.involvement_id1.choices = [[str(c.id), c.name] for c in Involvements1.all()]
        self.involvement_id2.choices = [[str(c.id), c.name] for c in Involvements2.all()]
        self.involvement_id3.choices = [[str(c.id), c.name] for c in Involvements3.all()]
        self.industry_id.choices = [[str(c.id), c.name] for c in Industries.all()]
        self.province_id.choices = [[str(c.id), c.name] for c in Provinces.all()]
class ElectionsAnalysisForm(AnchorAnalysisForm):
    """Analysis of a document from an elections standpoint.

    Behaviourally identical to AnchorAnalysisForm; defined as its own
    type so elections analyses are distinguishable.
    """
class ChildrenAnalysisForm(ElectionsAnalysisForm):
    """
    Analysis of a document from a children standpoint.
    """
    child_focus = YesNoField('Children are a central focus?', [validators.Optional()])

    abuse_source = BooleanField('Child is a source')
    abuse_identified = BooleanField("Child's identity is disclosed")
    abuse_victim = BooleanField('Child is a victim of abuse')

    # Choices for both principle fields are filled in __init__ from the
    # Principle table.
    principle_supported_id = RadioField('Principle strongly supported', [validators.Optional()], default='')
    principle_violated_id = RadioField('Principle clearly violated', [validators.Optional()], default='')

    def __init__(self, *args, **kwargs):
        super(ChildrenAnalysisForm, self).__init__(*args, **kwargs)
        principles = Principle.query.all()
        # name -> description lookup (presumably for display alongside
        # the radio options — confirm in the templates).
        self.principle_descriptions = dict((p.name, p.description) for p in principles)
        self.principle_supported_id.choices = [['', '(none)']] + [(str(p.id), p.name) for p in principles]
        self.principle_violated_id.choices = self.principle_supported_id.choices

    @property
    def abuse_fields(self):
        # The abuse checkboxes, grouped for convenient iteration.
        return [
            self.abuse_source,
            self.abuse_identified,
            self.abuse_victim,
        ]
| {
"repo_name": "Code4SA/mma-dexter",
"path": "dexter/analysis/forms.py",
"copies": "1",
"size": "11836",
"license": "apache-2.0",
"hash": 6784642379375105000,
"line_mean": 42.04,
"line_max": 149,
"alpha_frac": 0.6370395404,
"autogenerated": false,
"ratio": 3.7754385964912283,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9899915134513984,
"avg_score": 0.002512600475449044,
"num_lines": 275
} |
from functools import partial
from zipline.utils.compat import mappingproxy
def _make_metrics_set_core():
    """Create a family of metrics sets functions that read from the same
    metrics set mapping.

    Returns
    -------
    metrics_sets : mappingproxy
        The mapping of metrics sets to load functions.
    register : callable
        Registers a new metrics set in the ``metrics_sets`` mapping.
    unregister : callable
        Removes a metrics set from the ``metrics_sets`` mapping.
    load : callable
        Loads the ingested metrics sets back into memory.
    """
    _registry = {}
    # Read-only view so callers cannot mutate the registry directly; all
    # changes must go through register/unregister, which reject
    # duplicate or unknown names.
    metrics_sets = mappingproxy(_registry)

    def register(name, function=None):
        """Register a new metrics set.

        Parameters
        ----------
        name : str
            The name of the metrics set
        function : callable
            The callable which produces the metrics set.

        Notes
        -----
        This may be used as a decorator if only ``name`` is passed.

        See Also
        --------
        zipline.finance.metrics.get_metrics_set
        zipline.finance.metrics.unregister_metrics_set
        """
        if function is None:
            # allow as decorator with just name.
            return partial(register, name)

        if name in _registry:
            raise ValueError('metrics set %r is already registered' % name)

        _registry[name] = function
        return function

    def unregister(name):
        """Unregister an existing metrics set.

        Parameters
        ----------
        name : str
            The name of the metrics set

        See Also
        --------
        zipline.finance.metrics.register_metrics_set
        """
        try:
            del _registry[name]
        except KeyError:
            raise ValueError(
                'metrics set %r was not already registered' % name,
            )

    def load(name):
        """Return an instance of the metrics set registered with the given name.

        Returns
        -------
        metrics : set[Metric]
            A new instance of the metrics set.

        Raises
        ------
        ValueError
            Raised when no metrics set is registered to ``name``
        """
        try:
            function = _registry[name]
        except KeyError:
            raise ValueError(
                'no metrics set registered as %r, options are: %r' % (
                    name,
                    sorted(_registry),
                ),
            )
        return function()

    return metrics_sets, register, unregister, load


metrics_sets, register, unregister, load = _make_metrics_set_core()
| {
"repo_name": "quantopian/zipline",
"path": "zipline/finance/metrics/core.py",
"copies": "1",
"size": "2972",
"license": "apache-2.0",
"hash": -4834133303947294000,
"line_mean": 27.0377358491,
"line_max": 80,
"alpha_frac": 0.5730148048,
"autogenerated": false,
"ratio": 5.011804384485666,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00011646866992778941,
"num_lines": 106
} |
from functools import partial
import asyncio
import theblues.charmstore
import theblues.errors
class CharmStore:
    """
    Async wrapper around theblues.charmstore.CharmStore

    Method calls are delegated to the underlying synchronous client and
    run in the event loop's default executor; transient ServerErrors are
    retried up to three times with a one-second pause between attempts.
    """

    def __init__(self, loop, cs_timeout=20):
        self.loop = loop
        self._cs = theblues.charmstore.CharmStore(timeout=cs_timeout)

    def __getattr__(self, name):
        """
        Wrap method calls in coroutines that use run_in_executor to make them
        async.  The wrapper is cached on the instance, so __getattr__ only
        fires on the first access of each attribute.
        """
        attr = getattr(self._cs, name)
        if not callable(attr):
            # Plain attribute: a zero-arg getter returning the live value.
            wrapper = partial(getattr, self._cs, name)
            setattr(self, name, wrapper)
        else:
            async def coro(*args, **kwargs):
                method = partial(attr, *args, **kwargs)
                for attempt in range(1, 4):
                    try:
                        return await self.loop.run_in_executor(None, method)
                    except theblues.errors.ServerError:
                        if attempt == 3:
                            # Give up after the third failed attempt.
                            raise
                        # Fix: the `loop` keyword to asyncio.sleep was
                        # deprecated in 3.8 and removed in 3.10; the
                        # coroutine already runs on self.loop.
                        await asyncio.sleep(1)
            setattr(self, name, coro)
            wrapper = coro
        return wrapper
| {
"repo_name": "juju/python-libjuju",
"path": "juju/charmstore.py",
"copies": "1",
"size": "1188",
"license": "apache-2.0",
"hash": -1858997365103037700,
"line_mean": 31.1081081081,
"line_max": 77,
"alpha_frac": 0.5378787879,
"autogenerated": false,
"ratio": 4.304347826086956,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5342226613986956,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.