hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bbf2ddcc64c4d9303364a48415c3e71141476b01 | 2,386 | py | Python | engine/render.py | suoxinkey/PlenOctrees_NeRF-SH | bf66ecc93ee166a4dc1a208d063408b452484dde | [
"MIT"
] | 54 | 2021-04-13T14:51:40.000Z | 2022-03-09T12:49:46.000Z | engine/render.py | suoxinkey/PlenOctrees_NeRF-SH | bf66ecc93ee166a4dc1a208d063408b452484dde | [
"MIT"
] | 2 | 2021-04-17T02:05:41.000Z | 2021-12-06T12:09:38.000Z | engine/render.py | suoxinkey/PlenOctrees_NeRF-SH | bf66ecc93ee166a4dc1a208d063408b452484dde | [
"MIT"
] | 2 | 2021-04-17T16:22:20.000Z | 2021-08-23T05:17:04.000Z | import torch
from utils import batchify_ray, vis_density, ray_sampling
import numpy as np
import os
import torch
'''
Sample rays from views (and images) with/without masks
--------------------------
INPUT Tensors
K: intrinsics of camera (3,3)
T: extrinsic of camera (4,4)
image_size: the size of image [H,W]
ROI: 2D ROI bboxes (4) left up corner(x,y) followed the height and width (h,w)
masks:(M,H,W)
-------------------
OUTPUT:
list of rays: (N,6) dirs(3) + pos(3)
RGB: (N,C)
'''
def render(model, K, T, img_size, ROI=None, bboxes=None, only_coarse=False, near_far=None):
    """Render one camera view with a NeRF-style model.

    Args:
        model: network to evaluate (switched to eval mode).
        K: (3,3) camera intrinsics tensor.
        T: (4,4) camera extrinsics tensor.
        img_size: [H, W] output image size.
        ROI: optional 2D bbox (top, left, height, width); pixels outside it
            are left at zero.
        bboxes: optional 3D bounding box used to bound ray sampling.
        only_coarse: unused; kept for interface compatibility.
        near_far: optional near/far sampling range. Either ``bboxes`` or
            ``near_far`` must be provided.

    Returns:
        (stage2, stage1): two lists [rgb (H,W,3), depth (H,W), alpha (H,W)],
        the fine and coarse render results respectively.
    """
    model.eval()
    assert not (bboxes is None and near_far is None), ' either bbox or near_far should not be None.'

    # Mask selects which pixels get rays; all pixels unless an ROI is given.
    mask = torch.ones(img_size[0], img_size[1])
    if ROI is not None:
        mask = torch.zeros(img_size[0], img_size[1])
        mask[ROI[0]:ROI[0] + ROI[2], ROI[1]:ROI[1] + ROI[3]] = 1.0

    rays, _ = ray_sampling(K.unsqueeze(0), T.unsqueeze(0), img_size,
                           masks=mask.unsqueeze(0))
    if bboxes is not None:
        # One copy of the bbox per sampled ray.
        bboxes = bboxes.unsqueeze(0).repeat(rays.size(0), 1, 1)

    with torch.no_grad():
        stage2, stage1, _ = batchify_ray(model, rays, bboxes, near_far=near_far)

    def _scatter(stage):
        # Scatter per-ray outputs (rgb, depth, alpha) back into full-size
        # images; pixels outside the ROI mask stay zero.
        rgb = torch.zeros(img_size[0], img_size[1], 3, device=stage[0].device)
        rgb[mask > 0.5, :] = stage[0]
        depth = torch.zeros(img_size[0], img_size[1], 1, device=stage[1].device)
        depth[mask > 0.5, :] = stage[1]
        alpha = torch.zeros(img_size[0], img_size[1], 1, device=stage[2].device)
        alpha[mask > 0.5, :] = stage[2]
        return [rgb,
                depth.reshape(img_size[0], img_size[1]),
                alpha.reshape(img_size[0], img_size[1])]

    return _scatter(stage2), _scatter(stage1)
| 29.45679 | 100 | 0.638726 |
fbcc7cf679daf341bb36ab70199154eda4564daf | 1,060 | py | Python | noxfile.py | raman325/cookiecutter-homeassistant-custom-component | a546052798905646d78b446ce20f2b2554e468c2 | [
"MIT"
] | 28 | 2020-11-14T18:03:42.000Z | 2022-03-05T11:22:12.000Z | noxfile.py | raman325/cookiecutter-homeassistant-custom-component | a546052798905646d78b446ce20f2b2554e468c2 | [
"MIT"
] | 133 | 2020-10-30T09:25:49.000Z | 2022-03-31T04:05:15.000Z | noxfile.py | raman325/cookiecutter-homeassistant-custom-component | a546052798905646d78b446ce20f2b2554e468c2 | [
"MIT"
] | 9 | 2021-01-06T03:16:45.000Z | 2022-02-28T18:22:19.000Z | """Nox sessions."""
from pathlib import Path
import shutil
import nox
from nox.sessions import Session
# Sessions run by default when `nox` is invoked without -s/--session.
nox.options.sessions = (
    "linkcheck",
)
@nox.session
def docs(session: Session) -> None:
    """Build the documentation."""
    # Start from a clean build directory.
    build_dir = Path("docs", "_build")
    if build_dir.exists():
        shutil.rmtree(build_dir)
    args = session.posargs or ["-W", "-n", "docs", "docs/_build"]
    if session.interactive and not session.posargs:
        # Interactive run with no explicit args: live-rebuild and open browser.
        args = ["-a", "--watch=docs/_static", "--open-browser", *args]
    session.install("-r", "docs/requirements.txt")
    builder = "sphinx-autobuild" if session.interactive else "sphinx-build"
    session.run(builder, *args)
@nox.session
def linkcheck(session: Session) -> None:
    """Build the documentation."""
    # Start from a clean build directory.
    build_dir = Path("docs", "_build")
    if build_dir.exists():
        shutil.rmtree(build_dir)
    args = session.posargs or ["-W", "-b", "linkcheck", "docs", "docs/_build"]
    session.install("-r", "docs/requirements.txt")
    session.run("sphinx-build", *args)
| 24.651163 | 78 | 0.631132 |
dfa79c208b049698bd7d25fe9f75455c74bb100b | 31,134 | py | Python | grumpy-tools-src/grumpy_tools/compiler/stmt.py | alanjds/grumpy | c5700607ae7f66c355a053118a62992d1ec28404 | [
"Apache-2.0"
] | 19 | 2018-03-27T07:46:26.000Z | 2020-04-18T12:05:33.000Z | grumpy-tools-src/grumpy_tools/compiler/stmt.py | alanjds/grumpy | c5700607ae7f66c355a053118a62992d1ec28404 | [
"Apache-2.0"
] | 147 | 2018-08-03T16:18:53.000Z | 2018-08-20T15:15:11.000Z | grumpy-tools-src/grumpy_tools/compiler/stmt.py | alanjds/grumpy | c5700607ae7f66c355a053118a62992d1ec28404 | [
"Apache-2.0"
] | 1 | 2018-07-28T14:17:35.000Z | 2018-07-28T14:17:35.000Z | # coding=utf-8
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Visitor class for traversing Python statements."""
from __future__ import unicode_literals
import string
import textwrap
from grumpy_tools.compiler import block
from grumpy_tools.compiler import expr
from grumpy_tools.compiler import expr_visitor
from grumpy_tools.compiler import imputil
from grumpy_tools.compiler import util
from grumpy_tools.vendor.pythonparser import algorithm
from grumpy_tools.vendor.pythonparser import ast
# Prefix applied to Go native type names when exposed to Python code.
_NATIVE_TYPE_PREFIX = 'type_'

# Partial list of known vcs for go module import
# Full list can be found at https://golang.org/src/cmd/go/vcs.go
# TODO: Use official vcs.go module instead of partial list
_KNOWN_VCS = [
    'golang.org', 'github.com', 'bitbucket.org', 'git.apache.org',
    'git.openstack.org', 'launchpad.net'
]

# Shared sentinel used as the context manager when an optional
# sub-expression (e.g. an assert message) is absent.
_nil_expr = expr.nil_expr
class StatementVisitor(algorithm.Visitor):
"""Outputs Go statements to a Writer for the given Python nodes."""
# pylint: disable=invalid-name,missing-docstring
  def __init__(self, block_, future_node=None):
    # block_: the Block (module/class/function scope) code is generated for.
    # future_node: the `from __future__ import ...` node, if any; used by
    # visit_ImportFrom to reject later __future__ imports.
    self.block = block_
    self.future_node = future_node
    self.writer = util.Writer()
    self.expr_visitor = expr_visitor.ExprVisitor(self)
def generic_visit(self, node):
msg = 'node not yet implemented: {}'.format(type(node).__name__)
raise util.ParseError(node, msg)
def visit_expr(self, node):
return self.expr_visitor.visit(node)
  def visit_Assert(self, node):
    """Generate a runtime assert; raises AssertionError when the test fails."""
    self._write_py_context(node.lineno)
    # TODO: Only evaluate msg if cond is false.
    with self.visit_expr(node.msg) if node.msg else _nil_expr as msg,\
        self.visit_expr(node.test) as cond:
      self.writer.write_checked_call1(
          'πg.Assert(πF, {}, {})', cond.expr, msg.expr)
  def visit_AugAssign(self, node):
    """Generate an in-place binary op (e.g. `x += y`) via the Go runtime."""
    op_type = type(node.op)
    if op_type not in StatementVisitor._AUG_ASSIGN_TEMPLATES:
      fmt = 'augmented assignment op not implemented: {}'
      raise util.ParseError(node, fmt.format(op_type.__name__))
    self._write_py_context(node.lineno)
    # Evaluate target and value, apply the in-place helper, then re-bind the
    # result back to the target (matching CPython augmented assignment).
    with self.visit_expr(node.target) as target,\
        self.visit_expr(node.value) as value,\
        self.block.alloc_temp() as temp:
      self.writer.write_checked_call2(
          temp, StatementVisitor._AUG_ASSIGN_TEMPLATES[op_type],
          lhs=target.expr, rhs=value.expr)
      self._assign_target(node.target, temp.expr)
def visit_Assign(self, node):
self._write_py_context(node.lineno)
with self.visit_expr(node.value) as value:
for target in node.targets:
self._tie_target(target, value.expr)
def visit_Break(self, node):
if not self.block.loop_stack:
raise util.ParseError(node, "'break' not in loop")
self._write_py_context(node.lineno)
self.writer.write_tmpl(textwrap.dedent("""\
$breakvar = true
continue"""), breakvar=self.block.top_loop().breakvar.name)
  def visit_ClassDef(self, node):
    """Generate code that evaluates the class body and builds the type."""
    # Since we only care about global vars, we end up throwing away the locals
    # collected by BlockVisitor. But use it anyway since it buys us detection of
    # assignment to vars that are later declared global.
    block_visitor = block.BlockVisitor()
    for child in node.body:
      block_visitor.visit(child)
    global_vars = {v.name for v in block_visitor.vars.values()
                   if v.type == block.Var.TYPE_GLOBAL}
    # Visit all the statements inside body of the class definition.
    body_visitor = StatementVisitor(block.ClassBlock(
        self.block, node.name, global_vars), self.future_node)
    # Indent so that the function body is aligned with the goto labels.
    with body_visitor.writer.indent_block():
      body_visitor._visit_each(node.body)  # pylint: disable=protected-access
    self._write_py_context(node.lineno)
    with self.block.alloc_temp('*πg.Dict') as cls, \
        self.block.alloc_temp() as mod_name, \
        self.block.alloc_temp('[]*πg.Object') as bases, \
        self.block.alloc_temp() as meta:
      self.writer.write('{} = make([]*πg.Object, {})'.format(
          bases.expr, len(node.bases)))
      for i, b in enumerate(node.bases):
        with self.visit_expr(b) as b:
          self.writer.write('{}[{}] = {}'.format(bases.expr, i, b.expr))
      self.writer.write('{} = πg.NewDict()'.format(cls.name))
      self.writer.write_checked_call2(
          mod_name, 'πF.Globals().GetItem(πF, {}.ToObject())',
          self.block.root.intern('__name__'))
      self.writer.write_checked_call1(
          '{}.SetItem(πF, {}.ToObject(), {})',
          cls.expr, self.block.root.intern('__module__'), mod_name.expr)
      # Evaluate the class body inside a nested Code object whose locals land
      # in the πClass dict; that dict becomes the class __dict__.
      tmpl = textwrap.dedent("""
          _, πE = πg.NewCode($name, $filename, nil, 0, func(πF *πg.Frame, _ []*πg.Object) (*πg.Object, *πg.BaseException) {
          \tπClass := $cls
          \t_ = πClass""")
      self.writer.write_tmpl(tmpl, name=util.go_str(node.name),
                             filename=util.go_str(self.block.root.filename),
                             cls=cls.expr)
      with self.writer.indent_block():
        self.writer.write_temp_decls(body_visitor.block)
        self.writer.write_block(body_visitor.block,
                                body_visitor.writer.getvalue())
        self.writer.write('return nil, nil')
      # Pick the metaclass: __metaclass__ from the class dict, else type.
      tmpl = textwrap.dedent("""\
          }).Eval(πF, πF.Globals(), nil, nil)
          if πE != nil {
          \tcontinue
          }
          if $meta, πE = $cls.GetItem(πF, $metaclass_str.ToObject()); πE != nil {
          \tcontinue
          }
          if $meta == nil {
          \t$meta = πg.TypeType.ToObject()
          }""")
      self.writer.write_tmpl(
          tmpl, meta=meta.name, cls=cls.expr,
          metaclass_str=self.block.root.intern('__metaclass__'))
      with self.block.alloc_temp() as type_:
        # type_ = meta(name, bases, dict), i.e. invoke the metaclass.
        type_expr = ('{}.Call(πF, []*πg.Object{{πg.NewStr({}).ToObject(), '
                     'πg.NewTuple({}...).ToObject(), {}.ToObject()}}, nil)')
        self.writer.write_checked_call2(
            type_, type_expr, meta.expr,
            util.go_str(node.name), bases.expr, cls.expr)
        self.block.bind_var(self.writer, node.name, type_.expr)
def visit_Continue(self, node):
if not self.block.loop_stack:
raise util.ParseError(node, "'continue' not in loop")
self._write_py_context(node.lineno)
self.writer.write('continue')
  def visit_Delete(self, node):
    """Generate code for `del`, dispatching on the kind of each target."""
    self._write_py_context(node.lineno)
    for target in node.targets:
      if isinstance(target, ast.Attribute):
        # del obj.attr
        with self.visit_expr(target.value) as t:
          self.writer.write_checked_call1(
              'πg.DelAttr(πF, {}, {})', t.expr,
              self.block.root.intern(target.attr))
      elif isinstance(target, ast.Name):
        # del name
        self.block.del_var(self.writer, target.id)
      elif isinstance(target, ast.Subscript):
        # del obj[index]
        with self.visit_expr(target.value) as t,\
            self.visit_expr(target.slice) as index:
          self.writer.write_checked_call1('πg.DelItem(πF, {}, {})',
                                          t.expr, index.expr)
      else:
        msg = 'del target not implemented: {}'.format(type(target).__name__)
        raise util.ParseError(node, msg)
def visit_Expr(self, node):
self._write_py_context(node.lineno)
self.visit_expr(node.value).free()
def visit_For(self, node):
with self.block.alloc_temp() as i:
with self.visit_expr(node.iter) as iter_expr:
self.writer.write_checked_call2(i, 'πg.Iter(πF, {})', iter_expr.expr)
def testfunc(testvar):
with self.block.alloc_temp() as n:
self.writer.write_tmpl(textwrap.dedent("""\
if $n, πE = πg.Next(πF, $i); πE != nil {
\tisStop, exc := πg.IsInstance(πF, πE.ToObject(), πg.StopIterationType.ToObject())
\tif exc != nil {
\t\tπE = exc
\t} else if isStop {
\t\tπE = nil
\t\tπF.RestoreExc(nil, nil)
\t}
\t$testvar = !isStop
} else {
\t$testvar = true"""), n=n.name, i=i.expr, testvar=testvar.name)
with self.writer.indent_block():
self._tie_target(node.target, n.expr)
self.writer.write('}')
self._visit_loop(testfunc, node)
  def visit_FunctionDef(self, node):
    """Bind a function object, then apply decorators innermost-first."""
    self._write_py_context(node.lineno + len(node.decorator_list))
    func = self.visit_function_inline(node)
    self.block.bind_var(self.writer, node.name, func.expr)
    while node.decorator_list:
      # Popping from the end applies the decorator nearest the `def` first,
      # matching CPython decorator semantics.
      decorator = node.decorator_list.pop()
      wrapped = ast.Name(id=node.name)
      decorated = ast.Call(func=decorator, args=[wrapped], keywords=[],
                           starargs=None, kwargs=None)
      target = ast.Assign(targets=[wrapped], value=decorated, loc=node.loc)
      self.visit_Assign(target)
  def visit_Global(self, node):
    # `global` declarations are resolved during block analysis (BlockVisitor);
    # here only the source line context needs to be recorded.
    self._write_py_context(node.lineno)
def visit_If(self, node):
# Collect the nodes for each if/elif/else body and write the dispatching
# switch statement.
bodies = []
# An elif clause is represented as a single If node within the orelse
# section of the previous If node. Thus this loop terminates once we are
# done all the elif clauses at which time the orelse var will contain the
# nodes (if any) for the else clause.
orelse = [node]
while len(orelse) == 1 and isinstance(orelse[0], ast.If):
ifnode = orelse[0]
with self.visit_expr(ifnode.test) as cond:
label = self.block.genlabel()
# We goto the body of the if statement instead of executing it inline
# because the body itself may be a goto target and Go does not support
# jumping to targets inside a block.
with self.block.alloc_temp('bool') as is_true:
self.writer.write_tmpl(textwrap.dedent("""\
if $is_true, πE = πg.IsTrue(πF, $cond); πE != nil {
\tcontinue
}
if $is_true {
\tgoto Label$label
}"""), is_true=is_true.name, cond=cond.expr, label=label)
bodies.append((label, ifnode.body, ifnode.lineno))
orelse = ifnode.orelse
default_label = end_label = self.block.genlabel()
if orelse:
end_label = self.block.genlabel()
# The else is not represented by ast and thus there is no lineno.
bodies.append((default_label, orelse, None))
self.writer.write('goto Label{}'.format(default_label))
# Write the body of each clause.
for label, body, lineno in bodies:
if lineno:
self._write_py_context(lineno)
self.writer.write_label(label)
self._visit_each(body)
self.writer.write('goto Label{}'.format(end_label))
self.writer.write_label(end_label)
def visit_Import(self, node):
self._write_py_context(node.lineno)
for imp in self.block.root.importer.visit(node):
self._import_and_bind(imp)
def visit_ImportFrom(self, node):
self._write_py_context(node.lineno)
if node.module == '__future__' and node != self.future_node:
raise util.LateFutureError(node)
for imp in self.block.root.importer.visit(node):
self._import_and_bind(imp)
  def visit_Module(self, node):
    # A module is just the ordered sequence of its top-level statements.
    self._visit_each(node.body)
  def visit_Pass(self, node):
    # `pass` emits no code; only record the line for tracebacks.
    self._write_py_context(node.lineno)
def visit_Print(self, node):
if self.block.root.future_features.print_function:
raise util.ParseError(node, 'syntax error (print is not a keyword)')
self._write_py_context(node.lineno)
with self.block.alloc_temp('[]*πg.Object') as args:
self.writer.write('{} = make([]*πg.Object, {})'.format(
args.expr, len(node.values)))
for i, v in enumerate(node.values):
with self.visit_expr(v) as arg:
self.writer.write('{}[{}] = {}'.format(args.expr, i, arg.expr))
self.writer.write_checked_call1('πg.Print(πF, {}, {})', args.expr,
'true' if node.nl else 'false')
def visit_Raise(self, node):
with self.visit_expr(node.exc) if node.exc else _nil_expr as t,\
self.visit_expr(node.inst) if node.inst else _nil_expr as inst,\
self.visit_expr(node.tback) if node.tback else _nil_expr as tb:
if node.inst:
assert node.exc, 'raise had inst but no type'
if node.tback:
assert node.inst, 'raise had tback but no inst'
self._write_py_context(node.lineno)
self.writer.write('πE = πF.Raise({}, {}, {})'.format(
t.expr, inst.expr, tb.expr))
self.writer.write('continue')
def visit_Return(self, node):
assert isinstance(self.block, block.FunctionBlock)
self._write_py_context(node.lineno)
if self.block.is_generator and node.value:
raise util.ParseError(node, 'returning a value in a generator function')
if node.value:
with self.visit_expr(node.value) as value:
self.writer.write('πR = {}'.format(value.expr))
else:
self.writer.write('πR = πg.None')
self.writer.write('continue')
def visit_Try(self, node):
# The general structure generated by this method is shown below:
#
# checkpoints.Push(Except)
# <try body>
# Checkpoints.Pop()
# <else body>
# goto Finally
# Except:
# <dispatch table>
# Handler1:
# <handler 1 body>
# Checkpoints.Pop() // Finally
# goto Finally
# Handler2:
# <handler 2 body>
# Checkpoints.Pop() // Finally
# goto Finally
# ...
# Finally:
# <finally body>
#
# The dispatch table maps the current exception to the appropriate handler
# label according to the exception clauses.
# Write the try body.
self._write_py_context(node.lineno)
finally_label = self.block.genlabel(is_checkpoint=bool(node.finalbody))
if node.finalbody:
self.writer.write('πF.PushCheckpoint({})'.format(finally_label))
except_label = None
if node.handlers:
except_label = self.block.genlabel(is_checkpoint=True)
self.writer.write('πF.PushCheckpoint({})'.format(except_label))
self._visit_each(node.body)
if except_label:
self.writer.write('πF.PopCheckpoint()') # except_label
if node.orelse:
self._visit_each(node.orelse)
if node.finalbody:
self.writer.write('πF.PopCheckpoint()') # finally_label
self.writer.write('goto Label{}'.format(finally_label))
with self.block.alloc_temp('*πg.BaseException') as exc:
if except_label:
with self.block.alloc_temp('*πg.Traceback') as tb:
self.writer.write_label(except_label)
self.writer.write_tmpl(textwrap.dedent("""\
if πE == nil {
continue
}
πE = nil
$exc, $tb = πF.ExcInfo()"""), exc=exc.expr, tb=tb.expr)
handler_labels = self._write_except_dispatcher(
exc.expr, tb.expr, node.handlers)
# Write the bodies of each of the except handlers.
for handler_label, except_node in zip(handler_labels, node.handlers):
self._write_except_block(handler_label, exc.expr, except_node)
if node.finalbody:
self.writer.write('πF.PopCheckpoint()') # finally_label
self.writer.write('goto Label{}'.format(finally_label))
# Write the finally body.
self.writer.write_label(finally_label)
if node.finalbody:
with self.block.alloc_temp('*πg.Traceback') as tb:
self.writer.write('{}, {} = πF.RestoreExc(nil, nil)'.format(
exc.expr, tb.expr))
self._visit_each(node.finalbody)
self.writer.write_tmpl(textwrap.dedent("""\
if $exc != nil {
\tπE = πF.Raise($exc.ToObject(), nil, $tb.ToObject())
\tcontinue
}
if πR != nil {
\tcontinue
}"""), exc=exc.expr, tb=tb.expr)
def visit_While(self, node):
self._write_py_context(node.lineno)
def testfunc(testvar):
with self.visit_expr(node.test) as cond:
self.writer.write_checked_call2(
testvar, 'πg.IsTrue(πF, {})', cond.expr)
self._visit_loop(testfunc, node)
def visit_With(self, node):
assert len(node.items) == 1, 'multiple items in a with not yet supported'
item = node.items[0]
self._write_py_context(node.loc.line())
# mgr := EXPR
with self.visit_expr(item.context_expr) as mgr,\
self.block.alloc_temp() as exit_func,\
self.block.alloc_temp() as value:
# The code here has a subtle twist: It gets the exit function attribute
# from the class, not from the object. This matches the pseudo code from
# PEP 343 exactly, and is very close to what CPython actually does. (The
# CPython implementation actually uses a special lookup which is performed
# on the object, but skips the instance dictionary: see ceval.c and
# lookup_maybe in typeobject.c.)
# exit := type(mgr).__exit__
self.writer.write_checked_call2(
exit_func, 'πg.GetAttr(πF, {}.Type().ToObject(), {}, nil)',
mgr.expr, self.block.root.intern('__exit__'))
# value := type(mgr).__enter__(mgr)
self.writer.write_checked_call2(
value, 'πg.GetAttr(πF, {}.Type().ToObject(), {}, nil)',
mgr.expr, self.block.root.intern('__enter__'))
self.writer.write_checked_call2(
value, '{}.Call(πF, πg.Args{{{}}}, nil)',
value.expr, mgr.expr)
finally_label = self.block.genlabel(is_checkpoint=True)
self.writer.write('πF.PushCheckpoint({})'.format(finally_label))
if item.optional_vars:
self._tie_target(item.optional_vars, value.expr)
self._visit_each(node.body)
self.writer.write('πF.PopCheckpoint()')
self.writer.write_label(finally_label)
with self.block.alloc_temp() as swallow_exc,\
self.block.alloc_temp('bool') as swallow_exc_bool,\
self.block.alloc_temp('*πg.BaseException') as exc,\
self.block.alloc_temp('*πg.Traceback') as tb,\
self.block.alloc_temp('*πg.Type') as t:
# temp := exit(mgr, *sys.exec_info())
tmpl = """\
$exc, $tb = nil, nil
if πE != nil {
\t$exc, $tb = πF.ExcInfo()
}
if $exc != nil {
\t$t = $exc.Type()
\tif $swallow_exc, πE = $exit_func.Call(πF, πg.Args{$mgr, $t.ToObject(), $exc.ToObject(), $tb.ToObject()}, nil); πE != nil {
\t\tcontinue
\t}
} else {
\tif $swallow_exc, πE = $exit_func.Call(πF, πg.Args{$mgr, πg.None, πg.None, πg.None}, nil); πE != nil {
\t\tcontinue
\t}
}
"""
self.writer.write_tmpl(
textwrap.dedent(tmpl), exc=exc.expr, tb=tb.expr, t=t.name,
mgr=mgr.expr, exit_func=exit_func.expr,
swallow_exc=swallow_exc.name)
# if Exc != nil && swallow_exc != true {
# Raise(nil, nil)
# }
self.writer.write_checked_call2(
swallow_exc_bool, 'πg.IsTrue(πF, {})', swallow_exc.expr)
self.writer.write_tmpl(textwrap.dedent("""\
if $exc != nil && $swallow_exc != true {
\tπE = πF.Raise(nil, nil, nil)
\tcontinue
}
if πR != nil {
\tcontinue
}"""), exc=exc.expr, swallow_exc=swallow_exc_bool.expr)
def visit_function_inline(self, node):
"""Returns an GeneratedExpr for a function with the given body."""
# First pass collects the names of locals used in this function. Do this in
# a separate pass so that we know whether to resolve a name as a local or a
# global during the second pass.
func_visitor = block.FunctionBlockVisitor(node)
for child in node.body:
func_visitor.visit(child)
func_block = block.FunctionBlock(self.block, node.name, func_visitor.vars,
func_visitor.is_generator)
visitor = StatementVisitor(func_block, self.future_node)
# Indent so that the function body is aligned with the goto labels.
with visitor.writer.indent_block():
visitor._visit_each(node.body) # pylint: disable=protected-access
result = self.block.alloc_temp()
with self.block.alloc_temp('[]πg.Param') as func_args:
args = node.args
argc = len(args.args)
self.writer.write('{} = make([]πg.Param, {})'.format(
func_args.expr, argc))
# The list of defaults only contains args for which a default value is
# specified so pad it with None to make it the same length as args.
defaults = [None] * (argc - len(args.defaults)) + args.defaults
for i, (a, d) in enumerate(zip(args.args, defaults)):
with self.visit_expr(d) if d else expr.nil_expr as default:
tmpl = '$args[$i] = πg.Param{Name: $name, Def: $default}'
self.writer.write_tmpl(tmpl, args=func_args.expr, i=i,
name=util.go_str(a.arg), default=default.expr)
flags = []
if args.vararg:
flags.append('πg.CodeFlagVarArg')
if args.kwarg:
flags.append('πg.CodeFlagKWArg')
# The function object gets written to a temporary writer because we need
# it as an expression that we subsequently bind to some variable.
self.writer.write_tmpl(
'$result = πg.NewFunction(πg.NewCode($name, $filename, $args, '
'$flags, func(πF *πg.Frame, πArgs []*πg.Object) '
'(*πg.Object, *πg.BaseException) {',
result=result.name, name=util.go_str(node.name),
filename=util.go_str(self.block.root.filename), args=func_args.expr,
flags=' | '.join(flags) if flags else 0)
with self.writer.indent_block():
for var in func_block.vars.values():
if var.type != block.Var.TYPE_GLOBAL:
fmt = 'var {0} *πg.Object = {1}; _ = {0}'
self.writer.write(fmt.format(
util.adjust_local_name(var.name), var.init_expr))
self.writer.write_temp_decls(func_block)
self.writer.write('var πR *πg.Object; _ = πR')
self.writer.write('var πE *πg.BaseException; _ = πE')
if func_block.is_generator:
self.writer.write(
'return πg.NewGenerator(πF, func(πSent *πg.Object) '
'(*πg.Object, *πg.BaseException) {')
with self.writer.indent_block():
self.writer.write_block(func_block, visitor.writer.getvalue())
self.writer.write('return nil, πE')
self.writer.write('}).ToObject(), nil')
else:
self.writer.write_block(func_block, visitor.writer.getvalue())
self.writer.write(textwrap.dedent("""\
if πE != nil {
\tπR = nil
} else if πR == nil {
\tπR = πg.None
}
return πR, πE"""))
self.writer.write('}), πF.Globals()).ToObject()')
return result
_AUG_ASSIGN_TEMPLATES = {
ast.Add: 'πg.IAdd(πF, {lhs}, {rhs})',
ast.BitAnd: 'πg.IAnd(πF, {lhs}, {rhs})',
ast.Div: 'πg.IDiv(πF, {lhs}, {rhs})',
ast.FloorDiv: 'πg.IFloorDiv(πF, {lhs}, {rhs})',
ast.LShift: 'πg.ILShift(πF, {lhs}, {rhs})',
ast.Mod: 'πg.IMod(πF, {lhs}, {rhs})',
ast.Mult: 'πg.IMul(πF, {lhs}, {rhs})',
ast.BitOr: 'πg.IOr(πF, {lhs}, {rhs})',
ast.Pow: 'πg.IPow(πF, {lhs}, {rhs})',
ast.RShift: 'πg.IRShift(πF, {lhs}, {rhs})',
ast.Sub: 'πg.ISub(πF, {lhs}, {rhs})',
ast.BitXor: 'πg.IXor(πF, {lhs}, {rhs})',
}
  def _assign_target(self, target, value):
    """Bind `value` to a single target: a name, attribute, or subscript."""
    if isinstance(target, ast.Name):
      self.block.bind_var(self.writer, target.id, value)
    elif isinstance(target, ast.Attribute):
      with self.visit_expr(target.value) as obj:
        self.writer.write_checked_call1(
            'πg.SetAttr(πF, {}, {}, {})', obj.expr,
            self.block.root.intern(target.attr), value)
    elif isinstance(target, ast.Subscript):
      with self.visit_expr(target.value) as mapping,\
          self.visit_expr(target.slice) as index:
        self.writer.write_checked_call1('πg.SetItem(πF, {}, {}, {})',
                                        mapping.expr, index.expr, value)
    else:
      msg = 'assignment target not yet implemented: ' + type(target).__name__
      raise util.ParseError(target, msg)
def _build_assign_target(self, target, assigns):
if isinstance(target, (ast.Tuple, ast.List)):
children = []
for elt in target.elts:
children.append(self._build_assign_target(elt, assigns))
tmpl = 'πg.TieTarget{Children: []πg.TieTarget{$children}}'
return string.Template(tmpl).substitute(children=', '.join(children))
temp = self.block.alloc_temp()
assigns.append((target, temp))
tmpl = 'πg.TieTarget{Target: &$temp}'
return string.Template(tmpl).substitute(temp=temp.name)
  def _import_and_bind(self, imp):
    """Generates code that imports a module and binds it to a variable.

    Args:
      imp: Import object representing an import of the form "import x.y.z" or
          "from x.y import z". Expects only a single binding.
    """
    # Acquire handles to the Code objects in each Go package and call
    # ImportModule to initialize all modules.
    with self.block.alloc_temp() as mod, \
        self.block.alloc_temp('[]*πg.Object') as mod_slice:
      self.writer.write_checked_call2(
          mod_slice, 'πg.ImportModule(πF, {})', util.go_str(imp.name))
      # Bind the imported modules or members to variables in the current scope.
      for binding in imp.bindings:
        if binding.bind_type == imputil.Import.MODULE:
          self.writer.write('{} = {}[{}]'.format(
              mod.name, mod_slice.expr, binding.value))
          self.block.bind_var(self.writer, binding.alias, mod.expr)
        elif binding.bind_type == imputil.Import.STAR:
          # `from x import *`: copy the module's members into this scope.
          self.writer.write_checked_call1(
              'πg.LoadMembers(πF, {}[0])', mod_slice.name)
        else:
          self.writer.write('{} = {}[{}]'.format(
              mod.name, mod_slice.expr, imp.name.count('.')))
          # Binding a member of the imported module.
          with self.block.alloc_temp() as member:
            self.writer.write_checked_call2(
                member, 'πg.GetAttr(πF, {}, {}, nil)',
                mod.expr, self.block.root.intern(binding.value))
            self.block.bind_var(self.writer, binding.alias, member.expr)
def _tie_target(self, target, value):
if isinstance(target, ast.Name):
self._assign_target(target, value)
return
assigns = []
self.writer.write_checked_call1(
'πg.Tie(πF, {}, {})',
self._build_assign_target(target, assigns), value)
for t, temp in assigns:
self._assign_target(t, temp.expr)
self.block.free_temp(temp)
  def _visit_each(self, nodes):
    # Visit a list of statement nodes in source order.
    for node in nodes:
      self.visit(node)
def _visit_loop(self, testfunc, node):
start_label = self.block.genlabel(is_checkpoint=True)
else_label = self.block.genlabel(is_checkpoint=True)
end_label = self.block.genlabel()
with self.block.alloc_temp('bool') as breakvar:
self.block.push_loop(breakvar)
self.writer.write('πF.PushCheckpoint({})'.format(else_label))
self.writer.write('{} = false'.format(breakvar.name))
self.writer.write_label(start_label)
self.writer.write_tmpl(textwrap.dedent("""\
if πE != nil || πR != nil {
\tcontinue
}
if $breakvar {
\tπF.PopCheckpoint()
\tgoto Label$end_label
}"""), breakvar=breakvar.expr, end_label=end_label)
with self.block.alloc_temp('bool') as testvar:
testfunc(testvar)
self.writer.write_tmpl(textwrap.dedent("""\
if πE != nil || !$testvar {
\tcontinue
}
πF.PushCheckpoint($start_label)\
"""), testvar=testvar.name, start_label=start_label)
self._visit_each(node.body)
self.writer.write('continue')
# End the loop so that break applies to an outer loop if present.
self.block.pop_loop()
self.writer.write_label(else_label)
self.writer.write(textwrap.dedent("""\
if πE != nil || πR != nil {
\tcontinue
}"""))
if node.orelse:
self._visit_each(node.orelse)
self.writer.write_label(end_label)
def _write_except_block(self, label, exc, except_node):
self._write_py_context(except_node.lineno)
self.writer.write_label(label)
if except_node.name:
self.block.bind_var(self.writer, except_node.name.id,
'{}.ToObject()'.format(exc))
self._visit_each(except_node.body)
self.writer.write('πF.RestoreExc(nil, nil)')
def _write_except_dispatcher(self, exc, tb, handlers):
"""Outputs a Go code that jumps to the appropriate except handler.
Args:
exc: Go variable holding the current exception.
tb: Go variable holding the current exception's traceback.
handlers: A list of ast.ExceptHandler nodes.
Returns:
A list of Go labels indexes corresponding to the exception handlers.
Raises:
ParseError: Except handlers are in an invalid order.
"""
handler_labels = []
for i, except_node in enumerate(handlers):
handler_labels.append(self.block.genlabel())
if except_node.type:
with self.visit_expr(except_node.type) as type_,\
self.block.alloc_temp('bool') as is_inst:
self.writer.write_checked_call2(
is_inst, 'πg.IsInstance(πF, {}.ToObject(), {})', exc, type_.expr)
self.writer.write_tmpl(textwrap.dedent("""\
if $is_inst {
\tgoto Label$label
}"""), is_inst=is_inst.expr, label=handler_labels[-1])
else:
# This is a bare except. It should be the last handler.
if i != len(handlers) - 1:
msg = "default 'except:' must be last"
raise util.ParseError(except_node, msg)
self.writer.write('goto Label{}'.format(handler_labels[-1]))
if handlers[-1].type:
# There's no bare except, so the fallback is to re-raise.
self.writer.write(
'πE = πF.Raise({}.ToObject(), nil, {}.ToObject())'.format(exc, tb))
self.writer.write('continue')
return handler_labels
def _write_py_context(self, lineno):
if lineno:
line = self.block.root.buffer.source_line(lineno).strip()
self.writer.write('// line {}: {}'.format(lineno, line))
self.writer.write('πF.SetLineno({})'.format(lineno))
| 41.401596 | 136 | 0.624077 |
c5153d63941e2d2464b8740e5aa1a0b2987d798f | 9,877 | py | Python | pypuppetdb/api/v3.py | grandich/pypuppetdb | f509d41aaca67c0c44b5324c3dcad27c38ba26c3 | [
"Apache-2.0"
] | null | null | null | pypuppetdb/api/v3.py | grandich/pypuppetdb | f509d41aaca67c0c44b5324c3dcad27c38ba26c3 | [
"Apache-2.0"
] | null | null | null | pypuppetdb/api/v3.py | grandich/pypuppetdb | f509d41aaca67c0c44b5324c3dcad27c38ba26c3 | [
"Apache-2.0"
] | null | null | null | from __future__ import unicode_literals
from __future__ import absolute_import
import logging
from pypuppetdb.api import BaseAPI
from pypuppetdb.utils import json_to_datetime
from datetime import datetime, timedelta
from pypuppetdb.types import (
Node, Fact, Resource,
Report, Event, Catalog
)
log = logging.getLogger(__name__)
class API(BaseAPI):
    """The API object for version 3 of the PuppetDB API. This object contains
    all v3 specific methods and ways of doing things.

    :param \*\*kwargs: Rest of the keyword arguments passed on to our parent\
        :class:`~pypuppetdb.api.BaseAPI`.
    """

    def __init__(self, *args, **kwargs):
        """Initialise the API object."""
        super(API, self).__init__(api_version=3, **kwargs)
        log.debug('API initialised with {0}.'.format(kwargs))

    def node(self, name):
        """Gets a single node from PuppetDB."""
        nodes = self.nodes(name=name)
        # self.nodes() is a generator; take the first (and only) match.
        return next(node for node in nodes)

    def nodes(self, name=None, query=None, unreported=2, with_status=False):
        """Query for nodes by either name or query. If both aren't
        provided this will return a list of all nodes. This method
        also fetches the nodes status and event counts of the latest
        report from puppetdb.

        :param name: (optional)
        :type name: :obj:`None` or :obj:`string`
        :param query: (optional)
        :type query: :obj:`None` or :obj:`string`
        :param with_status: (optional) include the node status in the\
            returned nodes
        :type with_status: :bool:
        :param unreported: (optional) amount of hours when a node gets
            marked as unreported
        :type unreported: :obj:`None` or integer

        :returns: A generator yielding Nodes.
        :rtype: :class:`pypuppetdb.types.Node`
        """
        nodes = self._query('nodes', path=name, query=query)
        # If we happen to only get one node back it
        # won't be inside a list so iterating over it
        # goes boom. Therefore we wrap a list around it.
        if type(nodes) == dict:
            nodes = [nodes, ]

        if with_status:
            # One bulk query for the latest-report event counts of all
            # nodes; matched per-node by certname below.
            latest_events = self._query(
                'event-counts',
                query='["=","latest-report?",true]',
                summarize_by='certname')

        for node in nodes:
            node['unreported_time'] = None
            node['status'] = None

            if with_status:
                status = [s for s in latest_events
                          if s['subject']['title'] == node['name']]

            # node status from events: later checks override earlier ones,
            # so failed > noop > changed.
            if with_status and status:
                node['events'] = status = status[0]
                if status['successes'] > 0:
                    node['status'] = 'changed'
                if status['noops'] > 0:
                    node['status'] = 'noop'
                if status['failures'] > 0:
                    node['status'] = 'failed'
            else:
                if with_status:
                    node['status'] = 'unchanged'
                node['events'] = None

            # node report age: mark 'unreported' when the last report is
            # older than `unreported` hours (timestamps compared as naive
            # UTC).
            if with_status and node['report_timestamp'] is not None:
                try:
                    last_report = json_to_datetime(node['report_timestamp'])
                    last_report = last_report.replace(tzinfo=None)
                    now = datetime.utcnow()
                    unreported_border = now-timedelta(hours=unreported)
                    if last_report < unreported_border:
                        delta = (datetime.utcnow()-last_report)
                        node['status'] = 'unreported'
                        node['unreported_time'] = '{0}d {1}h {2}m'.format(
                            delta.days,
                            int(delta.seconds/3600),
                            int((delta.seconds % 3600)/60)
                        )
                except AttributeError:
                    # Timestamp could not be parsed into a datetime.
                    node['status'] = 'unreported'

            if not node['report_timestamp'] and with_status:
                node['status'] = 'unreported'

            yield Node(self,
                       node['name'],
                       deactivated=node['deactivated'],
                       report_timestamp=node['report_timestamp'],
                       catalog_timestamp=node['catalog_timestamp'],
                       facts_timestamp=node['facts_timestamp'],
                       status=node['status'],
                       events=node['events'],
                       unreported_time=node['unreported_time']
                       )

    def facts(self, name=None, value=None, query=None):
        """Query for facts limited by either name, value and/or query.
        This will yield a single Fact object at a time."""
        log.debug('{0}, {1}, {2}'.format(name, value, query))
        if name is not None and value is not None:
            # Look up one specific fact/value pair.
            path = '{0}/{1}'.format(name, value)
        elif name is not None and value is None:
            path = name
        elif name is None and value is None and query is not None:
            path = None
        else:
            log.debug("We want to query for all facts.")
            # Empty (rather than None) query requests everything.
            query = ''
            path = None

        facts = self._query('facts', path=path, query=query)
        for fact in facts:
            yield Fact(
                fact['certname'],
                fact['name'],
                fact['value'],
            )

    def fact_names(self):
        """Get a list of all known facts."""
        return self._query('fact-names')

    def resources(self, type_=None, title=None, query=None):
        """Query for resources limited by either type and/or title or query.
        This will yield a Resources object for every returned resource."""
        path = None

        if type_ is not None:
            type_ = self._normalize_resource_type(type_)

            if title is not None:
                path = '{0}/{1}'.format(type_, title)
            elif title is None:
                path = type_
        elif query is None:
            log.debug('Going to query for all resources. This is usually a '
                      'bad idea as it might return enormous amounts of '
                      'resources.')

        resources = self._query('resources', path=path, query=query)
        for resource in resources:
            yield Resource(
                resource['certname'],
                resource['title'],
                resource['type'],
                resource['tags'],
                resource['exported'],
                resource['file'],
                resource['line'],
                resource['parameters'],
            )

    def reports(self, query):
        """Get reports for our infrastructure. Currently reports can only
        be filtered through a query which requests a specific certname.
        If not it will return all reports.

        This yields a Report object for every returned report."""
        reports = self._query('reports', query=query)
        for report in reports:
            yield Report(
                report['certname'],
                report['hash'],
                report['start-time'],
                report['end-time'],
                report['receive-time'],
                report['configuration-version'],
                report['report-format'],
                report['puppet-version'],
                report['transaction-uuid']
            )

    def events(self, query, order_by=None, limit=None):
        """A report is made up of events. This allows to query for events
        based on the report hash.
        This yields an Event object for every returned event."""
        events = self._query('events', query=query,
                             order_by=order_by, limit=limit)
        for event in events:
            yield Event(
                event['certname'],
                event['status'],
                event['timestamp'],
                event['report'],
                event['resource-title'],
                event['property'],
                event['message'],
                event['new-value'],
                event['old-value'],
                event['resource-type'],
                event['containing-class'],
                event['containment-path'],
                event['file'],
                event['line'],
            )

    def event_counts(self, query, summarize_by,
                     count_by=None, count_filter=None):
        """Get event counts from puppetdb"""
        return self._query('event-counts',
                           query=query,
                           summarize_by=summarize_by,
                           count_by=count_by,
                           count_filter=count_filter)

    def aggregate_event_counts(self, query, summarize_by,
                               count_by=None, count_filter=None):
        """Get event counts from puppetdb"""
        return self._query('aggregate-event-counts',
                           query=query, summarize_by=summarize_by,
                           count_by=count_by, count_filter=count_filter)

    def server_time(self):
        """Get the current time of the clock on the PuppetDB server"""
        return self._query('server-time')['server-time']

    def current_version(self):
        """Get version information about the running PuppetDB server"""
        return self._query('version')['version']

    def catalog(self, node):
        """Get the most recent catalog for a given node"""
        c = self._query('catalogs', path=node)
        return Catalog(c['data']['name'],
                       c['data']['edges'],
                       c['data']['resources'],
                       c['data']['version'],
                       c['data']['transaction-uuid'])
111c9eb55e0b5ad6188685018b5029b7e7d0dea0 | 7,169 | py | Python | spektral/data/utils.py | colliner/spektral | b776200fd1fa820f05b559f0c1c6265e0eca4894 | [
"MIT"
] | null | null | null | spektral/data/utils.py | colliner/spektral | b776200fd1fa820f05b559f0c1c6265e0eca4894 | [
"MIT"
] | null | null | null | spektral/data/utils.py | colliner/spektral | b776200fd1fa820f05b559f0c1c6265e0eca4894 | [
"MIT"
] | null | null | null | import numpy as np
import tensorflow as tf
from scipy import sparse as sp
from spektral.utils import pad_jagged_array
def to_disjoint(x_list=None, a_list=None, e_list=None):
    """
    Converts lists of node features, adjacency matrices and edge features to
    [disjoint mode](https://danielegrattarola.github.io/spektral/data/#disjoint-mode).

    Either the node features or the adjacency matrices must be provided as input.
    The i-th element of each list must be associated with the i-th graph.
    The method also computes the batch index to retrieve individual graphs
    from the disjoint union.

    Edge attributes can be represented as:

    - a dense array of shape `(n_nodes, n_nodes, n_edge_features)`;
    - a sparse edge list of shape `(n_edges, n_edge_features)`;

    and they will always be returned as edge list for efficiency.

    :param x_list: a list of np.arrays of shape `(n_nodes, n_node_features)`
        -- note that `n_nodes` can change between graphs;
    :param a_list: a list of np.arrays or scipy.sparse matrices of shape
        `(n_nodes, n_nodes)`;
    :param e_list: a list of np.arrays of shape
        `(n_nodes, n_nodes, n_edge_features)` or `(n_edges, n_edge_features)`;
    :return: only if the corresponding list is given as input:
        - `x`: np.array of shape `(n_nodes, n_node_features)`;
        - `a`: scipy.sparse matrix of shape `(n_nodes, n_nodes)`;
        - `e`: (optional) np.array of shape `(n_edges, n_edge_features)`;
        - `i`: np.array of shape `(n_nodes, )`;
    :raises ValueError: if neither `x_list` nor `a_list` is given, or if
        dense edge features are given without `a_list`.
    """
    if a_list is None and x_list is None:
        raise ValueError('Need at least x_list or a_list.')

    # Node features: stack all graphs' node features along the node axis.
    x_out = None
    if x_list is not None:
        x_out = np.vstack(x_list)

    # Adjacency matrix: block-diagonal union of the individual matrices.
    a_out = None
    if a_list is not None:
        a_out = sp.block_diag(a_list)

    # Batch index: graph id of every node in the disjoint union.
    n_nodes = np.array(
        [x.shape[0] for x in (x_list if x_list is not None else a_list)])
    i_out = np.repeat(np.arange(len(n_nodes)), n_nodes)

    # Edge attributes: always returned as a sparse edge list.
    e_out = None
    if e_list is not None:
        if e_list[0].ndim == 3:  # Convert dense to sparse
            if a_list is None:
                # Previously this failed with a cryptic TypeError from
                # zip(e_list, None); fail loudly instead.
                raise ValueError(
                    'Dense edge features require a_list to locate the '
                    'non-zero entries.')
            e_list = [e[sp.find(a)[:-1]] for e, a in zip(e_list, a_list)]
        e_out = np.vstack(e_list)

    return tuple(out for out in [x_out, a_out, e_out, i_out] if out is not None)
def to_batch(x_list=None, a_list=None, e_list=None):
    """
    Converts lists of node features, adjacency matrices and (optionally) edge
    features to [batch mode](https://danielegrattarola.github.io/spektral/data/#batch-mode),
    by zero-padding all tensors to have the same node dimension `n_max`.

    Either the node features or the adjacency matrices must be provided as input.
    The i-th element of each list must be associated with the i-th graph.

    If `a_list` contains sparse matrices, they will be converted to dense
    np.arrays, which can be expensive.

    The edge attributes of a graph can be represented as

    - a dense array of shape `(n_nodes, n_nodes, n_edge_features)`;
    - a sparse edge list of shape `(n_edges, n_edge_features)`;

    and they will always be returned as dense arrays.

    :param x_list: a list of np.arrays of shape `(n_nodes, n_node_features)`
        -- note that `n_nodes` can change between graphs;
    :param a_list: a list of np.arrays or scipy.sparse matrices of shape
        `(n_nodes, n_nodes)`;
    :param e_list: a list of np.arrays of shape
        `(n_nodes, n_nodes, n_edge_features)` or `(n_edges, n_edge_features)`;
    :return: only if the corresponding list is given as input:
        - `x`: np.array of shape `(batch, n_max, n_node_features)`;
        - `a`: np.array of shape `(batch, n_max, n_max)`;
        - `e`: np.array of shape `(batch, n_max, n_max, n_edge_features)`;
    :raises ValueError: if neither `x_list` nor `a_list` is given, or if
        sparse edge lists are given without `a_list`.
    """
    if a_list is None and x_list is None:
        raise ValueError('Need at least x_list or a_list')
    n_max = max([x.shape[0] for x in (x_list if x_list is not None else a_list)])

    # Node features
    x_out = None
    if x_list is not None:
        x_out = pad_jagged_array(x_list, (n_max, -1))

    # Adjacency matrix
    a_out = None
    if a_list is not None:
        if hasattr(a_list[0], 'toarray'):  # Convert sparse to dense
            a_list = [a.toarray() for a in a_list]
        a_out = pad_jagged_array(a_list, (n_max, n_max))

    # Edge attributes
    e_out = None
    if e_list is not None:
        if e_list[0].ndim == 2:  # Sparse edge lists: scatter into dense blocks
            if a_list is None:
                # The scatter below needs the adjacency to locate the edges;
                # previously this crashed with a TypeError on len(None).
                raise ValueError('Sparse edge features require a_list')
            # Build a fresh list instead of mutating the caller's e_list
            # in place (the old implementation overwrote e_list[i]).
            dense_e_list = []
            for a, e in zip(a_list, e_list):
                e_new = np.zeros(a.shape + e.shape[-1:])
                e_new[np.nonzero(a)] = e
                dense_e_list.append(e_new)
            e_list = dense_e_list
        e_out = pad_jagged_array(e_list, (n_max, n_max, -1))

    return tuple(out for out in [x_out, a_out, e_out] if out is not None)
def batch_generator(data, batch_size=32, epochs=None, shuffle=True):
    """
    Iterates over the data for the given number of epochs, yielding batches of
    size `batch_size`.

    :param data: np.array or list of np.arrays with the same first dimension;
    :param batch_size: number of samples in a batch;
    :param epochs: number of times to iterate over the data (None or -1
        means forever);
    :param shuffle: whether to shuffle the data at the beginning of each epoch
    :return: batches of size `batch_size`.
    """
    if not isinstance(data, (list, tuple)):
        data = [data]
    if len(data) < 1:
        raise ValueError('data cannot be empty')
    if len({len(item) for item in data}) > 1:
        raise ValueError('All inputs must have the same __len__')

    if epochs is None or epochs == -1:
        epochs = np.inf
    n_samples = len(data[0])

    epochs_done = 0
    while epochs_done < epochs:
        epochs_done += 1
        if shuffle:
            shuffle_inplace(*data)
        # Slice each input with the same window; the final slice may be
        # shorter than batch_size.
        for start in range(0, n_samples, batch_size):
            chunk = [item[start:start + batch_size] for item in data]
            yield chunk[0] if len(chunk) == 1 else chunk
def shuffle_inplace(*args):
    """Shuffle every array in *args in place, applying the identical
    permutation to each (done by replaying the same RNG state)."""
    saved_state = np.random.get_state()
    for array in args:
        np.random.set_state(saved_state)
        np.random.shuffle(array)
def get_spec(x):
    """Return the tf TypeSpec class (sparse or dense) appropriate for `x`."""
    is_sparse = isinstance(x, tf.SparseTensor) or sp.issparse(x)
    return tf.SparseTensorSpec if is_sparse else tf.TensorSpec
def prepend_none(t):
    """Return a new tuple with ``None`` prepended to the items of ``t``.

    Generalized to accept any iterable (the previous implementation only
    supported tuples); tuple inputs produce the same result as before.
    """
    return (None, *t)
def to_tf_signature(signature):
    """
    Converts a Dataset signature to a TensorFlow signature.

    Builds one TypeSpec per input key ('x', 'a', 'e', 'i') present in the
    signature, and pairs the input tuple with a target spec when 'y' is
    present.

    :param signature: a Dataset signature.
    :return: a TensorFlow signature.
    """
    def build_spec(entry):
        return entry['spec'](entry['shape'], entry['dtype'])

    inputs = tuple(build_spec(signature[key])
                   for key in ('x', 'a', 'e', 'i')
                   if key in signature)
    if 'y' in signature:
        return inputs, build_spec(signature['y'])
    return inputs
| 34.466346 | 92 | 0.638304 |
e5763605901bda1f5dc6f9c19651c13483ed2ea1 | 997 | py | Python | setup.py | eraclitux/ipcampy | bffd1c4df9006705cffa5b83a090b0db90cbcbcf | [
"MIT"
] | 33 | 2015-03-22T04:05:58.000Z | 2021-02-15T14:14:55.000Z | setup.py | eraclitux/ipcampy | bffd1c4df9006705cffa5b83a090b0db90cbcbcf | [
"MIT"
] | 4 | 2015-02-16T19:37:05.000Z | 2017-03-24T15:34:11.000Z | setup.py | eraclitux/ipcampy | bffd1c4df9006705cffa5b83a090b0db90cbcbcf | [
"MIT"
] | 14 | 2016-01-04T08:13:43.000Z | 2021-04-27T13:52:16.000Z | #!/usr/bin/env python
try:
    # Prefer setuptools; fall back to distutils on minimal installs.
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Packaging metadata for IpCamPy. NOTE(review): install_requires pins
# exact (old) versions of requests/Flask — confirm these pins are still
# intentional. long_description reads README.rst at import time, so the
# file must ship with the sdist.
setup(
    name='IpCamPy',
    version='0.3.0',
    author='Andrea Masi',
    author_email='eraclitux@gmail.com',
    packages=['ipcampy', 'ipcampy.test', 'ipcamweb'],
    scripts=['bin/campatrol'],
    url='https://github.com/eraclitux/ipcampy',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
        'Topic :: Multimedia :: Video :: Capture',
    ],
    tests_require = [
        'mock'
    ],
    keywords='ipcam foscam raspberrypi surveillance',
    license='MIT',
    description='Easily control ip cameras. Comes with built in utilities to make a simple surveillance system.',
    long_description=open('README.rst').read(),
    include_package_data=True,
    zip_safe=False,
    install_requires=[
        "requests==1.2.3",
        "Flask==0.10.1",
    ],
)
a9049ef09d9a3205ec8dea85b46767504b2aae6c | 6,631 | py | Python | xcsg/impl.py | ebak/pyxcsg | f747321b069f02fc7fbea09129b6949fffd38eb3 | [
"MIT"
] | 1 | 2021-01-15T19:48:42.000Z | 2021-01-15T19:48:42.000Z | xcsg/impl.py | ebak/pyxcsg | f747321b069f02fc7fbea09129b6949fffd38eb3 | [
"MIT"
] | null | null | null | xcsg/impl.py | ebak/pyxcsg | f747321b069f02fc7fbea09129b6949fffd38eb3 | [
"MIT"
] | null | null | null | # Copyright (c) 2021 Endre Bak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import glm
from lxml import etree
from typing import List
from copy import deepcopy
from xcsg.common import IDENTITY, VecLike
def mk_node(name, **attrs):
    """Create an lxml element named `name`, stringifying every attribute
    value; booleans become the XML literals 'true'/'false'."""
    def _to_text(value):
        if isinstance(value, bool):
            return 'true' if value else 'false'
        return str(value)

    return etree.Element(name, {key: _to_text(val) for key, val in attrs.items()})
class ChildValidator:
    """Validates the number and type of child objects attached to an Obj.

    min_cnt/max_cnt of None disable the respective bound. Violations are
    reported via AssertionError (matching the rest of the module).
    """

    def __init__(self, child_type, min_cnt=None, max_cnt=None):
        self.child_type = child_type
        self.min_cnt = min_cnt
        self.max_cnt = max_cnt

    def validate(self, *objs):
        count = len(objs)
        assert self.min_cnt is None or count >= self.min_cnt
        assert self.max_cnt is None or count <= self.max_cnt
        for o in objs:
            assert isinstance(o, self.child_type), \
                f'type(o): {type(o)} != child_type: {self.child_type}\no: {o}'
class OneChild(ChildValidator):
    """Validator requiring exactly one child of the given type."""

    def __init__(self, child_type):
        super().__init__(child_type, 1, 1)
class TwoChildren(ChildValidator):
    """Validator requiring exactly two children of the given type."""

    def __init__(self, child_type):
        super().__init__(child_type, 2, 2)
class Obj:
    """Base class for xcsg scene objects: holds an XML node name, its
    attributes, child objects, and a 4x4 model (transform) matrix."""

    def __init__(self, node_name, child_validator=None, **attrs):
        self.node_name, self.child_validator, self.attrs = node_name, child_validator, attrs
        self.objs = []
        self.model_matrix = glm.identity(glm.mat4x4)

    def __call__(self, *objs):
        # Calling an Obj attaches validated children and returns self,
        # enabling fluent construction such as Union3D()(a, b).
        assert self.child_validator is not None
        self.child_validator.validate(*objs)
        self.objs = objs
        return self

    def copy(self):
        # shallow copy child objects only: detach children, deep-copy the
        # rest of the object, then reattach the same child references to
        # both the clone and self.
        objs = self.objs
        self.objs = None
        clone = deepcopy(self)
        clone.objs = self.objs = objs
        return clone

    def deep_copy(self):
        """Full deep copy, children included."""
        return deepcopy(self)

    def to_element(self):
        """Recursively serialize this object into an lxml element."""
        e = mk_node(self.node_name, **self.attrs)
        # if the matrix is identity, don't write it
        mm = self.model_matrix
        if mm != IDENTITY:
            m = mk_node('tmatrix')
            # zip over the glm matrix's four columns to emit <trow> nodes
            # (presumably the transposition xcsg expects — TODO confirm
            # against the xcsg tmatrix spec).
            for i, j, k, l in zip(mm[0], mm[1], mm[2], mm[3]):
                m.append(mk_node('trow', c0=i, c1=j, c2=k, c3=l))
            e.append(m)
        for co in self.objs:
            e.append(co.to_element())
        for c in self._get_child_nodes():
            e.append(c)
        return e

    def _get_child_nodes(self):
        # Hook for subclasses that contribute extra XML child nodes.
        return []
class MoreChildren(ChildValidator):
    """Validator requiring at least one child of the given type."""

    def __init__(self, child_type):
        super().__init__(child_type, 1, None)
class Obj2D(Obj):
    """Base class for 2D objects; supports `+` (union) and `-` (difference)."""

    def __init__(self, node_name, child_validator=None, **attrs):
        super().__init__(node_name, child_validator, **attrs)

    def __add__(self, o):
        from .api import Union2D  # cyclic import
        return Union2D()(self, o)

    def __sub__(self, o):
        from .api import Diff2D  # cyclic import
        return Diff2D()(self, o)
def flatten(parent_obj, *objs):
    """Return objs as a flat list, splicing in the children of any obj that
    has the same type and model matrix as parent_obj (this collapses
    nested unions/diffs of the same kind into a single level)."""
    result = []
    parent_kind = type(parent_obj)
    for obj in objs:
        same_op = isinstance(obj, parent_kind)
        if same_op and parent_obj.model_matrix == obj.model_matrix:
            result.extend(obj.objs)
        else:
            result.append(obj)
    return result
class FlattenerOp2D(Obj2D):
    """2D boolean operation that, by default, splices nested same-op
    children into itself via the module-level flatten() helper."""

    def __init__(self, node_name, flatten=True, **attrs):
        super().__init__(node_name, MoreChildren(Obj2D), **attrs)
        # NOTE: the attribute deliberately shares the name of the
        # module-level flatten() function; inside __call__ the bare name
        # still resolves to the function.
        self.flatten = flatten

    def __call__(self, *objs):
        self.child_validator.validate(*objs)
        if self.flatten:
            objs = flatten(self, *objs)
        self.objs = objs
        return self
class Obj3D(Obj):
    """Base class for 3D objects; supports `+` (union) and `-` (difference)."""

    def __init__(self, node_name, child_validator=None, **attrs):
        super().__init__(node_name, child_validator, **attrs)

    def __add__(self, o):
        from .api import Union3D  # cyclic import
        return Union3D()(self, o)

    def __sub__(self, o):
        from .api import Diff3D  # cyclic import
        return Diff3D()(self, o)
class FlattenerOp3D(Obj3D):
    """3D boolean operation that, by default, splices nested same-op
    children into itself via the module-level flatten() helper."""

    def __init__(self, node_name, flatten=True, **attrs):
        super().__init__(node_name, MoreChildren(Obj3D), **attrs)
        # NOTE: attribute shadows the module-level flatten() helper's name.
        self.flatten = flatten

    def __call__(self, *objs):
        self.child_validator.validate(*objs)
        if self.flatten:
            objs = flatten(self, *objs)
        self.objs = objs
        return self
# Number of components for each supported glm vector type.
GLM_VEC_LEN = {glm.vec2: 2, glm.vec3: 3, glm.vec4: 4}
def _pad(v: VecLike, new_len: int, pad: int) -> List[float]:
    """Return v as a list of exactly new_len entries: truncated when v is
    too long, right-padded with `pad` when it is too short."""
    if new_len <= len(v):
        return list(v[:new_len])
    return list(v) + [pad] * (new_len - len(v))
def tree_struct(op_cls, parts, depth=0):
    """Recursively pair up `parts` into a balanced tree of op_cls nodes
    with at most three children each; when the count is odd, the last
    node absorbs three children so nothing is left over. Recurses until
    three or fewer nodes remain."""
    if len(parts) <= 3:
        return parts

    has_odd_tail = len(parts) % 2 == 1
    paired = []
    for idx in range(0, len(parts) - 1, 2):
        if has_odd_tail and idx == len(parts) - 3:
            # Fold the trailing odd element into the final pair.
            node = op_cls(flatten=False)(parts[idx], parts[idx + 1],
                                         parts[idx + 2])
        else:
            node = op_cls(flatten=False)(parts[idx], parts[idx + 1])
        paired.append(node)
    return tree_struct(op_cls, paired, depth + 1)
def tree_children(op):
    # Rebalance op's (possibly very wide) child list into a tree of nested
    # ops of the same type, keeping each node's fan-out small.
    op.objs = tree_struct(type(op), op.objs)
def pre_process(obj, depth=0):
    """Recursively rebalance every Union2D/Union3D in the object tree so
    that wide unions become nested trees before serialization."""
    from xcsg.api import Union2D, Union3D  # cyclic import
    if isinstance(obj, (Union2D, Union3D)):
        tree_children(obj)
    for o in obj.objs:
        pre_process(o, depth + 1)
| 30.699074 | 113 | 0.636405 |
74a45b91df99fa0a7f81ef2d39395c16b52181fa | 2,774 | py | Python | bclearer_boson_1_2_source/b_code/orchestrators/boson_1_2_bclearer_orchestrator.py | teapowell/bclearer_boson_1_2 | 571b2e1ca6dee93ccc5cb4e30abe2660f40c2ac0 | [
"MIT"
] | null | null | null | bclearer_boson_1_2_source/b_code/orchestrators/boson_1_2_bclearer_orchestrator.py | teapowell/bclearer_boson_1_2 | 571b2e1ca6dee93ccc5cb4e30abe2660f40c2ac0 | [
"MIT"
] | null | null | null | bclearer_boson_1_2_source/b_code/orchestrators/boson_1_2_bclearer_orchestrator.py | teapowell/bclearer_boson_1_2 | 571b2e1ca6dee93ccc5cb4e30abe2660f40c2ac0 | [
"MIT"
] | 1 | 2021-11-19T13:05:53.000Z | 2021-11-19T13:05:53.000Z | import os
from bnop_source.b_code.bnop_facades import BnopFacades
from nf_common_source.code.services.datetime_service.time_helpers.time_getter import now_time_as_string_for_files
from nf_common_source.code.services.file_system_service.folder_selector import select_folder
from nf_ea_com_bnop_source.b_code.nf_ea_com_bnop_facades import NfEaComBnopFacades
from nf_ea_common_tools_source.b_code.services.session.orchestrators.ea_tools_session_managers import \
EaToolsSessionManagers
from bclearer_boson_1_2_source.b_code.orchestrators.boson_1_2_bclearer_stages_orchestrator import \
orchestrate_boson_1_2_bclearer_stages
from bclearer_boson_1_2_source.b_code.services.logging.set_up_and_close_out_logging import \
set_up_logger_and_output_folder, close_log_file
def orchestrate_boson_1_2_bclearer():
    """Top-level pipeline: ask the user for output and GML folders, set up
    logging, run the bCLEARer stages over the GML data, then close the log."""
    output_root_folder_path = \
        __get_output_root_folder_path()

    gml_data_folder = \
        select_folder(
            title='Select GML folder')

    set_up_logger_and_output_folder(
        output_folder_name=output_root_folder_path)

    __process_gml_folder(
        output_folder_name=output_root_folder_path,
        gml_data_folder_name=gml_data_folder.absolute_path_string)

    close_log_file()
def __get_output_root_folder_path() -> str:
    """Ask the user for an output folder, create a timestamped
    'bOSON_1_2_<timestamp>' subfolder inside it, and return its path."""
    output_folder = select_folder(title='Select output folder')

    timestamp = now_time_as_string_for_files().replace('_', '')

    output_root_folder_path = os.path.join(
        output_folder.absolute_path_string,
        'bOSON_1_2_' + timestamp)

    os.mkdir(output_root_folder_path)

    return output_root_folder_path
def __process_gml_folder(
        output_folder_name: str,
        gml_data_folder_name: str) \
        -> None:
    """Run the bCLEARer stages inside a managed EA tools session and export
    the evolved universe to XML before the session closes."""
    with EaToolsSessionManagers() \
            as ea_tools_session_manager:
        evolved_universe = \
            orchestrate_boson_1_2_bclearer_stages(
                ea_tools_session_manager=ea_tools_session_manager,
                output_folder_name=output_folder_name,
                gml_data_folder_name=gml_data_folder_name)

        __export_to_xml(
            evolved_universe=evolved_universe,
            output_folder_name=output_folder_name)
def __export_to_xml(
        evolved_universe,
        output_folder_name) \
        -> None:
    """Migrate the evolved nf_ea_com universe into BNOP and write it out as
    bOSON_1_2.xml in the output folder."""
    NfEaComBnopFacades().migrate_nf_ea_com_universe_to_bnop(
        nf_ea_com_universe=evolved_universe)

    target_path = os.path.join(
        output_folder_name,
        'bOSON_1_2.xml')

    BnopFacades().write_bnop_object_to_xml(
        xml_file_path=target_path)
| 29.827957 | 113 | 0.731074 |
a276019d6bb4b39eae3757b1697893269f1d9176 | 2,172 | py | Python | lib/node_modules/@stdlib/math/base/special/asinh/benchmark/python/benchmark.py | ghalimi/stdlib | 88f50b88aa945875ef053e2f89d26f9150a18c12 | [
"BSL-1.0"
] | 3,428 | 2016-07-14T13:48:46.000Z | 2022-03-31T22:32:13.000Z | lib/node_modules/@stdlib/math/base/special/asinh/benchmark/python/benchmark.py | ghalimi/stdlib | 88f50b88aa945875ef053e2f89d26f9150a18c12 | [
"BSL-1.0"
] | 435 | 2016-04-07T18:12:45.000Z | 2022-03-22T15:43:17.000Z | lib/node_modules/@stdlib/math/base/special/asinh/benchmark/python/benchmark.py | sthagen/stdlib | 042b6215818db0e2a784e72c7e054167dcefcd2a | [
"BSL-1.0"
] | 188 | 2016-11-29T22:58:11.000Z | 2022-03-17T06:46:43.000Z | #!/usr/bin/env python
#
# @license Apache-2.0
#
# Copyright (c) 2018 The Stdlib Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Benchmark asinh."""
from __future__ import print_function
import timeit
NAME = "asinh"  # benchmark name reported in the TAP output
REPEATS = 3  # number of benchmark repetitions
ITERATIONS = 1000000  # timeit iterations per repetition
def print_version():
    """Print the TAP version."""
    # TAP (Test Anything Protocol) consumers expect this header first.
    print("TAP version 13")
def print_summary(total, passing):
    """Print the benchmark summary.

    # Arguments

    * `total`: total number of tests
    * `passing`: number of passing tests

    """
    print("#")
    print("1..{0}".format(total))  # TAP plan
    print("# total {0}".format(total))
    print("# pass {0}".format(passing))
    print("#")
    print("# ok")
def print_results(elapsed):
    """Print benchmark results.

    # Arguments

    * `elapsed`: elapsed time (in seconds)

    # Examples

    ``` python
    python> print_results(0.131009101868)
    ```
    """
    print("  ---")
    print("  iterations: {0}".format(ITERATIONS))
    print("  elapsed: {0}".format(elapsed))
    print("  rate: {0}".format(ITERATIONS / elapsed))
    print("  ...")
def benchmark():
    """Run the benchmark and print benchmark results."""
    # The benchmarked statement evaluates asinh over a random value in
    # [-100, 100); the setup string runs once per timing run.
    setup = "from math import asinh; from random import random;"
    stmt = "y = asinh(200.0*random() - 100.0)"

    t = timeit.Timer(stmt, setup=setup)

    print_version()
    for i in range(REPEATS):
        print("# python::" + NAME)
        elapsed = t.timeit(number=ITERATIONS)
        print_results(elapsed)
        print("ok " + str(i+1) + " benchmark finished")
    # All repeats are counted as passing.
    print_summary(REPEATS, REPEATS)
def main():
    """Run the benchmark."""
    benchmark()


if __name__ == "__main__":
    main()
| 22.163265 | 74 | 0.63628 |
c229cacdbc9a14997150b60f206c06c9e86afbcc | 34,552 | py | Python | alpaca_backtrader_api/alpacastore.py | bbeale/alpaca-backtrader-api | 9624b331708ea0c5410dd3828ce26a47a12695fc | [
"Apache-2.0"
] | null | null | null | alpaca_backtrader_api/alpacastore.py | bbeale/alpaca-backtrader-api | 9624b331708ea0c5410dd3828ce26a47a12695fc | [
"Apache-2.0"
] | null | null | null | alpaca_backtrader_api/alpacastore.py | bbeale/alpaca-backtrader-api | 9624b331708ea0c5410dd3828ce26a47a12695fc | [
"Apache-2.0"
] | null | null | null | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import collections
import time
from enum import Enum
import traceback
from datetime import datetime, timedelta, time as dtime
from dateutil.parser import parse as date_parse
import time as _time
import trading_calendars
import threading
import asyncio
import alpaca_trade_api as tradeapi
import pytz
import requests
import pandas as pd
import backtrader as bt
from alpaca_trade_api.entity import Aggs
from backtrader.metabase import MetaParams
from backtrader.utils.py3 import queue, with_metaclass
# Market timezone used for timestamp handling throughout the store.
NY = 'America/New_York'
# Extend the exceptions to support extra cases
class AlpacaError(Exception):
    """ Generic error class, catches Alpaca response errors
    """

    def __init__(self, error_response):
        self.error_response = error_response
        message = "Alpaca API returned error code {0} ({1}) ".format(
            error_response['code'], error_response['message'])
        super(AlpacaError, self).__init__(message)
class AlpacaRequestError(AlpacaError):
    """Error payload used when the HTTP request itself fails (code 599)."""

    def __init__(self):
        er = dict(code=599, message='Request Error', description='')
        # Name the class explicitly: super(self.__class__, self) causes
        # infinite recursion if this class is ever subclassed.
        super(AlpacaRequestError, self).__init__(er)
class AlpacaStreamError(AlpacaError):
    """Error payload for streaming failures (code 598); `content` becomes
    the description."""

    def __init__(self, content=''):
        er = dict(code=598, message='Failed Streaming', description=content)
        # Name the class explicitly: super(self.__class__, self) causes
        # infinite recursion if this class is ever subclassed.
        super(AlpacaStreamError, self).__init__(er)
class AlpacaTimeFrameError(AlpacaError):
    """Error payload for unsupported bar timeframes (code 597)."""

    def __init__(self, content):
        # Bug fix: `content` was previously ignored (description stayed
        # empty); forward it, matching AlpacaStreamError's behavior.
        er = dict(code=597, message='Not supported TimeFrame',
                  description=content)
        # Name the class explicitly: super(self.__class__, self) causes
        # infinite recursion if this class is ever subclassed.
        super(AlpacaTimeFrameError, self).__init__(er)
class AlpacaNetworkError(AlpacaError):
    """Error payload for network-level failures (code 596)."""

    def __init__(self):
        er = dict(code=596, message='Network Error', description='')
        # Name the class explicitly: super(self.__class__, self) causes
        # infinite recursion if this class is ever subclassed.
        super(AlpacaNetworkError, self).__init__(er)
class API(tradeapi.REST):
    """REST client wrapper that converts request failures into Alpaca error
    payload dicts instead of raising, so callers can inspect the response
    uniformly."""

    def _request(self,
                 method,
                 path,
                 data=None,
                 base_url=None,
                 api_version=None):
        """Perform the HTTP request; on failure return an error-response
        dict (with 'code', 'message', 'description') rather than raising."""
        # Added the try block
        try:
            return super(API, self)._request(
                method, path, data, base_url, api_version)
        except requests.RequestException as e:
            resp = AlpacaRequestError().error_response
            resp['description'] = str(e)
            return resp
        except tradeapi.rest.APIError as e:
            # changed from raise to return
            return e._error
        except Exception as e:
            resp = AlpacaNetworkError().error_response
            resp['description'] = str(e)
            return resp
        # The unreachable trailing `return None` (dead code after
        # exhaustive try/except returns) was removed.
class Granularity(Enum):
    """Bar granularities supported for historical data requests."""
    Ticks = "ticks"
    Daily = "day"
    Minute = "minute"
class StreamingMethod(Enum):
    """What kind of stream to subscribe to: account/trade updates, quotes,
    or minute aggregates."""
    AccountUpdate = 'account_update'
    Quote = "quote"
    MinuteAgg = "minute_agg"
class Streamer:
    """Wrapper around ``tradeapi.StreamConn`` that forwards streaming
    messages (quotes, minute aggregates, account/trade updates) into the
    queue ``q`` consumed elsewhere."""
    conn = None

    def __init__(
            self,
            q,
            api_key='',
            api_secret='',
            instrument='',
            method: StreamingMethod = StreamingMethod.AccountUpdate,
            base_url='',
            data_url='',
            data_stream='',
            *args,
            **kwargs):

        try:
            # make sure we have an event loop, if not create a new one
            asyncio.get_event_loop()
        except RuntimeError:
            asyncio.set_event_loop(asyncio.new_event_loop())
        self.data_stream = data_stream
        self.conn = tradeapi.StreamConn(api_key,
                                        api_secret,
                                        base_url,
                                        data_url=data_url,
                                        data_stream=self.data_stream)
        self.instrument = instrument
        self.method = method
        self.q = q
        # Register handlers for every channel pattern we may subscribe to.
        self.conn.on('authenticated')(self.on_auth)
        self.conn.on(r'Q.*')(self.on_quotes)
        self.conn.on(r'AM.*')(self.on_agg_min)
        self.conn.on(r'A.*')(self.on_agg_min)
        self.conn.on(r'account_updates')(self.on_account)
        self.conn.on(r'trade_updates')(self.on_trade)

    def run(self):
        """Build the channel list for the configured method/data stream and
        block in StreamConn.run() on a fresh event loop."""
        channels = []
        if self.method == StreamingMethod.AccountUpdate:
            channels = ['trade_updates']  # 'account_updates'
        else:
            # Channel prefixes differ between the polygon and alpaca data
            # streams. NOTE(review): if data_stream is neither 'polygon'
            # nor 'alpacadatav1', `maps` is unbound below — confirm only
            # these two values are ever passed in.
            if self.data_stream == 'polygon':
                maps = {"quote": "Q.",
                        "minute_agg": "AM."}
            elif self.data_stream == 'alpacadatav1':
                maps = {"quote": "alpacadatav1/Q.",
                        "minute_agg": "alpacadatav1/AM."}
            channels = [maps[self.method.value] + self.instrument]

        # StreamConn.run() drives asyncio itself; give this thread its own
        # loop so it does not clash with the main thread's.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        self.conn.run(channels)

    # Setup event handlers
    async def on_auth(self, conn, stream, msg):
        pass

    async def on_listen(self, conn, stream, msg):
        pass

    async def on_quotes(self, conn, subject, msg):
        # Stamp the raw payload with an epoch timestamp before queueing.
        msg._raw['time'] = msg.timestamp.to_pydatetime().timestamp()
        self.q.put(msg._raw)

    async def on_agg_sec(self, conn, subject, msg):
        self.q.put(msg)

    async def on_agg_min(self, conn, subject, msg):
        # Use the bar's end time as the message time.
        msg._raw['time'] = msg.end.to_pydatetime().timestamp()
        self.q.put(msg._raw)

    async def on_account(self, conn, stream, msg):
        self.q.put(msg)

    async def on_trade(self, conn, stream, msg):
        self.q.put(msg)
class MetaSingleton(MetaParams):
    '''Metaclass that turns every class using it into a singleton: the first
    instantiation is cached on the class and returned by all later calls.'''
    def __init__(cls, name, bases, dct):
        super(MetaSingleton, cls).__init__(name, bases, dct)
        cls._singleton = None

    def __call__(cls, *args, **kwargs):
        instance = cls._singleton
        if instance is None:
            instance = super(MetaSingleton, cls).__call__(*args, **kwargs)
            cls._singleton = instance
        return instance
class AlpacaStore(with_metaclass(MetaSingleton, object)):
    '''Singleton class wrapping to control the connections to Alpaca.

    Params:
      - ``key_id`` (default:``None``): Alpaca API key id
      - ``secret_key`` (default: ``None``): Alpaca API secret key
      - ``paper`` (default: ``False``): use the paper trading environment
      - ``account_tmout`` (default: ``10.0``): refresh period for account
        value/cash refresh
    '''
    BrokerCls = None  # broker class will autoregister
    DataCls = None  # data class will auto register
    # backtrader-style parameter tuple, exposed as self.p.<name>
    params = (
        ('key_id', ''),
        ('secret_key', ''),
        ('paper', False),
        ('usePolygon', False),  # use polygon instead of alpaca data
        ('account_tmout', 10.0),  # account balance refresh timeout
        ('api_version', None)
    )
    _DTEPOCH = datetime(1970, 1, 1)
    # environment names and REST base urls, selected in __init__ via `paper`
    _ENVPRACTICE = 'paper'
    _ENVLIVE = 'live'
    _ENV_PRACTICE_URL = 'https://paper-api.alpaca.markets'
    _ENV_LIVE_URL = 'https://api.alpaca.markets'
    @classmethod
    def getdata(cls, *args, **kwargs):
        '''Returns ``DataCls`` with args, kwargs (DataCls auto-registers)'''
        return cls.DataCls(*args, **kwargs)

    @classmethod
    def getbroker(cls, *args, **kwargs):
        '''Returns broker with *args, **kwargs from registered ``BrokerCls``'''
        return cls.BrokerCls(*args, **kwargs)
    def __init__(self):
        """Set up notification/bookkeeping state and the REST client."""
        super(AlpacaStore, self).__init__()
        self.notifs = collections.deque()  # store notifications for cerebro
        self._env = None  # reference to cerebro for general notifications
        self.broker = None  # broker instance
        self.datas = list()  # datas that have registered over start
        self._orders = collections.OrderedDict()  # map order.ref to oid
        self._ordersrev = collections.OrderedDict()  # map oid to order.ref
        # transactions seen before their order id was registered (per oid)
        self._transpend = collections.defaultdict(collections.deque)
        # select paper or live endpoint depending on the `paper` parameter
        if self.p.paper:
            self._oenv = self._ENVPRACTICE
            self.p.base_url = self._ENV_PRACTICE_URL
        else:
            self._oenv = self._ENVLIVE
            self.p.base_url = self._ENV_LIVE_URL
        self.oapi = API(self.p.key_id,
                        self.p.secret_key,
                        self.p.base_url,
                        self.p.api_version)
        # cached account figures, refreshed by the _t_account thread
        self._cash = 0.0
        self._value = 0.0
        self._evt_acct = threading.Event()
    def start(self, data=None, broker=None):
        """Hook called by data feeds and/or the broker as they start; wires
        the store to whichever side is starting.  Broker start also spins up
        the streaming and worker threads.
        """
        # Datas require some processing to kickstart data reception
        if data is None and broker is None:
            self.cash = None
            return
        if data is not None:
            self._env = data._env
            # For datas simulate a queue with None to kickstart co
            self.datas.append(data)
            if self.broker is not None:
                self.broker.data_started(data)
        elif broker is not None:
            self.broker = broker
            self.streaming_events()
            self.broker_threads()
    def stop(self):
        """Ask the broker worker threads to terminate."""
        # signal end of thread (each worker breaks when it dequeues None)
        if self.broker is not None:
            self.q_ordercreate.put(None)
            self.q_orderclose.put(None)
            self.q_account.put(None)

    def put_notification(self, msg, *args, **kwargs):
        """Queue a notification for later delivery to cerebro."""
        self.notifs.append((msg, args, kwargs))

    def get_notifications(self):
        '''Return the pending "store" notifications'''
        self.notifs.append(None)  # put a mark / threads could still append
        return [x for x in iter(self.notifs.popleft, None)]
# Alpaca supported granularities
_GRANULARITIES = {
(bt.TimeFrame.Minutes, 1): '1Min',
(bt.TimeFrame.Minutes, 5): '5Min',
(bt.TimeFrame.Minutes, 15): '15Min',
(bt.TimeFrame.Minutes, 60): '1H',
(bt.TimeFrame.Days, 1): '1D',
}
def get_positions(self):
try:
positions = self.oapi.list_positions()
except (AlpacaError, AlpacaRequestError,):
return []
if positions:
if 'code' in positions[0]._raw:
return []
# poslist = positions.get('positions', [])
return positions
def get_granularity(self, timeframe, compression) -> Granularity:
if timeframe == bt.TimeFrame.Ticks:
return Granularity.Ticks
if timeframe == bt.TimeFrame.Minutes:
return Granularity.Minute
elif timeframe == bt.TimeFrame.Days:
return Granularity.Daily
def get_instrument(self, dataname):
try:
insts = self.oapi.get_asset(dataname)
except (AlpacaError, AlpacaRequestError,):
return None
return insts or None
    def streaming_events(self, tmout=None):
        """Start the trade-update streaming thread and its listener thread;
        return the queue they communicate over."""
        q = queue.Queue()
        kwargs = {'q': q, 'tmout': tmout}
        t = threading.Thread(target=self._t_streaming_listener, kwargs=kwargs)
        t.daemon = True
        t.start()
        t = threading.Thread(target=self._t_streaming_events, kwargs=kwargs)
        t.daemon = True
        t.start()
        return q

    def _t_streaming_listener(self, q, tmout=None):
        # forward every streamed trade update into the transaction machinery
        while True:
            trans = q.get()
            self._transaction(trans.order)
    def _t_streaming_events(self, q, tmout=None):
        """Thread body: run a Streamer for account/trade updates into *q*."""
        # optional delay before connecting
        if tmout is not None:
            _time.sleep(tmout)
        streamer = Streamer(q,
                            api_key=self.p.key_id,
                            api_secret=self.p.secret_key,
                            base_url=self.p.base_url,
                            data_url=os.environ.get("DATA_PROXY_WS", ''),
                            data_stream='polygon' if self.p.usePolygon else
                            'alpacadatav1'
                            )
        streamer.run()
    def candles(self, dataname, dtbegin, dtend, timeframe, compression,
                candleFormat, includeFirst):
        """
        Launch a daemon thread fetching historical bars; bars arrive on the
        returned queue (see ``_t_candles`` for the wire format).

        :param dataname: symbol name. e.g AAPL
        :param dtbegin: datetime start
        :param dtend: datetime end
        :param timeframe: bt.TimeFrame
        :param compression: distance between samples. e.g if 1 =>
               get sample every day. if 3 => get sample every 3 days
        :param candleFormat: (bidask, midpoint, trades) - not used we get bars
        :param includeFirst:
        :return: queue.Queue delivering the bar records
        """
        # locals().copy() snapshots all the arguments above; it must remain
        # the first statement so no extra locals leak into the kwargs
        kwargs = locals().copy()
        kwargs.pop('self')
        kwargs['q'] = q = queue.Queue()
        t = threading.Thread(target=self._t_candles, kwargs=kwargs)
        t.daemon = True
        t.start()
        return q
@staticmethod
def iso_date(date_str):
"""
this method will make sure that dates are formatted properly
as with isoformat
:param date_str:
:return: YYYY-MM-DD date formatted
"""
return date_parse(date_str).date().isoformat()
    def _t_candles(self, dataname, dtbegin, dtend, timeframe, compression,
                   candleFormat, includeFirst, q):
        """Thread body behind ``candles``: fetch bars from polygon or alpaca,
        clip them to [dtbegin, dtend] and push record dicts onto *q*.

        Protocol on *q*: one dict per bar (with a 'time' key), ``{}`` as end
        of transmission, error dicts (with a 'code' key) followed by ``None``
        on failure.
        """
        granularity: Granularity = self.get_granularity(timeframe, compression)
        # dates are normalized first; a None granularity is still caught below
        dtbegin, dtend = self._make_sure_dates_are_initialized_properly(
            dtbegin, dtend, granularity)
        if granularity is None:
            e = AlpacaTimeFrameError('granularity is missing')
            q.put(e.error_response)
            return
        try:
            if self.p.usePolygon:
                cdl = self.get_aggs_from_polygon(dataname,
                                                 dtbegin,
                                                 dtend,
                                                 granularity.value,
                                                 compression)
            else:
                cdl = self.get_aggs_from_alpaca(dataname,
                                                dtbegin,
                                                dtend,
                                                granularity.value,
                                                compression)
        except AlpacaError as e:
            print(str(e))
            q.put(e.error_response)
            q.put(None)
            return
        except Exception:
            traceback.print_exc()
            q.put({'code': 'error'})
            q.put(None)
            return
        # don't use dt.replace. use localize
        # (https://stackoverflow.com/a/1592837/2739124)
        cdl = cdl.loc[
            pytz.timezone(NY).localize(dtbegin) if
            not dtbegin.tzname() else dtbegin:
            pytz.timezone(NY).localize(dtend) if
            not dtend.tzname() else dtend
        ].dropna(subset=['high'])
        records = cdl.reset_index().to_dict('records')
        for r in records:
            r['time'] = r['timestamp']
            q.put(r)
        q.put({})  # end of transmission
    def _make_sure_dates_are_initialized_properly(self, dtbegin, dtend,
                                                  granularity):
        """
        dates may or may not be specified by the user.
        when they do, they probably don't include NY timezone data
        also, when granularity is minute, we want to make sure we get data when
        market is opened. so if it doesn't - let's set end date to be last
        known minute with opened market.
        this method takes care of all these issues.
        :param dtbegin: requested start (may be falsy; naive assumed UTC)
        :param dtend: requested end (may be falsy; naive assumed UTC)
        :param granularity: Granularity member or None
        :return: (dtbegin, dtend) both converted to NY time
        """
        if not dtend:
            dtend = pd.Timestamp('now', tz=NY)
        else:
            # user-supplied datetimes are localized as UTC
            dtend = pd.Timestamp(pytz.timezone('UTC').localize(dtend))
        if granularity == Granularity.Minute:
            calendar = trading_calendars.get_calendar(name='NYSE')
            # walk back day by day until the minute falls in an open session
            while not calendar.is_open_on_minute(dtend):
                dtend = dtend.replace(hour=15,
                                      minute=59,
                                      second=0,
                                      microsecond=0)
                dtend -= timedelta(days=1)
        if not dtbegin:
            # default lookback: 30 days for daily bars, 3 days otherwise
            days = 30 if granularity == Granularity.Daily else 3
            delta = timedelta(days=days)
            dtbegin = dtend - delta
        else:
            dtbegin = pd.Timestamp(pytz.timezone('UTC').localize(dtbegin))
        while dtbegin > dtend:
            # if we start the script during market hours we could get this
            # situation. this resolves that.
            dtbegin -= timedelta(days=1)
        return dtbegin.astimezone(NY), dtend.astimezone(NY)
    def get_aggs_from_polygon(self,
                              dataname,
                              dtbegin,
                              dtend,
                              granularity,
                              compression):
        """
        so polygon has a much more convenient api for this than alpaca because
        we could insert the compression in to the api call and we don't need to
        resample it. but, at this point in time, something is not working
        properly and data is returned in segments. meaning, we have patches of
        missing data. e.g we request data from 2020-03-01 to 2020-07-01 and we
        get something like this: 2020-03-01:2020-03-15, 2020-06-25:2020-07-01
        so that makes life difficult.. there's no way to know which patch will
        be returned and which one we should try to get again.
        so the solution must be, ask data in segments. I select an arbitrary
        time window of 2 weeks, and split the calls until we get all required
        data

        Returns a pandas DataFrame of bars (minute bars filtered to regular
        trading hours).
        """
        def _clear_out_of_market_hours(df):
            """
            only interested in samples between 9:30, 16:00 NY time
            """
            return df.between_time("09:30", "16:00")
        if granularity == 'day':
            # daily bars: one call covers the whole range
            cdl = self.oapi.polygon.historic_agg_v2(
                dataname,
                compression,
                granularity,
                _from=self.iso_date(dtbegin.isoformat()),
                to=self.iso_date(dtend.isoformat())).df
        else:
            # minute bars: fetch in 2-week segments and stitch the result
            cdl = pd.DataFrame()
            segment_start = dtbegin
            segment_end = segment_start + timedelta(weeks=2) if \
                dtend - dtbegin >= timedelta(weeks=2) else dtend
            while cdl.empty or cdl.index[-1] < dtend.replace(second=0):
                # we want to collect data until the last row is later than
                # the requested dtend. we don't force it to contain dtend
                # because it might be missing, or we may be resampling (so
                # again, it will be missing)
                response = self.oapi.polygon.historic_agg_v2(
                    dataname,
                    compression,
                    'minute',
                    _from=self.iso_date(segment_start.isoformat()),
                    to=self.iso_date(segment_end.isoformat()))
                # No result from the server, most likely error
                if response.df.shape[0] == 0 and cdl.shape[0] == 0:
                    raise Exception("received empty response")
                temp = response.df
                cdl = pd.concat([cdl, temp])
                cdl = cdl[~cdl.index.duplicated()]
                segment_start = segment_end
                segment_end = segment_start + timedelta(weeks=2) if \
                    dtend - dtbegin >= timedelta(weeks=2) else dtend
            cdl = _clear_out_of_market_hours(cdl)
        return cdl
    def get_aggs_from_alpaca(self,
                             dataname,
                             start,
                             end,
                             granularity,
                             compression):
        """
        https://alpaca.markets/docs/api-documentation/api-v2/market-data/bars/
        Alpaca API as a limit of 1000 records per api call. meaning, we need to
        do multiple calls to get all the required data if the date range is
        large.
        also, the alpaca api does not support compression (or, you can't get
        5 minute bars e.g) so we need to resample the received bars.
        also, we need to drop out of market records.
        this function does all of that.
        note:
        this was the old way of getting the data
          response = self.oapi.get_aggs(dataname,
                                        compression,
                                        granularity,
                                        self.iso_date(start_dt),
                                        self.iso_date(end_dt))
          the thing is get_aggs work nicely for days but not for minutes, and
          it is not a documented API. barset on the other hand does
          but we need to manipulate it to be able to work with it
          smoothly and return data the same way polygon does
        """
        def _iterate_api_calls():
            """
            you could get max 1000 samples from the server. if we need more
            than that we need to do several api calls.
            currently the alpaca api supports also 5Min and 15Min so we could
            optimize server communication time by addressing timeframes
            """
            got_all = False
            curr = end
            response = []
            while not got_all:
                if granularity == 'minute' and compression == 5:
                    timeframe = "5Min"
                elif granularity == 'minute' and compression == 15:
                    timeframe = "15Min"
                elif granularity == 'ticks':
                    timeframe = "minute"
                else:
                    timeframe = granularity
                r = self.oapi.get_barset(dataname,
                                         'minute' if timeframe == 'ticks' else
                                         timeframe,
                                         limit=1000,
                                         end=curr.isoformat()
                                         )[dataname]
                if r:
                    earliest_sample = r[0].t
                    # prepend the new (older) batch before what we have
                    r = r._raw
                    r.extend(response)
                    response = r
                    if earliest_sample <= (pytz.timezone(NY).localize(
                            start) if not start.tzname() else start):
                        got_all = True
                    else:
                        # step just before the earliest sample and page again
                        delta = timedelta(days=1) if granularity == "day" \
                            else timedelta(minutes=1)
                        curr = earliest_sample - delta
                else:
                    # no more data is available, let's return what we have
                    break
            return response

        def _clear_out_of_market_hours(df):
            """
            only interested in samples between 9:30, 16:00 NY time
            """
            return df.between_time("09:30", "16:00")

        def _drop_early_samples(df):
            """
            samples from server don't start at 9:30 NY time
            let's drop earliest samples
            """
            for i, b in df.iterrows():
                if i.time() >= dtime(9, 30):
                    return df[i:]

        def _resample(df):
            """
            samples returned with certain window size (1 day, 1 minute) user
            may want to work with different window size (5min)
            """
            if granularity == 'minute':
                sample_size = f"{compression}Min"
            else:
                sample_size = f"{compression}D"
            df = df.resample(sample_size).agg(
                collections.OrderedDict([
                    ('open', 'first'),
                    ('high', 'max'),
                    ('low', 'min'),
                    ('close', 'last'),
                    ('volume', 'sum'),
                ])
            )
            if granularity == 'minute':
                return df.between_time("09:30", "16:00")
            else:
                return df
        # def _back_to_aggs(df):
        #     response = []
        #     for i, v in df.iterrows():
        #         response.append({
        #             "o": v.open,
        #             "h": v.high,
        #             "l": v.low,
        #             "c": v.close,
        #             "v": v.volume,
        #             "t": i.timestamp() * 1000,
        #         })
        #     return Aggs({"results": response})
        if not start:
            # no start date: a single (most recent) page is enough
            response = self.oapi.get_barset(dataname,
                                            granularity,
                                            limit=1000,
                                            end=end)[dataname]._raw
        else:
            response = _iterate_api_calls()
        for bar in response:
            # Aggs are in milliseconds, we multiply by 1000 to
            # change seconds to ms
            bar['t'] *= 1000
        response = Aggs({"results": response})
        cdl = response.df
        if granularity == 'minute':
            cdl = _clear_out_of_market_hours(cdl)
            cdl = _drop_early_samples(cdl)
        if compression != 1:
            response = _resample(cdl)
            # response = _back_to_aggs(cdl)
        else:
            response = cdl
        # drop incomplete rows and duplicate timestamps before returning
        response = response.dropna()
        response = response[~response.index.duplicated()]
        return response
def streaming_prices(self, dataname, timeframe, tmout=None):
q = queue.Queue()
kwargs = {'q': q,
'dataname': dataname,
'timeframe': timeframe,
'tmout': tmout}
t = threading.Thread(target=self._t_streaming_prices, kwargs=kwargs)
t.daemon = True
t.start()
return q
    def _t_streaming_prices(self, dataname, timeframe, q, tmout):
        """Thread body: run a quote/minute-aggregate Streamer into *q*."""
        if tmout is not None:
            _time.sleep(tmout)
        # NOTE(review): only Ticks and Minutes are handled; any other
        # timeframe leaves ``method`` unbound and raises NameError below --
        # confirm callers never pass anything else
        if timeframe == bt.TimeFrame.Ticks:
            method = StreamingMethod.Quote
        elif timeframe == bt.TimeFrame.Minutes:
            method = StreamingMethod.MinuteAgg
        streamer = Streamer(q,
                            api_key=self.p.key_id,
                            api_secret=self.p.secret_key,
                            instrument=dataname,
                            method=method,
                            base_url=self.p.base_url,
                            data_url=os.environ.get("DATA_PROXY_WS", ''),
                            data_stream='polygon' if self.p.usePolygon else
                            'alpacadatav1')
        streamer.run()
    def get_cash(self):
        """Last cash balance fetched by the account-refresh thread."""
        return self._cash

    def get_value(self):
        """Last portfolio value fetched by the account-refresh thread."""
        return self._value
_ORDEREXECS = {
bt.Order.Market: 'market',
bt.Order.Limit: 'limit',
bt.Order.Stop: 'stop',
bt.Order.StopLimit: 'stop_limit',
bt.Order.StopTrail: 'trailing_stop',
}
    def broker_threads(self):
        """Start the account-refresh, order-create and order-cancel worker
        threads and wait once for the first account snapshot."""
        self.q_account = queue.Queue()
        self.q_account.put(True)  # force an immediate update
        t = threading.Thread(target=self._t_account)
        t.daemon = True
        t.start()
        self.q_ordercreate = queue.Queue()
        t = threading.Thread(target=self._t_order_create)
        t.daemon = True
        t.start()
        self.q_orderclose = queue.Queue()
        t = threading.Thread(target=self._t_order_cancel)
        t.daemon = True
        t.start()
        # Wait once for the values to be set
        self._evt_acct.wait(self.p.account_tmout)
    def _t_account(self):
        """Worker thread: refresh cash/value on demand or every
        ``account_tmout`` seconds; terminates when a ``None`` is queued."""
        while True:
            try:
                msg = self.q_account.get(timeout=self.p.account_tmout)
                if msg is None:
                    break  # end of thread
            except queue.Empty:  # tmout -> time to refresh
                pass
            try:
                accinfo = self.oapi.get_account()
            except Exception as e:
                self.put_notification(e)
                continue
            # error payload embedded in the account response
            if 'code' in accinfo._raw:
                self.put_notification(accinfo.message)
                continue
            try:
                self._cash = float(accinfo.cash)
                self._value = float(accinfo.portfolio_value)
            except KeyError:
                pass
            self._evt_acct.set()
    def order_create(self, order, stopside=None, takeside=None, **kwargs):
        """Translate a backtrader order (plus optional bracket sides) into
        Alpaca submit_order kwargs and queue it for the create thread.
        Returns the order unchanged.
        """
        okwargs = dict()
        # different data feeds may set _name or _dataname so we cover both
        okwargs['symbol'] = order.data._name if order.data._name else \
            order.data._dataname
        okwargs['qty'] = abs(int(order.created.size))
        okwargs['side'] = 'buy' if order.isbuy() else 'sell'
        okwargs['type'] = self._ORDEREXECS[order.exectype]
        okwargs['time_in_force'] = "gtc"
        if order.exectype not in [bt.Order.Market, bt.Order.StopTrail,
                                  bt.Order.Stop]:
            okwargs['limit_price'] = str(order.created.price)
        if order.exectype in [bt.Order.StopLimit, bt.Order.Stop]:
            # NOTE(review): for a plain Stop order the trigger price is
            # usually order.created.price (pricelimit being StopLimit's limit
            # price) -- confirm pricelimit is intended for both types
            okwargs['stop_price'] = order.created.pricelimit
        # Not supported in the alpaca api
        # if order.exectype == bt.Order.StopTrail:
        #     okwargs['trailingStop'] = order.trailamount
        if stopside:
            okwargs['stop_loss'] = {'stop_price': stopside.price}
        if takeside:
            okwargs['take_profit'] = {'limit_price': takeside.price}
        if stopside or takeside:
            okwargs['order_class'] = "bracket"
        if order.exectype == bt.Order.StopTrail:
            # exactly one of trailpercent / trailamount must be provided
            if order.trailpercent and order.trailamount:
                raise Exception("You can't create trailing stop order with "
                                "both TrailPrice and TrailPercent. choose one")
            if order.trailpercent:
                okwargs['trail_percent'] = order.trailpercent
            elif order.trailamount:
                okwargs['trail_price'] = order.trailamount
            else:
                raise Exception("You must provide either trailpercent or "
                                "trailamount when creating StopTrail order")
        # anything from the user
        okwargs.update(order.info)
        okwargs.update(**kwargs)
        self.q_ordercreate.put((order.ref, okwargs,))
        return order
def _t_order_create(self):
def _check_if_transaction_occurred(order_id):
# a transaction may have happened and was stored. if so let's
# process it
tpending = self._transpend[order_id]
tpending.append(None) # eom marker
while True:
trans = tpending.popleft()
if trans is None:
break
self._process_transaction(order_id, trans)
while True:
try:
if self.q_ordercreate.empty():
continue
msg = self.q_ordercreate.get()
if msg is None:
continue
oref, okwargs = msg
try:
o = self.oapi.submit_order(**okwargs)
except Exception as e:
self.put_notification(e)
self.broker._reject(oref)
continue
try:
oid = o.id
except Exception:
if 'code' in o._raw:
self.put_notification(f"error submitting order "
f"code: {o.code}. msg: "
f"{o.message}")
else:
self.put_notification(
"General error from the Alpaca server")
self.broker._reject(oref)
continue
if okwargs['type'] == 'market':
self.broker._accept(oref) # taken immediately
self._orders[oref] = oid
self._ordersrev[oid] = oref # maps ids to backtrader order
_check_if_transaction_occurred(oid)
if o.legs:
index = 1
for leg in o.legs:
self._orders[oref + index] = leg.id
self._ordersrev[leg.id] = oref + index
_check_if_transaction_occurred(leg.id)
self.broker._submit(oref) # inside it submits the legs too
if okwargs['type'] == 'market':
self.broker._accept(oref) # taken immediately
except Exception as e:
print(str(e))
    def order_cancel(self, order):
        """Queue *order* for cancellation and return it unchanged."""
        self.q_orderclose.put(order.ref)
        return order

    def _t_order_cancel(self):
        """Worker thread: cancel queued orders; ``None`` terminates it."""
        while True:
            oref = self.q_orderclose.get()
            if oref is None:
                break
            oid = self._orders.get(oref, None)
            if oid is None:
                continue  # the order is no longer there
            try:
                self.oapi.cancel_order(oid)
            except Exception as e:
                self.put_notification(
                    "Order not cancelled: {}, {}".format(
                        oid, e))
                continue
            self.broker._cancel(oref)
    # Alpaca order statuses that correspond to a newly created/accepted order
    _X_ORDER_CREATE = (
        'new',
        'accepted',
        'pending_new',
        'accepted_for_bidding',
    )

    def _transaction(self, trans):
        # Invoked from Streaming Events. May actually receive an event for an
        # oid which has not yet been returned after creating an order. Hence
        # store if not yet seen, else forward to processer
        oid = trans['id']
        if not self._ordersrev.get(oid, False):
            self._transpend[oid].append(trans)
        # _process_transaction is a no-op (KeyError -> early return) for ids
        # not yet in _ordersrev, so the pending copy stored above is replayed
        # later by _check_if_transaction_occurred once the id registers
        self._process_transaction(oid, trans)

    # statuses that carry a (partial) fill
    _X_ORDER_FILLED = ('partially_filled', 'filled', )
    def _process_transaction(self, oid, trans):
        """Translate an Alpaca order-status event into the matching broker
        notification (fill/accept/expire/reject) for the mapped order ref."""
        try:
            # pop: each event consumes the mapping; re-added below on accept
            oref = self._ordersrev.pop(oid)
        except KeyError:
            return
        ttype = trans['status']
        if ttype in self._X_ORDER_FILLED:
            size = float(trans['filled_qty'])
            if trans['side'] == 'sell':
                size = -size
            price = float(trans['filled_avg_price'])
            self.broker._fill(oref, size, price, ttype=ttype)
        elif ttype in self._X_ORDER_CREATE:
            self.broker._accept(oref)
            self._ordersrev[oid] = oref
        elif ttype == 'calculated':
            return
        elif ttype == 'expired':
            self.broker._expire(oref)
        else:  # default action ... if nothing else
            print("Process transaction - Order type: {}".format(ttype))
            self.broker._reject(oref)
| 35.768116 | 79 | 0.531691 |
602dcc4655b07fd428dc44e566e65a45402c7fe2 | 26,737 | py | Python | integration_tests/test_suites/k8s-integration-test-suite/test_scheduler.py | coderanger/dagster | d3e323f8ed55cd906d6f44f19595348ea1580b2d | [
"Apache-2.0"
] | null | null | null | integration_tests/test_suites/k8s-integration-test-suite/test_scheduler.py | coderanger/dagster | d3e323f8ed55cd906d6f44f19595348ea1580b2d | [
"Apache-2.0"
] | null | null | null | integration_tests/test_suites/k8s-integration-test-suite/test_scheduler.py | coderanger/dagster | d3e323f8ed55cd906d6f44f19595348ea1580b2d | [
"Apache-2.0"
] | null | null | null | import os
import subprocess
import sys
from contextlib import contextmanager
import kubernetes
import pytest
from dagster import DagsterInstance, ScheduleDefinition, seven
from dagster.core.definitions import lambda_solid, pipeline, repository
from dagster.core.host_representation import (
ManagedGrpcPythonEnvRepositoryLocationOrigin,
RepositoryLocation,
RepositoryLocationHandle,
)
from dagster.core.scheduler.job import JobStatus, JobType
from dagster.core.scheduler.scheduler import (
DagsterScheduleDoesNotExist,
DagsterScheduleReconciliationError,
DagsterSchedulerError,
)
from dagster.core.storage.pipeline_run import PipelineRunStatus
from dagster.core.test_utils import environ
from dagster.core.types.loadable_target_origin import LoadableTargetOrigin
from marks import mark_scheduler
@pytest.fixture(scope="function")
def unset_dagster_home():
    """Temporarily remove DAGSTER_HOME from the environment for one test.

    Fix: the original teardown only restored a previously-set value; when
    the variable was initially unset but the test body set it (e.g. via
    ``environ``-style helpers), the value leaked into subsequent tests.
    Now the variable is explicitly removed in that case too.
    """
    old_env = os.getenv("DAGSTER_HOME")
    if old_env is not None:
        del os.environ["DAGSTER_HOME"]
    yield
    if old_env is not None:
        os.environ["DAGSTER_HOME"] = old_env
    else:
        # restore the original "unset" state
        os.environ.pop("DAGSTER_HOME", None)
@pipeline
def no_config_pipeline():
    # Trivial single-solid pipeline targeted by every schedule in this file.
    # (Comment, not a docstring, so the pipeline description is unchanged.)
    @lambda_solid
    def return_hello():
        return "Hello"
    return return_hello()
# name -> ScheduleDefinition for the three schedules exercised by the tests:
# one daily (midnight), two every-minute (one of which omits run_config).
schedules_dict = {
    "no_config_pipeline_daily_schedule": ScheduleDefinition(
        name="no_config_pipeline_daily_schedule",
        cron_schedule="0 0 * * *",
        pipeline_name="no_config_pipeline",
        run_config={"storage": {"filesystem": None}},
    ),
    "no_config_pipeline_every_min_schedule": ScheduleDefinition(
        name="no_config_pipeline_every_min_schedule",
        cron_schedule="* * * * *",
        pipeline_name="no_config_pipeline",
        run_config={"storage": {"filesystem": None}},
    ),
    "default_config_pipeline_every_min_schedule": ScheduleDefinition(
        name="default_config_pipeline_every_min_schedule",
        cron_schedule="* * * * *",
        pipeline_name="no_config_pipeline",
    ),
}
def define_schedules():
    """Return every ScheduleDefinition declared in ``schedules_dict``."""
    return [*schedules_dict.values()]
@repository
def test_repository():
    # With DAGSTER_TEST_SMALL_REPO set, expose the same repository minus
    # "default_config_pipeline_every_min_schedule" -- used via
    # get_smaller_external_repo to simulate removing a schedule definition.
    if os.getenv("DAGSTER_TEST_SMALL_REPO"):
        return [no_config_pipeline] + list(
            filter(
                lambda x: not x.name == "default_config_pipeline_every_min_schedule",
                define_schedules(),
            )
        )
    return [no_config_pipeline] + define_schedules()
@contextmanager
def get_test_external_repo():
    # Spin up a managed gRPC repository location for this file's
    # `test_repository` and yield the external repository handle; the
    # location is torn down when the context exits.
    with RepositoryLocationHandle.create_from_repository_location_origin(
        ManagedGrpcPythonEnvRepositoryLocationOrigin(
            loadable_target_origin=LoadableTargetOrigin(
                executable_path=sys.executable, python_file=__file__, attribute="test_repository",
            ),
            location_name="test_location",
        ),
    ) as handle:
        yield RepositoryLocation.from_handle(handle).get_repository("test_repository")

@contextmanager
def get_smaller_external_repo():
    # Same repository loaded with DAGSTER_TEST_SMALL_REPO=1, i.e. without
    # "default_config_pipeline_every_min_schedule" (see test_repository).
    with environ({"DAGSTER_TEST_SMALL_REPO": "1"}):
        with get_test_external_repo() as repo:
            yield repo
@mark_scheduler
def test_init(
    dagster_instance_with_k8s_scheduler,
    schedule_tempdir,
    helm_namespace_for_k8s_run_launcher,
    restore_k8s_cron_tab,
):  # pylint:disable=unused-argument
    """Reconciling a fresh instance stores all 3 schedules, STOPPED."""
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repository:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repository)
        # Check schedules are saved to disk
        assert "schedules" in os.listdir(schedule_tempdir)
        assert len(instance.all_stored_job_state(job_type=JobType.SCHEDULE)) == 3
        schedules = instance.all_stored_job_state(job_type=JobType.SCHEDULE)
        for schedule in schedules:
            assert schedule.status == JobStatus.STOPPED
@mark_scheduler
def test_re_init(
    dagster_instance_with_k8s_scheduler,
    schedule_tempdir,
    helm_namespace_for_k8s_run_launcher,
    restore_k8s_cron_tab,
):  # pylint:disable=unused-argument
    """A second reconcile must preserve the state of a started schedule."""
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        # Start schedule
        schedule_state = instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
        )
        # Re-initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        # Check schedules are saved to disk
        assert "schedules" in os.listdir(schedule_tempdir)
        schedule_states = instance.all_stored_job_state(job_type=JobType.SCHEDULE)
        for state in schedule_states:
            if state.name == "no_config_pipeline_every_min_schedule":
                assert state == schedule_state
@mark_scheduler
def test_start_and_stop_schedule(
    dagster_instance_with_k8s_scheduler,
    schedule_tempdir,
    helm_namespace_for_k8s_run_launcher,
    restore_k8s_cron_tab,
):  # pylint:disable=unused-argument
    """Starting a schedule creates its k8s CronJob; stopping removes it."""
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        schedule = external_repo.get_external_schedule(
            schedule_name="no_config_pipeline_every_min_schedule"
        )
        schedule_origin_id = schedule.get_external_origin_id()
        instance.start_schedule_and_update_storage_state(external_schedule=schedule)
        assert "schedules" in os.listdir(schedule_tempdir)
        assert instance.scheduler.get_cron_job(schedule_origin_id=schedule_origin_id)
        instance.stop_schedule_and_update_storage_state(schedule_origin_id=schedule_origin_id)
        assert not instance.scheduler.get_cron_job(schedule_origin_id=schedule_origin_id)
@mark_scheduler
def test_start_non_existent_schedule(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
):  # pylint:disable=unused-argument
    """Stopping a schedule origin id that was never registered must raise."""
    with pytest.raises(DagsterScheduleDoesNotExist):
        dagster_instance_with_k8s_scheduler.stop_schedule_and_update_storage_state("asdf")
@mark_scheduler
def test_start_schedule_cron_job(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
):  # pylint:disable=unused-argument
    """Starting all schedules creates one CronJob per schedule with the
    expected cron expression and container args.

    Fix: None comparisons use ``is None`` (PEP 8 / flake8 E711) instead of
    ``== None``.
    """
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
        )
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_daily_schedule")
        )
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("default_config_pipeline_every_min_schedule")
        )
        # Inspect the cron tab
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 3
        external_schedules_dict = {
            external_repo.get_external_schedule(name).get_external_origin_id(): schedule_def
            for name, schedule_def in schedules_dict.items()
        }
        for cron_job in cron_jobs:
            cron_schedule = cron_job.spec.schedule
            command = cron_job.spec.job_template.spec.template.spec.containers[0].command
            args = cron_job.spec.job_template.spec.template.spec.containers[0].args
            schedule_origin_id = cron_job.metadata.name
            schedule_def = external_schedules_dict[schedule_origin_id]
            assert cron_schedule == schedule_def.cron_schedule
            # no explicit command: the image entrypoint is used, args only
            assert command is None
            assert args[:5] == [
                "dagster",
                "api",
                "launch_scheduled_execution",
                "/tmp/launch_scheduled_execution_output",
                "--schedule_name",
            ]
@mark_scheduler
def test_remove_schedule_def(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
):  # pylint:disable=unused-argument
    """Reconciling against a repo that dropped a schedule removes its state."""
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        assert len(instance.all_stored_job_state(job_type=JobType.SCHEDULE)) == 3
        # the smaller repo lacks "default_config_pipeline_every_min_schedule"
        with get_smaller_external_repo() as smaller_repo:
            instance.reconcile_scheduler_state(smaller_repo)
            assert len(instance.all_stored_job_state(job_type=JobType.SCHEDULE)) == 2
@mark_scheduler
def test_add_schedule_def(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
):  # pylint:disable=unused-argument
    """Reconciling with an added schedule registers its state without
    touching the CronJobs of already-started schedules."""
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        with get_smaller_external_repo() as smaller_repo:
            # Initialize scheduler
            instance.reconcile_scheduler_state(smaller_repo)
            # Start all schedule and verify cron tab, schedule storage, and errors
            instance.start_schedule_and_update_storage_state(
                external_repo.get_external_schedule("no_config_pipeline_daily_schedule")
            )
            instance.start_schedule_and_update_storage_state(
                external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            )
            assert len(instance.all_stored_job_state(job_type=JobType.SCHEDULE)) == 2
            assert len(instance.scheduler.get_all_cron_jobs()) == 2
            assert len(instance.scheduler_debug_info().errors) == 0
            # Reconcile with an additional schedule added
            instance.reconcile_scheduler_state(external_repo)
            assert len(instance.all_stored_job_state(job_type=JobType.SCHEDULE)) == 3
            assert len(instance.scheduler.get_all_cron_jobs()) == 2
            assert len(instance.scheduler_debug_info().errors) == 0
            instance.start_schedule_and_update_storage_state(
                external_repo.get_external_schedule("default_config_pipeline_every_min_schedule")
            )
            assert len(instance.all_stored_job_state(job_type=JobType.SCHEDULE)) == 3
            assert len(instance.scheduler.get_all_cron_jobs()) == 3
            assert len(instance.scheduler_debug_info().errors) == 0
@mark_scheduler
def test_start_and_stop_schedule_cron_tab(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
):  # pylint:disable=unused-argument
    """Exercise start/stop/double-start/double-stop transitions and verify the
    CronJob count tracks them; reconciliation must never change the count."""
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        # Start schedule
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
        )
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 1
        # Try starting it again -- starting a running schedule must raise and
        # leave the cron tab unchanged
        with pytest.raises(DagsterSchedulerError):
            instance.start_schedule_and_update_storage_state(
                external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            )
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 1
        # Start another schedule
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_daily_schedule")
        )
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 2
        # Stop second schedule
        instance.stop_schedule_and_update_storage_state(
            external_repo.get_external_schedule(
                "no_config_pipeline_daily_schedule"
            ).get_external_origin_id()
        )
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 1
        # Try stopping second schedule again -- stopping is idempotent
        instance.stop_schedule_and_update_storage_state(
            external_repo.get_external_schedule(
                "no_config_pipeline_daily_schedule"
            ).get_external_origin_id()
        )
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 1
        # Start second schedule
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_daily_schedule")
        )
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 2
        # Reconcile schedule state, should be in the same state
        instance.reconcile_scheduler_state(external_repo)
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 2
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("default_config_pipeline_every_min_schedule")
        )
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 3
        # Reconcile schedule state, should be in the same state
        instance.reconcile_scheduler_state(external_repo)
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 3
        # Stop all schedules
        instance.stop_schedule_and_update_storage_state(
            external_repo.get_external_schedule(
                "no_config_pipeline_every_min_schedule"
            ).get_external_origin_id()
        )
        instance.stop_schedule_and_update_storage_state(
            external_repo.get_external_schedule(
                "no_config_pipeline_daily_schedule"
            ).get_external_origin_id()
        )
        instance.stop_schedule_and_update_storage_state(
            external_repo.get_external_schedule(
                "default_config_pipeline_every_min_schedule"
            ).get_external_origin_id()
        )
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 0
        # Reconcile schedule state, should be in the same state
        instance.reconcile_scheduler_state(external_repo)
        cron_jobs = instance.scheduler.get_all_cron_jobs()
        assert len(cron_jobs) == 0
@mark_scheduler
def test_script_execution(
    dagster_instance_with_k8s_scheduler,
    unset_dagster_home,
    helm_namespace_for_k8s_run_launcher,
    restore_k8s_cron_tab,
): # pylint:disable=unused-argument,redefined-outer-name
    """End-to-end check: the command baked into the scheduler's K8s CronJob,
    when run locally against a fresh DAGSTER_HOME, actually launches the
    scheduled pipeline run and it finishes with SUCCESS.
    """
    with seven.TemporaryDirectory() as tempdir:
        with environ({"DAGSTER_HOME": tempdir}):
            local_instance = DagsterInstance.get()
            with get_test_external_repo() as external_repo:
                # Initialize scheduler
                dagster_instance_with_k8s_scheduler.reconcile_scheduler_state(external_repo)
                dagster_instance_with_k8s_scheduler.start_schedule_and_update_storage_state(
                    external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
                )
                local_runs = local_instance.get_runs()
                assert len(local_runs) == 0
                # Read back the CronJob the scheduler created to recover the
                # exact command it would execute in the cluster.
                cron_job_name = external_repo.get_external_schedule(
                    "no_config_pipeline_every_min_schedule"
                ).get_external_origin_id()
                batch_v1beta1_api = kubernetes.client.BatchV1beta1Api()
                cron_job = batch_v1beta1_api.read_namespaced_cron_job(
                    cron_job_name, helm_namespace_for_k8s_run_launcher
                )
                container = cron_job.spec.job_template.spec.template.spec.containers[0]
                args = container.args
                cli_cmd = [sys.executable, "-m"] + args
                # Run the CronJob command locally and verify it produces a run.
                p = subprocess.Popen(
                    cli_cmd,
                    stdin=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    env={
                        "DAGSTER_HOME": tempdir,
                        "LC_ALL": "C.UTF-8",
                        "LANG": "C.UTF-8",
                    }, # https://stackoverflow.com/questions/36651680/click-will-abort-further-execution-because-python-3-was-configured-to-use-ascii
                )
                stdout, stderr = p.communicate()
                print("Command completed with stdout: ", stdout) # pylint: disable=print-call
                print("Command completed with stderr: ", stderr) # pylint: disable=print-call
                assert p.returncode == 0
                local_runs = local_instance.get_runs()
                assert len(local_runs) == 1
                run_id = local_runs[0].run_id
                pipeline_run = local_instance.get_run_by_id(run_id)
                assert pipeline_run
                assert pipeline_run.status == PipelineRunStatus.SUCCESS
@mark_scheduler
def test_start_schedule_fails(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
): # pylint:disable=unused-argument
    """If the K8s API raises while creating the CronJob, the exception must
    propagate and the schedule's stored state must remain STOPPED.
    """
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        def raises(*args, **kwargs):
            raise Exception("Patch")
        # Force the underlying K8s call to fail.
        instance._scheduler._api.create_namespaced_cron_job = ( # pylint: disable=protected-access
            raises
        )
        with pytest.raises(Exception, match="Patch"):
            instance.start_schedule_and_update_storage_state(
                external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            )
        schedule = instance.get_job_state(
            external_repo.get_external_schedule(
                "no_config_pipeline_every_min_schedule"
            ).get_external_origin_id()
        )
        assert schedule.status == JobStatus.STOPPED
@mark_scheduler
def test_start_schedule_unsuccessful(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
): # pylint:disable=unused-argument
    """If the K8s create call silently does nothing (no CronJob appears),
    starting the schedule must raise DagsterSchedulerError.
    """
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        def do_nothing(**_):
            pass
        # Simulate a no-op API: the call "succeeds" but creates nothing.
        instance._scheduler._api.create_namespaced_cron_job = ( # pylint: disable=protected-access
            do_nothing
        )
        # Start schedule
        with pytest.raises(
            DagsterSchedulerError,
            match="Attempted to add K8s CronJob for schedule no_config_pipeline_every_min_schedule, "
            "but failed. The schedule no_config_pipeline_every_min_schedule is not running.",
        ):
            instance.start_schedule_and_update_storage_state(
                external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            )
@mark_scheduler
def test_start_schedule_manual_delete_debug(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
): # pylint:disable=unused-argument
    """Manually deleting a running schedule's CronJob should surface as one
    scheduler_debug_info error, and reconciliation should repair it.
    """
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
        )
        instance.scheduler.get_all_cron_jobs()
        # Manually delete the schedule
        instance.scheduler._end_cron_job( # pylint: disable=protected-access
            external_repo.get_external_schedule(
                "no_config_pipeline_every_min_schedule"
            ).get_external_origin_id(),
        )
        # Check debug command
        debug_info = instance.scheduler_debug_info()
        assert len(debug_info.errors) == 1
        # Reconcile should fix error
        instance.reconcile_scheduler_state(external_repo)
        debug_info = instance.scheduler_debug_info()
        assert len(debug_info.errors) == 0
@mark_scheduler
def test_start_schedule_manual_add_debug(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
): # pylint:disable=unused-argument
    """Manually adding a CronJob for a schedule that storage considers stopped
    should surface as one debug error, and reconciliation should repair it.
    """
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        # Manually add the schedule from to the crontab
        instance.scheduler._start_cron_job( # pylint: disable=protected-access
            external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
        )
        # Check debug command
        debug_info = instance.scheduler_debug_info()
        assert len(debug_info.errors) == 1
        # Reconcile should fix error
        instance.reconcile_scheduler_state(external_repo)
        debug_info = instance.scheduler_debug_info()
        assert len(debug_info.errors) == 0
@mark_scheduler
def test_stop_schedule_fails(
    dagster_instance_with_k8s_scheduler,
    schedule_tempdir,
    helm_namespace_for_k8s_run_launcher,
    restore_k8s_cron_tab,
): # pylint:disable=unused-argument
    """If deleting the CronJob raises, stopping the schedule must propagate
    the exception and leave the stored state RUNNING.
    """
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        external_schedule = external_repo.get_external_schedule(
            "no_config_pipeline_every_min_schedule"
        )
        schedule_origin_id = external_schedule.get_external_origin_id()
        def raises(*args, **kwargs):
            raise Exception("Patch")
        # Force the CronJob deletion to fail.
        instance._scheduler._end_cron_job = raises # pylint: disable=protected-access
        instance.start_schedule_and_update_storage_state(external_schedule)
        assert "schedules" in os.listdir(schedule_tempdir)
        # End schedule
        with pytest.raises(Exception, match="Patch"):
            instance.stop_schedule_and_update_storage_state(schedule_origin_id)
        schedule = instance.get_job_state(schedule_origin_id)
        assert schedule.status == JobStatus.RUNNING
@mark_scheduler
def test_stop_schedule_unsuccessful(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
): # pylint:disable=unused-argument
    """If the CronJob deletion silently does nothing (job still present),
    stopping the schedule must raise DagsterSchedulerError.
    """
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        def do_nothing(**_):
            pass
        # Simulate a no-op delete: the call "succeeds" but removes nothing.
        instance._scheduler._end_cron_job = do_nothing # pylint: disable=protected-access
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
        )
        # End schedule
        with pytest.raises(
            DagsterSchedulerError,
            match="Attempted to remove existing K8s CronJob for schedule "
            "no_config_pipeline_every_min_schedule, but failed. Schedule is still running.",
        ):
            instance.stop_schedule_and_update_storage_state(
                external_repo.get_external_schedule(
                    "no_config_pipeline_every_min_schedule"
                ).get_external_origin_id()
            )
@mark_scheduler
def test_wipe(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab
): # pylint:disable=unused-argument
    """wipe_all_schedules() must clear every stored schedule job state."""
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        # Start schedule
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
        )
        # Wipe scheduler
        instance.wipe_all_schedules()
        # Check schedules are wiped
        assert instance.all_stored_job_state(job_type=JobType.SCHEDULE) == []
@mark_scheduler
def test_reconcile_failure(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
): # pylint:disable=unused-argument
    """When every scheduler operation fails, reconciliation must aggregate
    the failures into a single DagsterScheduleReconciliationError whose
    message enumerates each underlying error.
    """
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        instance.reconcile_scheduler_state(external_repo)
        instance.start_schedule_and_update_storage_state(
            external_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
        )
        # Patch all three scheduler operations to fail.
        def failed_start_job(*_):
            raise DagsterSchedulerError("Failed to start")
        def failed_refresh_job(*_):
            raise DagsterSchedulerError("Failed to refresh")
        def failed_end_job(*_):
            raise DagsterSchedulerError("Failed to stop")
        instance._scheduler.start_schedule = failed_start_job # pylint: disable=protected-access
        instance._scheduler.refresh_schedule = ( # pylint: disable=protected-access
            failed_refresh_job
        )
        instance._scheduler.stop_schedule = failed_end_job # pylint: disable=protected-access
        with pytest.raises(
            DagsterScheduleReconciliationError,
            match="Error 1: Failed to stop\n Error 2: Failed to stop\n Error 3: Failed to refresh",
        ):
            instance.reconcile_scheduler_state(external_repo)
@mark_scheduler
def test_reconcile_failure_when_deleting_schedule_def(
    dagster_instance_with_k8s_scheduler, helm_namespace_for_k8s_run_launcher, restore_k8s_cron_tab,
): # pylint:disable=unused-argument
    """Reconciling against a repo that dropped a schedule definition must
    report a failure when deleting the removed schedule's state fails.
    """
    instance = dagster_instance_with_k8s_scheduler
    with get_test_external_repo() as external_repo:
        # Initialize scheduler
        instance.reconcile_scheduler_state(external_repo)
        assert len(instance.all_stored_job_state(job_type=JobType.SCHEDULE)) == 3
        def failed_end_job(*_):
            raise DagsterSchedulerError("Failed to stop")
        # Force deletion of the orphaned schedule to fail.
        instance._scheduler.stop_schedule_and_delete_from_storage = ( # pylint: disable=protected-access
            failed_end_job
        )
        with pytest.raises(
            DagsterScheduleReconciliationError, match="Error 1: Failed to stop",
        ):
            with get_smaller_external_repo() as smaller_repo:
                instance.reconcile_scheduler_state(smaller_repo)
| 37.083218 | 150 | 0.709728 |
c0a706d253998661233f300730b2adfef8a6ac7c | 533 | py | Python | GettingStartTensorFlow/Chapter2/JuliaSet.py | zhangxiaoya/Practice-TensorFlow | e15bb95f88c99ada17d66880012e7ef25f6a7240 | [
"MIT"
] | null | null | null | GettingStartTensorFlow/Chapter2/JuliaSet.py | zhangxiaoya/Practice-TensorFlow | e15bb95f88c99ada17d66880012e7ef25f6a7240 | [
"MIT"
] | null | null | null | GettingStartTensorFlow/Chapter2/JuliaSet.py | zhangxiaoya/Practice-TensorFlow | e15bb95f88c99ada17d66880012e7ef25f6a7240 | [
"MIT"
] | null | null | null | import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
Y, X = np.mgrid[-2:2:0.005, -2:2:0.005]
Z = X + 1j * Y
Z = tf.constant(Z.astype("complex64"))
zs = tf.Variable(Z)
ns = tf.Variable(tf.zeros_like(Z, "float32"))
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
c = complex(0.0, 0.75)
zs_ = zs * zs - c
not_diverged = tf.abs(zs_) < 4
step = tf.group(zs.assign(zs_), ns.assign_add(tf.cast(not_diverged, "float32")))
for i in range(200):
step.run()
plt.imshow(ns.eval())
plt.show() | 22.208333 | 80 | 0.677298 |
964ad4bf379f04669b12592e33463ad9b36d6297 | 5,341 | py | Python | inout.py | RohJiHyun/splocs | 4c6ce9983000fdc12e404b9ae04f9d1a6c9bdf5d | [
"MIT"
] | null | null | null | inout.py | RohJiHyun/splocs | 4c6ce9983000fdc12e404b9ae04f9d1a6c9bdf5d | [
"MIT"
] | null | null | null | inout.py | RohJiHyun/splocs | 4c6ce9983000fdc12e404b9ae04f9d1a6c9bdf5d | [
"MIT"
] | null | null | null | from os import path
from glob import glob
from io import StringIO
import numpy as np
import h5py
from scipy.sparse.csgraph import connected_components
from scipy.sparse import csr_matrix
from util import sort_nicely, veclen, filter_reindex
def convert_sequence_to_hdf5(filename_pattern, loader_function, hdf_output_file):
    """Load a sequence of per-frame mesh files and save them as one HDF5 animation.

    Parameters
    ----------
    filename_pattern : str
        Glob pattern (``~`` is expanded) matching the per-frame mesh files;
        matches are sorted with ``sort_nicely`` to get frame order.
    loader_function : callable
        Called as ``loader_function(path) -> (verts, tris)`` for each file.
    hdf_output_file : str
        Output HDF5 file; stores datasets 'verts' and 'tris' plus the
        normalization attributes 'mean' and 'scale'.

    Raises
    ------
    ValueError
        If the triangle topology differs between frames.
    """
    verts_all = []
    tris = None
    files = glob(path.expanduser(filename_pattern))
    sort_nicely(files)
    for i, f in enumerate(files):
        print("loading file %d/%d [%s]" % (i+1, len(files), f))
        verts, new_tris = loader_function(f)
        # Bug fix: the original condition "shape != shape and arr != arr"
        # short-circuited to False for equal shapes (so differing indices were
        # never detected) and produced an ambiguous array truth value otherwise.
        if tris is not None and (
            new_tris.shape != tris.shape or np.any(new_tris != tris)
        ):
            raise ValueError("inconsistent topology between meshes of different frames")
        tris = new_tris
        verts_all.append(verts)
    verts_all = np.array(verts_all, np.float32)
    # Clean the mesh and normalize the animation into the unit cube.
    verts_all, tris, _, verts_mean, verts_scale = preprocess_mesh_animation(verts_all, tris)
    with h5py.File(hdf_output_file, 'w') as f:
        f.create_dataset('verts', data=verts_all, compression='gzip')
        f['tris'] = tris
        f.attrs['mean'] = verts_mean
        f.attrs['scale'] = verts_scale
    print("saved as %s" % hdf_output_file)
def preprocess_mesh_animation(verts, tris):
    """
    Preprocess the mesh animation:
        - removes zero-area triangles
        - keeps only the biggest connected component in the mesh
        - normalizes the animation into the -0.5 ... 0.5 cube

    Parameters
    ----------
    verts : ndarray, shape (n_frames, n_verts, 3)
        Vertex positions per frame (modified in place by normalization).
    tris : ndarray, shape (n_tris, 3)
        Triangle vertex indices, assumed constant across frames.

    Returns
    -------
    (verts, tris, removed_vertex_mask, verts_mean, verts_scale)
        ``removed_vertex_mask`` is True for vertices that were dropped;
        ``verts_mean``/``verts_scale`` undo the normalization.
    """
    print("Vertices: ", verts.shape)
    # Bug fix: printed verts.shape twice; second line is the triangle array.
    print("Triangles: ", tris.shape)
    # Bug fix: the originals were assert-on-tuple, which is always truthy.
    assert verts.ndim == 3, "verts must be (n_frames, n_verts, 3)"
    assert tris.ndim == 2, "tris must be (n_tris, 3)"
    # Check for zero-area triangles (degenerate normal in frame 0) and filter.
    e1 = verts[0, tris[:, 1]] - verts[0, tris[:, 0]]
    e2 = verts[0, tris[:, 2]] - verts[0, tris[:, 0]]
    n = np.cross(e1, e2)
    tris = tris[np.linalg.norm(n, axis=1) > 1.e-8]
    # Build the vertex adjacency graph and find connected components.
    ij = np.r_[np.c_[tris[:, 0], tris[:, 1]],
               np.c_[tris[:, 0], tris[:, 2]],
               np.c_[tris[:, 1], tris[:, 2]]]
    G = csr_matrix((np.ones(len(ij)), ij.T), shape=(verts.shape[1], verts.shape[1]))
    n_components, labels = connected_components(G, directed=False)
    # Bug fix: keep_vert was undefined (NameError) for a single-component
    # mesh; default to keeping everything. np.bool is also removed from numpy.
    keep_vert = np.ones(verts.shape[1], dtype=bool)
    if n_components > 1:
        size_components = np.bincount(labels)
        print("[warning] found %d connected components in the mesh, keeping only the biggest one" % n_components)
        print("component sizes: ")
        print(size_components)
        keep_vert = labels == size_components.argmax()
    if not keep_vert.all():
        verts = verts[:, keep_vert, :]
        # Drop triangles touching removed vertices and reindex the survivors.
        tris = filter_reindex(keep_vert, tris[keep_vert[tris].all(axis=1)])
    # Normalize the animation into the -0.5 ... 0.5 cube.
    verts_mean = verts.mean(axis=0).mean(axis=0)
    verts -= verts_mean
    verts_scale = np.abs(verts.ptp(axis=1)).max()
    verts /= verts_scale
    print("after preprocessing:")
    print("Vertices: ", verts.shape)
    print("Triangles: ", tris.shape)
    return verts, tris, ~keep_vert, verts_mean, verts_scale
def load_ply(filename):
    """Load a triangle mesh from a PLY file via TVTK.

    Returns
    -------
    (points, tris)
        ``points`` is an (n_verts, 3) float array, ``tris`` an (n_tris, 3)
        index array. Requires TVTK (mayavi2); raises ImportError with a hint
        otherwise.
    """
    try:
        from enthought.tvtk.api import tvtk
    except ImportError:
        # Fall back to the newer module path before giving up.
        try:
            from tvtk.api import tvtk
        except ImportError:
            print ("Reading PLY files requires TVTK. The easiest way is to install mayavi2")
            print ("(e.g. on Ubuntu: apt-get install mayavi2)")
            raise
    reader = tvtk.PLYReader(file_name=filename)
    reader.update()
    # VTK stores each polygon as [n, i0, i1, ...]; require triangles (n == 3).
    polys = reader.output.polys.to_array().reshape((-1, 4))
    assert( np.all(polys[:,0] == 3))
    return reader.output.points.to_array(), polys[:,1:]
def load_off(filename, no_colors=False):
    """Load a mesh from an OFF/COFF file.

    Parameters
    ----------
    filename : str
        Path to the OFF (or COFF, with per-vertex colors) file.
    no_colors : bool
        If True, return ``(vertices, faces)``; otherwise return
        ``(vertices, colors, faces)`` where ``colors`` is None for plain OFF.

    Raises
    ------
    AssertionError
        If the OFF/COFF header line is missing.
    """
    with open(filename) as fh:
        lines = fh.readlines()
    lines = [line for line in lines if line.strip() != '' and line[0] != '#']
    # Bug fix: the original assert was on a (condition, message) tuple,
    # which is always truthy, so the header was never actually checked.
    assert lines[0].strip() in ['OFF', 'COFF'], 'OFF header missing'
    has_colors = lines[0].strip() == 'COFF'
    n_verts, n_faces, _ = map(int, lines[1].split())
    # np.float/np.int were removed from numpy; ndmin=2 keeps single-row
    # files two-dimensional so the column slicing below still works.
    vertex_data = np.loadtxt(
        StringIO(''.join(lines[2:2 + n_verts])),
        dtype=float, ndmin=2)
    if n_faces > 0:
        # Each face row is [n, i0, i1, ...]; drop the leading vertex count.
        faces = np.loadtxt(StringIO(''.join(lines[2 + n_verts:])),
                           dtype=int, ndmin=2)[:, 1:]
    else:
        faces = None
    if has_colors:
        colors = vertex_data[:, 3:].astype(np.uint8)
        vertex_data = vertex_data[:, :3]
    else:
        colors = None
    if no_colors:
        return vertex_data, faces
    else:
        return vertex_data, colors, faces
def save_off(filename, vertices=None, faces=None):
    """Write a mesh to an OFF file.

    Parameters
    ----------
    filename : str
        Output path.
    vertices : array-like of shape (n_verts, 3), optional
        Vertex coordinates; treated as empty when None.
    faces : iterable of index sequences, optional
        Polygon faces (any arity); treated as empty when None.
    """
    if vertices is None:
        vertices = []
    if faces is None:
        faces = []
    with open(filename, 'w') as f:
        f.write("OFF\n%d %d 0\n" % (len(vertices), len(faces)))
        # Bug fix: the original guards used "> 1", silently dropping
        # single-vertex and single-face meshes from the output.
        if len(vertices) > 0:
            np.savetxt(f, vertices, fmt="%f %f %f")
        for face in faces:
            # OFF face row: "<arity> i0 i1 ..."
            fmt = " ".join(["%d"] * (len(face) + 1)) + "\n"
            f.write(fmt % ((len(face),) + tuple(map(int, face))))
def load_splocs(component_hdf5_file):
    """Load sparse localized deformation components from an HDF5 file.

    Returns
    -------
    (Xmean, tris, components, names)
        ``Xmean`` is the mesh stored under 'default', ``components`` holds
        every other dataset with the mean subtracted, ``names`` the sorted
        component dataset names.
    """
    with h5py.File(component_hdf5_file, 'r') as f:
        tris = f['tris'][()]
        Xmean = f['default'][()]
        # Every dataset other than 'tris'/'default' is one component mesh.
        names = sorted(list(set(f.keys()) - set(['tris', 'default'])))
        components = np.array([
            f[name][()] - Xmean
            for name in names])
    return Xmean, tris, components, names
| 37.34965 | 118 | 0.607939 |
c8017b0051725007f0b5c23600cf2130fbb334a8 | 1,743 | py | Python | templates/functions/layers/dependencies/python/bin/jp.py | 1Strategy/custom-resource-starter-template | d8297691e780c81e8ec94766fba13773e9fe9ff7 | [
"Apache-2.0"
] | null | null | null | templates/functions/layers/dependencies/python/bin/jp.py | 1Strategy/custom-resource-starter-template | d8297691e780c81e8ec94766fba13773e9fe9ff7 | [
"Apache-2.0"
] | null | null | null | templates/functions/layers/dependencies/python/bin/jp.py | 1Strategy/custom-resource-starter-template | d8297691e780c81e8ec94766fba13773e9fe9ff7 | [
"Apache-2.0"
] | 4 | 2021-11-25T00:16:19.000Z | 2021-12-06T09:26:22.000Z | #!/Users/scott/.local/share/virtualenvs/custom_resource_starter-u0Y-q6K1/bin/python3.8
import sys
import json
import argparse
from pprint import pformat
import jmespath
from jmespath import exceptions
def main():
    """CLI entry point for evaluating a JMESPath expression.

    Reads JSON from ``--filename`` or stdin, prints the search result as
    indented JSON, and returns a non-zero exit code with a categorized
    message on stderr for JMESPath errors. With ``--ast`` only the parsed
    AST is pretty-printed and no data is read.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('expression')
    parser.add_argument('-f', '--filename',
                        help=('The filename containing the input data. '
                              'If a filename is not given then data is '
                              'read from stdin.'))
    parser.add_argument('--ast', action='store_true',
                        help=('Pretty print the AST, do not search the data.'))
    args = parser.parse_args()
    expression = args.expression
    if args.ast:
        # Only print the AST
        expression = jmespath.compile(args.expression)
        sys.stdout.write(pformat(expression.parsed))
        sys.stdout.write('\n')
        return 0
    if args.filename:
        with open(args.filename, 'r') as f:
            data = json.load(f)
    else:
        data = sys.stdin.read()
        data = json.loads(data)
    try:
        sys.stdout.write(json.dumps(
            jmespath.search(expression, data), indent=4))
        sys.stdout.write('\n')
    # Map each JMESPath error class to a labeled stderr message + exit code 1.
    except exceptions.ArityError as e:
        sys.stderr.write("invalid-arity: %s\n" % e)
        return 1
    except exceptions.JMESPathTypeError as e:
        sys.stderr.write("invalid-type: %s\n" % e)
        return 1
    except exceptions.UnknownFunctionError as e:
        sys.stderr.write("unknown-function: %s\n" % e)
        return 1
    except exceptions.ParseError as e:
        sys.stderr.write("syntax-error: %s\n" % e)
        return 1
if __name__ == '__main__':
    sys.exit(main())
| 31.690909 | 86 | 0.604705 |
ec44fc585141a5d6b32a7d67ec2cb7faed5f3018 | 2,802 | py | Python | session.py | arturosevilla/notification-server-example | a180f411a65e389c7b21cb9b8d6af8f973bf204e | [
"BSD-3-Clause"
] | 1 | 2015-09-16T10:32:07.000Z | 2015-09-16T10:32:07.000Z | session.py | arturosevilla/notification-server-example | a180f411a65e389c7b21cb9b8d6af8f973bf204e | [
"BSD-3-Clause"
] | null | null | null | session.py | arturosevilla/notification-server-example | a180f411a65e389c7b21cb9b8d6af8f973bf204e | [
"BSD-3-Clause"
] | null | null | null | from redis import Redis
import pickle
from datetime import timedelta
from werkzeug.datastructures import CallbackDict
from flask.sessions import SessionInterface, SessionMixin
from sid import encode_id, generate_id, get_secure_id
class RedisSession(CallbackDict, SessionMixin):
    """Flask session object: a dict that flags itself modified on writes.

    ``sid`` is the server-side session id, ``new`` marks sessions created
    for this request, ``modified`` tracks whether a save is needed.
    """
    def __init__(self, initial=None, sid=None, new=False):
        # CallbackDict invokes this with the dict instance on every mutation;
        # the parameter is named ``self`` but receives that dict.
        def on_update(self):
            self.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.sid = sid
        self.new = new
        self.modified = False
class RedisSessionInterface(SessionInterface):
    """Server-side Flask session interface storing pickled sessions in Redis.

    The session payload lives under the key ``beaker:<sid>:session``; the
    cookie carries only the session id (HMAC-protected via the ``sid``
    helpers when ``secret`` is given).
    """

    # Serialization backend and session object class, overridable in subclasses.
    serializer = pickle
    session_class = RedisSession

    def __init__(self, redis=None, secret=None):
        # Default to a local Redis instance when none is supplied.
        if redis is None:
            redis = Redis()
        self.redis = redis
        self.secret = secret

    def get_session_id(self, request_id):
        """Map the cookie value to the server-side session id (HMAC check)."""
        if self.secret is None:
            return request_id
        return get_secure_id(self.secret, request_id)

    def get_redis_expiration_time(self, app, session):
        """TTL for the Redis key: app lifetime for permanent sessions, else 1 day."""
        if session.permanent:
            return app.permanent_session_lifetime
        return timedelta(days=1)

    def open_session(self, app, request):
        """Load the session for this request, creating a fresh one if needed."""
        sid = request.cookies.get(app.session_cookie_name)
        # Bug fix: removed a stray debug ``print sid`` here -- it leaked
        # session ids to stdout and was Python 2-only syntax, breaking the
        # module under Python 3.
        if not sid:
            sid = generate_id()
            return self.session_class(sid=sid, new=True)
        sid = self.get_session_id(sid)
        key = 'beaker:' + sid + ':session'
        val = self.redis.get(key)
        if val is not None:
            data = self.serializer.loads(val)
            return self.session_class(data, sid=sid)
        # Unknown or expired key: start a fresh session under the same sid.
        return self.session_class(sid=sid, new=True)

    def logout(self, app, request):
        """Delete the server-side session for this request, if any."""
        cookie_sid = request.cookies.get(app.session_cookie_name)
        if cookie_sid is None:
            # Bug fix: the cookie value was previously run through
            # get_session_id() *before* the None check, which could fail
            # inside the HMAC helper when no cookie was present.
            return
        self.redis.delete('beaker:' + self.get_session_id(cookie_sid) + ':session')

    def save_session(self, app, session, response):
        """Persist the session to Redis and refresh the cookie."""
        domain = self.get_cookie_domain(app)
        if not session:
            # Empty session: drop the stored copy and clear the cookie.
            self.redis.delete('beaker:' + session.sid + ':session')
            if session.modified:
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain)
            return
        redis_exp = self.get_redis_expiration_time(app, session)
        cookie_exp = self.get_expiration_time(app, session)
        val = self.serializer.dumps(dict(session))
        key = 'beaker:' + session.sid + ':session'
        # NOTE(review): redis-py >= 3.0 changed setex() to (name, time, value);
        # this (name, value, time) order matches redis-py 2.x -- confirm the
        # pinned client version before upgrading.
        self.redis.setex(key, val,
                         int(redis_exp.total_seconds()))
        response.set_cookie(app.session_cookie_name,
                            encode_id(self.secret, session.sid),
                            expires=cookie_exp, httponly=True,
                            domain=domain)
| 34.170732 | 79 | 0.613133 |
ab82a956163ad2fd769108234e138bce5f07c63b | 230 | py | Python | config.py | opensourcedcgy/ESZ-disaggregation | c5b00d5bb2c738209a1adfbe9e603e8d8ad1d16d | [
"Apache-2.0"
] | 2 | 2021-06-02T09:54:31.000Z | 2022-03-17T07:37:30.000Z | config.py | opensourcedcgy/ESZ-disaggregation | c5b00d5bb2c738209a1adfbe9e603e8d8ad1d16d | [
"Apache-2.0"
] | 1 | 2021-05-19T14:00:38.000Z | 2021-05-19T14:00:38.000Z | config.py | opensourcedcgy/ESZ-disaggregation | c5b00d5bb2c738209a1adfbe9e603e8d8ad1d16d | [
"Apache-2.0"
] | 1 | 2022-03-17T07:16:07.000Z | 2022-03-17T07:16:07.000Z | # -*- coding: utf-8 -*-
"""Configs of training"""
default_models = {
'BASE_LOAD': 'simple',
'DISHWASHER': 'exp1',
'DRYER': 'exp1',
'HEATING': 'simple',
'REFRIGERATOR': 'exp1',
'WASHING_MACHINE': 'exp1',
}
| 19.166667 | 30 | 0.552174 |
a240e98fc51e15ba0611d8ca1327cc7e07ef1e79 | 1,446 | py | Python | caffe2/python/operator_test/conftest.py | Hacky-DH/pytorch | 80dc4be615854570aa39a7e36495897d8a040ecc | [
"Intel"
] | 60,067 | 2017-01-18T17:21:31.000Z | 2022-03-31T21:37:45.000Z | caffe2/python/operator_test/conftest.py | Hacky-DH/pytorch | 80dc4be615854570aa39a7e36495897d8a040ecc | [
"Intel"
] | 66,955 | 2017-01-18T17:21:38.000Z | 2022-03-31T23:56:11.000Z | caffe2/python/operator_test/conftest.py | Hacky-DH/pytorch | 80dc4be615854570aa39a7e36495897d8a040ecc | [
"Intel"
] | 19,210 | 2017-01-18T17:45:04.000Z | 2022-03-31T23:51:56.000Z |
import caffe2.python.serialized_test.serialized_test_util as serial
def pytest_addoption(parser):
    """Register the serialized-test CLI options with pytest.

    -G regenerates expected-output files, -O sets their directory,
    -D disables the serialized comparison, -C disables coverage markdown.
    """
    parser.addoption(
        '-G',
        '--generate-serialized',
        action='store_true',
        dest='generate',
        help='generate output files (default=false, compares to current files)',
    )
    parser.addoption(
        '-O',
        '--output',
        default=serial.DATA_DIR,
        dest='output',
        help='output directory (default: %(default)s)'
    )
    parser.addoption(
        '-D',
        '--disable-serialized-check',
        action='store_true',
        dest='disable',
        help='disable checking serialized tests'
    )
    parser.addoption(
        '-C',
        '--disable-gen-coverage',
        action='store_true',
        dest='disable_coverage',
        help='disable generating coverage markdown file'
    )
def pytest_configure(config):
    """Copy the parsed CLI options into the serialized-test output context."""
    generate = config.getoption('generate', default=False)
    output = config.getoption('output', default=serial.DATA_DIR)
    disable = config.getoption('disable', default=False)
    disable_coverage = config.getoption('disable_coverage', default=False)
    # Publish the settings on the shared context object read by the tests.
    serial._output_context.__setattr__('should_generate_output', generate)
    serial._output_context.__setattr__('output_dir', output)
    serial._output_context.__setattr__('disable_serialized_check', disable)
    serial._output_context.__setattr__('disable_gen_coverage', disable_coverage)
| 29.510204 | 80 | 0.663209 |
b95a6521d07a5493d2a6ac5e2e26a72d9023f4b9 | 203 | py | Python | Hackathon 4.0_2021-01-08_07-11-48.py | ClointFusion-Community/CFC-Projects | c6381738ade07e6e8979bbae37400ec2b4e626c5 | [
"MIT"
] | null | null | null | Hackathon 4.0_2021-01-08_07-11-48.py | ClointFusion-Community/CFC-Projects | c6381738ade07e6e8979bbae37400ec2b4e626c5 | [
"MIT"
] | null | null | null | Hackathon 4.0_2021-01-08_07-11-48.py | ClointFusion-Community/CFC-Projects | c6381738ade07e6e8979bbae37400ec2b4e626c5 | [
"MIT"
] | null | null | null | # This code is generated automatically by ClointFusion BOT Builder Tool.
import ClointFusion as cf
import time
cf.window_show_desktop()
cf.mouse_click(int(cf.pg.size()[0]/2),int(cf.pg.size()[1]/2))
| 25.375 | 72 | 0.748768 |
c54ed465d1c1a39a883c0a73517ecf00b945aa16 | 5,748 | py | Python | scripts/boot/configure.py | asteroidrush/ar-node | 5dc946a55cf592b663f3febbeed75244eda49f28 | [
"MIT"
] | null | null | null | scripts/boot/configure.py | asteroidrush/ar-node | 5dc946a55cf592b663f3febbeed75244eda49f28 | [
"MIT"
] | null | null | null | scripts/boot/configure.py | asteroidrush/ar-node | 5dc946a55cf592b663f3febbeed75244eda49f28 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import argparse
import json
from os.path import abspath
from time import sleep
from src.accounts import AccountsManager
from src.auth import AuthManager
from src.components import BootNode, Wallet, Cleos, Token
from src.contracts import ContractsManager
from src.process import ProcessManager
'''================= Read configurations ================='''
# boot_config.json drives tokens, accounts, contracts, params and permissions.
configs = json.load(open(abspath('./boot_config.json')))
parser = argparse.ArgumentParser()
parser.add_argument('--public-key', metavar='', help="Boot Public Key",
                    default='EOS6DovkiCze69bSzptXRnth7crDP1J6XvaXu1hJMJfgWdDPC45Fy', dest="public_key")
parser.add_argument('--private-Key', metavar='', help="Boot Private Key",
                    default='5KfjdDqaKCiDpMern6mGmtL4HNzWiRxRSF5mZUg9uFDrfk3xYT1', dest="private_key")
parser.add_argument('--faucet-public-key', metavar='', help="Faucet public key",
                    default='EOS7zFCW3qHBoMt6LEjUQGDsZv12fRyb7xNC9hN3nTxK9kix7CEec', dest="faucet_public_key")
parser.add_argument('--data-dir', metavar='', help="Path to data directory", default='')
parser.add_argument('--wallet-dir', metavar='', help="Path to wallet directory", default='./wallet/')
parser.add_argument('--genesis-json', metavar='', help="Path to genesis.json", default="./genesis.json")
parser.add_argument('--keosd', metavar='', help="Path to keosd binary",
                    default='../../build/programs/keosd/keosd --http-server-address=127.0.0.1:8020 '
                            '--http-alias=keosd:8020 --http-alias=localhost:8020'
                    )
parser.add_argument('--nodeos', metavar='', help="Path to nodeos binary",
                    default='../../build/programs/nodeos/nodeos '
                            '--http-alias=nodeosd:8000 --http-alias=127.0.0.1:8000 '
                            '--http-alias=localhost:8000 --http-server-address=0.0.0.0:8000 '
                            '--bnet-endpoint=0.0.0.0:8001 --p2p-listen-endpoint=0.0.0.0:8002'
                    )
parser.add_argument('--cleos', metavar='', help="Cleos command",
                    default='../../build/programs/cleos/cleos --url=http://127.0.0.1:8000 --wallet-url=http://127.0.0.1:8020')
parser.add_argument('--log-path', metavar='', help="Path to log file", default='./output.log')
parser.add_argument('--contracts-dir', metavar='', help="Path to contracts directory", default='../../build/contracts/')
args = parser.parse_args()
'''================= Clear running instances ================='''
# Kill any previously running node/wallet daemons before starting fresh.
ProcessManager.init_log(open(args.log_path, 'a'))
ProcessManager.run('killall keosd nodeos || true')
ProcessManager.sleep(1.5)
'''================= Initialize base components ================='''
node = BootNode(args.nodeos, args.data_dir, args.genesis_json)
node.start(args.public_key, args.private_key)
sleep(2)
cleos = Cleos(args.cleos)
wallet = Wallet(args.keosd, args.wallet_dir, cleos)
wallet.start()
wallet.import_key(args.private_key)
'''================= Blockchain initialization ================='''
# Grant the faucet key a dedicated "createaccnt" permission for newaccount.
auth_manager = AuthManager(cleos)
auth_manager.set_account_permission('eosio', 'createaccnt',
                                    [
                                        {
                                            'pub': args.faucet_public_key,
                                            'weight': 1
                                        }
                                    ],
                                    [], "active"
                                    )
auth_manager.set_action_permission('eosio', 'eosio', 'newaccount', 'createaccnt')
contracts_manager = ContractsManager(args.contracts_dir, cleos)
accounts_manager = AccountsManager(wallet, cleos, contracts_manager, configs['tokens'], args.public_key)
accounts_manager.create_system_accounts()
contracts_manager.unlock_contract_uploading("eosio")
contracts_manager.install_base_contracts()
contracts_manager.install_system_contract()
# Create and (optionally) issue each configured token.
for data in configs['tokens'].values():
    token = Token(data['shortcut'], data['max_supply'], data['precision'], cleos)
    token.create()
    if data['supply']:
        token.issue(data['supply'])
accounts_manager.create_accounts(configs['accounts'])
contracts_manager.install_contracts(configs['contracts'])
# Setup parameters blockchain parameters
params_mapping = {
    'max_ram': 'setmaxram',
    'max_accounts': 'setmaxaccounts',
    'payment_bucket_per_year': 'setpaymentbucketperyear'
}
for key, value in configs['params'].items():
    if value != 'default':
        cleos.run("system %s %s" % (params_mapping[key], value))
# All configs were applied, now we can setup real permissions
for account in configs['accounts']:
    permissions = account.get('permissions')
    if not permissions:
        auth_manager.update_key_auth(account['name'], account['pub'], account['pub'])
        continue
    if account.get('pub'):
        raise Exception("You can't set both pub and permissions fields")
    for perm in permissions:
        auth_manager.set_account_permission(account['name'], perm['name'], perm['keys'], perm['accounts'], perm.get('parent'))
        for action in perm['actions']:
            auth_manager.set_action_permission(account['name'], action['code'], action['name'], perm['name'])
# Hand system accounts over to eosio; optionally hand eosio to government.
for a in AccountsManager.system_accounts:
    auth_manager.resign(a, ['eosio'])
if configs['enable_government']:
    auth_manager.resign(AccountsManager.government_account,
                        [account['name'] for account in configs['accounts'] if account['management']])
    auth_manager.resign('eosio', [AccountsManager.government_account])
'''================= Lock current process ================='''
ProcessManager.lock_process()
print("Complete")
| 39.641379 | 126 | 0.635699 |
e42e07351ce1ef0f2acc683f5d488cea43eeb33d | 2,043 | py | Python | setup.py | alexdb27/PBxplore | 10877bd5e54d9f40b4a0083c276ee1982c61b417 | [
"MIT"
] | 20 | 2015-03-06T15:02:56.000Z | 2021-09-18T21:52:11.000Z | setup.py | alexdb27/PBxplore | 10877bd5e54d9f40b4a0083c276ee1982c61b417 | [
"MIT"
] | 131 | 2015-01-15T17:04:12.000Z | 2021-01-27T14:45:42.000Z | setup.py | alexdb27/PBxplore | 10877bd5e54d9f40b4a0083c276ee1982c61b417 | [
"MIT"
] | 14 | 2015-03-08T18:36:23.000Z | 2021-09-14T10:54:13.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
# Read the long description from the README next to this setup.py.
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as f:
    readme = f.read()
# Extras requirements for optional dependencies
extras = {
    'analysis': ['weblogo>=3.7'],
    'all': ['weblogo>=3.7']
}
# Version number must be in sync with the one in pbxplore/__init__.py
setup(
    name='pbxplore',
    version='1.4.0',
    description="PBxplore is a suite of tools dedicated to Protein Block analysis.",
    long_description=readme,
    url='https://github.com/pierrepo/PBxplore',
    # Author details
    author='Pierre Poulain',
    author_email='pierre.poulain@cupnet.net',
    license='MIT',
    classifiers=[
        'Development Status :: 4 - Beta',
        # Indicate who your project is intended for
        'Intended Audience :: Science/Research',
        'Intended Audience :: Developers',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
        'Topic :: Scientific/Engineering :: Chemistry',
        'Topic :: Scientific/Engineering :: Physics',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
    ],
    setup_requires=['pytest-runner'],
    python_requires='>=3.6',
    install_requires=['numpy', 'MDAnalysis>=0.11', 'matplotlib'],
    tests_require=['pytest', 'pytest-raises', 'coverage'],
    # List additional groups of dependencies here
    # To install, use
    # $ pip install -e .[analysis]
    extras_require=extras,
    packages=find_packages(exclude=['test']),
    include_package_data=True,
    package_data={'pbxplore': ['demo/*']},
    # Console entry points expose the PB* command-line tools.
    entry_points={
        'console_scripts': [
            'PBassign = pbxplore.scripts.PBassign:pbassign_cli',
            'PBcount = pbxplore.scripts.PBcount:pbcount_cli',
            'PBstat = pbxplore.scripts.PBstat:pbstat_cli',
        ],
    },
)
| 27.24 | 84 | 0.632403 |
f3063349b685b4990c5983e98af36fa45fe103fd | 1,414 | py | Python | 4. Fundamentals/black_scholes.py | conquerv0/Pynance | 02dcfffd0a54374c603baad02ee31bc7ced7f670 | [
"Apache-2.0"
] | 16 | 2020-04-18T17:28:40.000Z | 2022-02-11T01:11:21.000Z | 4. Fundamentals/black_scholes.py | conquerv0/Pynance | 02dcfffd0a54374c603baad02ee31bc7ced7f670 | [
"Apache-2.0"
] | 1 | 2020-07-22T02:22:03.000Z | 2020-07-22T02:22:03.000Z | 4. Fundamentals/black_scholes.py | conquerv0/Pynance | 02dcfffd0a54374c603baad02ee31bc7ced7f670 | [
"Apache-2.0"
] | 6 | 2020-06-29T17:37:47.000Z | 2022-02-13T17:02:39.000Z | import pandas_datareader as pdr
from pandas_datareader import data, wb
from datetime import date
import numpy as np
import pandas as pd
from scipy import log,exp,sqrt,stats
def blackscholes_call(S, E, T, rf, sigma):
    """Price a European call option with the Black-Scholes formula.

    Parameters
    ----------
    S : float
        Current price of the underlying asset.
    E : float
        Strike (exercise) price.
    T : float
        Time to expiry in years.
    rf : float
        Annualized continuously-compounded risk-free rate.
    sigma : float
        Annualized volatility of the underlying asset.

    Returns
    -------
    float
        Black-Scholes value of the call option.
    """
    # SciPy stopped re-exporting `log`/`exp`/`sqrt` (deprecated in 1.0,
    # removed in 1.6), so use the `math` equivalents; only the standard
    # normal CDF comes from scipy.stats.
    from math import exp, log, sqrt
    from scipy.stats import norm

    d1 = (log(S / E) + (rf + sigma * sigma / 2.0) * T) / (sigma * sqrt(T))
    d2 = d1 - sigma * sqrt(T)
    # C = S*N(d1) - E*exp(-rf*T)*N(d2).  (Leftover debug prints of d1/d2
    # were removed.)
    return S * norm.cdf(d1) - E * exp(-rf * T) * norm.cdf(d2)
def blackscholes_put(S, E, T, rf, sigma):
    """Price a European put option with the Black-Scholes formula.

    Parameters
    ----------
    S : float
        Current price of the underlying asset.
    E : float
        Strike (exercise) price.
    T : float
        Time to expiry in years.
    rf : float
        Annualized continuously-compounded risk-free rate.
    sigma : float
        Annualized volatility of the underlying asset.

    Returns
    -------
    float
        Black-Scholes value of the put option.
    """
    # SciPy stopped re-exporting `log`/`exp`/`sqrt` (deprecated in 1.0,
    # removed in 1.6), so use the `math` equivalents; only the standard
    # normal CDF comes from scipy.stats.
    from math import exp, log, sqrt
    from scipy.stats import norm

    d1 = (log(S / E) + (rf + sigma * sigma / 2.0) * T) / (sigma * sqrt(T))
    d2 = d1 - sigma * sqrt(T)
    # P = E*exp(-rf*T)*N(-d2) - S*N(-d1).
    return -S * norm.cdf(-d1) + E * exp(-rf * T) * norm.cdf(-d2)
if __name__ == "__main__":
    # Demo: price an at-the-money 1-year European call and put.
    spot = 100      # underlying stock price at t=0
    strike = 100    # strike price
    expiry = 1      # time to expiry, in years
    rate = 0.05     # risk-free rate
    vol = 0.2       # volatility of the underlying stock
    print("Call option price according to Black-Scholes model: ",
          blackscholes_call(spot, strike, expiry, rate, vol))
    print("Put option price according to Black-Scholes model: ",
          blackscholes_put(spot, strike, expiry, rate, vol))
| 32.136364 | 97 | 0.71075 |
4857f8f00c7ce007ff7ac367bb5ddd1f0d59e571 | 278 | py | Python | prime.py | Enfioz/enpands-problems | ac6b8904706e3acb50077e3ffe08f39f0b5b31be | [
"MIT"
] | null | null | null | prime.py | Enfioz/enpands-problems | ac6b8904706e3acb50077e3ffe08f39f0b5b31be | [
"MIT"
] | null | null | null | prime.py | Enfioz/enpands-problems | ac6b8904706e3acb50077e3ffe08f39f0b5b31be | [
"MIT"
] | null | null | null | # Patick Corcoran
# Check if a number is prime.
# The primes are 2, 3, 5, 7, 11, 13, ...
p = 347

def is_prime(n):
    """Return True if n is a prime number.

    Handles n < 2 explicitly: the original bare loop left `isprime = True`
    for 0, 1 and negative numbers because `range(2, n-1)` was empty.
    Trial division only needs to test divisors up to sqrt(n).
    """
    if n < 2:
        return False
    for m in range(2, int(n ** 0.5) + 1):
        if n % m == 0:
            return False
    return True

if is_prime(p):
    print(p, "is a prime number.")
else:
    # Message typo fixed: trailing comma -> period.
    print(p, "is not prime.")
f34c3ce15ff254617d435888dd27e75dec076acc | 8,557 | py | Python | intersight/model/mo_tag_key_summary.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 5 | 2021-12-16T15:13:32.000Z | 2022-03-29T16:09:54.000Z | intersight/model/mo_tag_key_summary.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 4 | 2022-01-25T19:05:51.000Z | 2022-03-29T20:18:37.000Z | intersight/model/mo_tag_key_summary.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 2 | 2020-07-07T15:01:08.000Z | 2022-01-31T04:27:35.000Z | """
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
class MoTagKeySummary(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-restricted properties for this schema.
    allowed_values = {
    }

    # No length/range/regex constraints for this schema.
    validations = {
    }

    # None here matters in __init__ below: unknown kwargs may be discarded
    # only when no additional-properties type is declared.
    additional_properties_type = None

    # The model itself does not accept the JSON null value.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'key': (str,),  # noqa: E501
            'num_keys': (int,),  # noqa: E501
            'values': ([str],),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This schema has no discriminator field.
        return None

    # Python attribute name -> JSON key used on the wire.
    attribute_map = {
        'key': 'Key',  # noqa: E501
        'num_keys': 'NumKeys',  # noqa: E501
        'values': 'Values',  # noqa: E501
    }

    # No composed (allOf/anyOf/oneOf) schemas.
    _composed_schemas = {}

    # Names reserved for internal bookkeeping state; exactly the attributes
    # assigned directly on `self` in __init__ below.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """MoTagKeySummary - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            key (str): The tag key for which usage information is provided.. [optional]  # noqa: E501
            num_keys (int): The number of times this tag Key has been set in an API resource.. [optional]  # noqa: E501
            values ([str]): A list of all Tag values that have been assigned to this tag Key.. [optional]  # noqa: E501
        """

        # Pop framework-control kwargs first so only model properties remain.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Generated models are keyword-only; positional args are always an error.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Remaining kwargs are model properties; an unknown key is silently
        # dropped only when the configuration asks for it and the schema
        # declares no additional-properties type.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
| 49.178161 | 1,678 | 0.629777 |
ca896244bfe2c8c2c3b5e3011c45d4b6935af15c | 1,483 | py | Python | iron-unconditioned-reflexes/NeuralNetwork/NeuralNetwork.py | zshimanchik/iron-unconditioned-reflexes | 22897bad377361ca85aac6d6e8ab1c6eaa069032 | [
"MIT"
] | null | null | null | iron-unconditioned-reflexes/NeuralNetwork/NeuralNetwork.py | zshimanchik/iron-unconditioned-reflexes | 22897bad377361ca85aac6d6e8ab1c6eaa069032 | [
"MIT"
] | 5 | 2016-02-15T16:28:33.000Z | 2016-05-04T20:47:10.000Z | iron-unconditioned-reflexes/NeuralNetwork/NeuralNetwork.py | zshimanchik/iron-unconditioned-reflexes | 22897bad377361ca85aac6d6e8ab1c6eaa069032 | [
"MIT"
] | null | null | null | from Layer import InputLayer, Layer, Readiness
class NeuralNetwork:
    """Feed-forward network of ``Layer`` objects chained via listeners.

    ``shape[i]`` is the neuron count of layer i: shape[0] is the input
    layer, shape[-1] the output layer, everything in between is hidden.
    """

    def __init__(self, shape):
        self.layers = []
        self.shape = shape
        self.time = 0
        # Build the chain: each layer registers itself as a listener of
        # its predecessor.
        self.input_layer = InputLayer(shape[0])
        self.layers.append(self.input_layer)
        previous = self.input_layer
        for size in shape[1:-1]:
            hidden = Layer(size, previous.neurons)
            previous.listeners.append(hidden)
            self.layers.append(hidden)
            previous = hidden
        self.output_layer = Layer(shape[-1], previous.neurons)
        previous.listeners.append(self.output_layer)
        self.layers.append(self.output_layer)
        self._reset_layers_states()

    def __len__(self):
        return len(self.shape)

    def __getitem__(self, index):
        return self.layers[index]

    def calculate(self, x):
        """
        :param x: list of input values
        :return: list of values which is result of network calculation
        """
        self.input_layer.input_values = x
        # Sweep the layers repeatedly until a full pass finds nothing READY.
        progressed = True
        while progressed:
            progressed = False
            for layer in self:
                if layer.ready_to_calculate == Readiness.READY:
                    layer.calculate()
                    progressed = True
        self._reset_layers_states()
        return self.output_layer.get_output_values()

    def _reset_layers_states(self):
        # Return every layer to its idle state between forward passes.
        for layer in self:
            layer.reset_state()
23ccbdfb4f65d41fe11775768d0f96600138385b | 2,389 | py | Python | cupy/manipulation/basic.py | andravin/cupy | 833a38b5f797c9d44548847f17469117dc8856a4 | [
"MIT"
] | 1 | 2019-12-01T09:08:14.000Z | 2019-12-01T09:08:14.000Z | cupy/manipulation/basic.py | hephaex/cupy | 5cf50a93bbdebe825337ed7996c464e84b1495ba | [
"MIT"
] | 1 | 2019-08-05T09:36:13.000Z | 2019-08-06T12:03:01.000Z | cupy/manipulation/basic.py | hephaex/cupy | 5cf50a93bbdebe825337ed7996c464e84b1495ba | [
"MIT"
] | 1 | 2022-03-24T13:19:55.000Z | 2022-03-24T13:19:55.000Z | import numpy
import six
from cupy import core
from cupy.core import fusion
from cupy.sorting import search
def copyto(dst, src, casting='same_kind', where=None):
    """Copies values from one array to another with broadcasting.

    This function can be called for arrays on different devices. In this case,
    casting, ``where``, and broadcasting is not supported, and an exception is
    raised if these are used.

    Args:
        dst (cupy.ndarray): Target array.
        src (cupy.ndarray): Source array.
        casting (str): Casting rule. See :func:`numpy.can_cast` for detail.
        where (cupy.ndarray of bool): If specified, this array acts as a mask,
            and an element is copied only if the corresponding element of
            ``where`` is True.

    .. seealso:: :func:`numpy.copyto`

    """
    src_type = type(src)
    # Host-side Python scalars (and fused scalar placeholders) carry no
    # dtype of their own, so they get value-based casting below.
    src_is_python_scalar = (src_type in six.integer_types or
                            src_type in (bool, float, complex) or
                            src_type is fusion._FusionVarScalar)
    if src_is_python_scalar:
        src_dtype = numpy.dtype(type(src))
        # Passing the value itself lets numpy.can_cast apply value-based
        # rules (e.g. a small int can cast into a narrow dtype).
        can_cast = numpy.can_cast(src, dst.dtype, casting)
    else:
        src_dtype = src.dtype
        can_cast = numpy.can_cast(src_dtype, dst.dtype, casting)
    if not can_cast:
        raise TypeError('Cannot cast %s to %s in %s casting mode' %
                        (src_dtype, dst.dtype, casting))

    # While fusing, record copy/where ops into the fusion graph rather
    # than launching kernels immediately.
    if fusion._is_fusing():
        if where is None:
            core.elementwise_copy(src, dst)
        else:
            fusion._call_ufunc(search._where_ufunc, where, src, dst, dst)
        return

    # Nothing to do for an empty destination.
    if dst.size == 0:
        return

    # Unmasked scalar assignment reduces to a fill.
    if src_is_python_scalar and where is None:
        dst.fill(src)
        return

    if where is None:
        if _can_memcpy(dst, src):
            # Identical layout/dtype/size: raw device-to-device memcpy.
            dst.data.copy_from(src.data, src.nbytes)
        else:
            device = dst.device
            with device:
                if src.device != device:
                    # Bring src onto dst's device first, then copy
                    # elementwise on that device.
                    src = src.copy()
                core.elementwise_copy(src, dst)
    else:
        # Masked copy: only elements where `where` is True are written.
        core.elementwise_copy_where(src, where, dst)
def _can_memcpy(dst, src):
c_contiguous = dst.flags.c_contiguous and src.flags.c_contiguous
f_contiguous = dst.flags.f_contiguous and src.flags.f_contiguous
return (c_contiguous or f_contiguous) and dst.dtype == src.dtype and \
dst.size == src.size
| 32.283784 | 78 | 0.619506 |
8c51b0a525926cfe17bcba91235f40185d972c44 | 6,361 | py | Python | python/mxnet/gluon/nn/activations.py | paulk-asert/incubator-mxnet | 6acf7e6a051e75d9f1cca0ec3c198c38c0f6a3fe | [
"Apache-2.0"
] | 228 | 2018-12-06T09:34:01.000Z | 2022-03-08T17:02:02.000Z | python/mxnet/gluon/nn/activations.py | urantialife/incubator-mxnet | bcff49888fdaae6b9922de4d4712d505cf33c596 | [
"Apache-2.0"
] | 29 | 2020-09-05T00:57:25.000Z | 2022-02-26T14:48:52.000Z | python/mxnet/gluon/nn/activations.py | urantialife/incubator-mxnet | bcff49888fdaae6b9922de4d4712d505cf33c596 | [
"Apache-2.0"
] | 34 | 2018-12-14T02:59:53.000Z | 2022-01-22T14:15:19.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable= arguments-differ
"""Basic neural network layers."""
__all__ = ['Activation', 'LeakyReLU', 'PReLU', 'ELU', 'SELU', 'Swish', 'GELU']
from ... import initializer
from ..block import HybridBlock
class Activation(HybridBlock):
    r"""Applies a named activation function element-wise to its input.

    Parameters
    ----------
    activation : str
        Name of the activation to use; any ``act_type`` accepted by
        :func:`~mxnet.ndarray.Activation` is valid.


    Inputs:
        - **data**: input tensor with arbitrary shape.

    Outputs:
        - **out**: output tensor with the same shape as `data`.
    """
    def __init__(self, activation, **kwargs):
        # Set before super().__init__ — the parent constructor presumably
        # consults _alias() when deriving the block name; keep this order.
        self._act_type = activation
        super(Activation, self).__init__(**kwargs)

    def _alias(self):
        # The block is aliased after the activation it wraps.
        return self._act_type

    def hybrid_forward(self, F, x):
        return F.Activation(x, act_type=self._act_type, name='fwd')

    def __repr__(self):
        return '{}({})'.format(self.__class__.__name__, self._act_type)
class LeakyReLU(HybridBlock):
r"""Leaky version of a Rectified Linear Unit.
It allows a small gradient when the unit is not active
.. math::
f\left(x\right) = \left\{
\begin{array}{lr}
\alpha x & : x \lt 0 \\
x & : x \geq 0 \\
\end{array}
\right.\\
Parameters
----------
alpha : float
slope coefficient for the negative half axis. Must be >= 0.
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
"""
def __init__(self, alpha, **kwargs):
assert alpha >= 0, "Slope coefficient for LeakyReLU must be no less than 0."
super(LeakyReLU, self).__init__(**kwargs)
self._alpha = alpha
def hybrid_forward(self, F, x):
return F.LeakyReLU(x, act_type='leaky', slope=self._alpha, name='fwd')
def __repr__(self):
s = '{name}({alpha})'
return s.format(name=self.__class__.__name__,
alpha=self._alpha)
class PReLU(HybridBlock):
r"""Parametric leaky version of a Rectified Linear Unit.
<https://arxiv.org/abs/1502.01852>`_ paper.
It learns a gradient when the unit is not active
.. math::
f\left(x\right) = \left\{
\begin{array}{lr}
\alpha x & : x \lt 0 \\
x & : x \geq 0 \\
\end{array}
\right.\\
where alpha is a learned parameter.
Parameters
----------
alpha_initializer : Initializer
Initializer for the `embeddings` matrix.
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
"""
def __init__(self, alpha_initializer=initializer.Constant(0.25), **kwargs):
super(PReLU, self).__init__(**kwargs)
with self.name_scope():
self.alpha = self.params.get('alpha', shape=(1,), init=alpha_initializer)
def hybrid_forward(self, F, x, alpha):
return F.LeakyReLU(x, gamma=alpha, act_type='prelu', name='fwd')
class ELU(HybridBlock):
r"""
Exponential Linear Unit (ELU)
"Fast and Accurate Deep Network Learning by Exponential Linear Units", Clevert et al, 2016
https://arxiv.org/abs/1511.07289
Published as a conference paper at ICLR 2016
Parameters
----------
alpha : float
The alpha parameter as described by Clevert et al, 2016
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
"""
def __init__(self, alpha=1.0, **kwargs):
super(ELU, self).__init__(**kwargs)
self._alpha = alpha
def hybrid_forward(self, F, x):
return F.LeakyReLU(x, act_type='elu', slope=self._alpha)
class SELU(HybridBlock):
r"""
Scaled Exponential Linear Unit (SELU)
"Self-Normalizing Neural Networks", Klambauer et al, 2017
https://arxiv.org/abs/1706.02515
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
"""
def __init__(self, **kwargs):
super(SELU, self).__init__(**kwargs)
def hybrid_forward(self, F, x):
return F.LeakyReLU(x, act_type='selu', name='fwd')
class GELU(HybridBlock):
r"""
Gaussian Exponential Linear Unit (GELU)
"Gaussian Error Linear Units (GELUs)", Hendrycks et al, 2016
https://arxiv.org/abs/1606.08415
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
"""
def __init__(self, **kwargs):
super(GELU, self).__init__(**kwargs)
def hybrid_forward(self, F, x):
return F.LeakyReLU(x, act_type='gelu', name='fwd')
class Swish(HybridBlock):
r"""
Swish Activation function
https://arxiv.org/pdf/1710.05941.pdf
Parameters
----------
beta : float
swish(x) = x * sigmoid(beta*x)
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`.
"""
def __init__(self, beta=1.0, **kwargs):
super(Swish, self).__init__(**kwargs)
self._beta = beta
def hybrid_forward(self, F, x):
return x * F.sigmoid(self._beta * x, name='fwd')
| 27.899123 | 98 | 0.611539 |
82f667c2972bf4eb56b199d7b9b35515e8d74cca | 1,183 | py | Python | Whatsapp Bot with Loop.py | Hetansh-patel/Whataspp-bot | b97ede21a140d809328159dfd08beeff553787cb | [
"Unlicense"
] | null | null | null | Whatsapp Bot with Loop.py | Hetansh-patel/Whataspp-bot | b97ede21a140d809328159dfd08beeff553787cb | [
"Unlicense"
] | null | null | null | Whatsapp Bot with Loop.py | Hetansh-patel/Whataspp-bot | b97ede21a140d809328159dfd08beeff553787cb | [
"Unlicense"
] | null | null | null | # Bot to send message in WhatasppWeb
# Selenium WebDriver drives a real Chrome session; `time` is used for a
# fixed wait while the operator logs in.
from selenium import webdriver
import time
# Launch Chrome via a local chromedriver binary and open WhatsApp Web.
# NOTE(review): `executable_path` was removed in Selenium 4 (use a Service
# object instead); this script assumes Selenium 3.x — confirm the version.
chrome_browser = webdriver.Chrome(
    executable_path = 'P:/chromedriver.exe'
)
chrome_browser.get('https://web.whatsapp.com/')
# Fixed pause so the user can scan the WhatsApp Web QR code.
# NOTE(review): 15s is a guess; an explicit wait on a post-login element
# would be more reliable.
time.sleep(15)
# Ask the operator for the chat name, the message text and the repeat count.
user_name = input("group or person you want to send the message : ")
msg = input("enter the message you want to send : ")
count = int(input("enter the number of times you want to print the message"))
# Find the chat entry whose title matches the requested name and open it.
# NOTE(review): find_element_by_xpath was removed in Selenium 4
# (use find_element(By.XPATH, ...)).
user = chrome_browser.find_element_by_xpath('//span[@title="{}"]'.format(user_name))
user.click()
# Locate the message input box (the original comment wrongly said "button").
# NOTE(review): obfuscated class names such as _3uMse change whenever
# WhatsApp ships a new build — expect this selector to break.
message_box = chrome_browser.find_element_by_xpath('//div[@class="_3uMse"]')
# Type and send the message `count` times.
for index in range(count):
    message_box.send_keys(msg)
    # Click the send button for the message just typed.
    chrome_browser.find_element_by_xpath('//button[@class="_1U1xa"]').click()
print("success")
5dc6d396f3cb22ee822eb9b23a50aca1efb8ae0e | 5,934 | py | Python | download.py | superChoi7/DeepJ | b4d0964079daf9ce5fb15aface3cc0efcc4a01bf | [
"MIT"
] | 1 | 2021-12-31T19:08:16.000Z | 2021-12-31T19:08:16.000Z | download.py | superChoi7/DeepJ | b4d0964079daf9ce5fb15aface3cc0efcc4a01bf | [
"MIT"
] | null | null | null | download.py | superChoi7/DeepJ | b4d0964079daf9ce5fb15aface3cc0efcc4a01bf | [
"MIT"
] | 1 | 2022-02-28T00:42:50.000Z | 2022-02-28T00:42:50.000Z | import os
from pathlib import Path
# Download target directories, grouped by era (index 0: classical,
# index 1: romantic) and then by composer.  styles[i][j] is the destination
# for the URL list dataurls[i][j] below — keep the two structures parallel.
styles = [
    [
        'data/classical/beethoven',
        'data/classical/haydn',
        'data/classical/mozart'
    ],
    [
        'data/romantic/borodin',
        'data/romantic/brahms',
        'data/romantic/tschai'
    ]
]
# MIDI source URLs from piano-midi.de, parallel to `styles` above:
# dataurls[i][j] is downloaded into styles[i][j].
dataurls = [
    [
        [
            'http://www.piano-midi.de/midis/beethoven/beethoven_opus10_1_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/beethoven_opus10_2_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/beethoven_opus10_3_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/pathetique_1_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/pathetique_2_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/pathetique_3_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/beethoven_opus22_1_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/beethoven_opus22_2_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/beethoven_opus22_3_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/beethoven_hammerklavier_1_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/beethoven_hammerklavier_2_format0.mid',
            'http://www.piano-midi.de/midis/beethoven/beethoven_hammerklavier_3_format0.mid'
        ],
        [
            'http://www.piano-midi.de/midis/haydn/haydn_7_1_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_7_2_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_7_3_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_8_1_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_8_2_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_8_3_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_8_4_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_9_1_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_9_2_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_9_3_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_43_1_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_43_2_format0.mid',
            'http://www.piano-midi.de/midis/haydn/haydn_43_3_format0.mid'
        ],
        [
            'http://www.piano-midi.de/midis/mozart/mz_311_1_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_311_2_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_311_3_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_330_1_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_330_2_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_330_3_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_331_1_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_331_2_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_331_3_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_570_1_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_570_2_format0.mid',
            'http://www.piano-midi.de/midis/mozart/mz_570_3_format0.mid'
        ]
    ],
    [
        [
            'http://www.piano-midi.de/midis/borodin/bor_ps1_format0.mid',
            'http://www.piano-midi.de/midis/borodin/bor_ps2_format0.mid',
            'http://www.piano-midi.de/midis/borodin/bor_ps3_format0.mid',
            # NOTE(review): the two entries below look like they were meant
            # to be bor_ps4/bor_ps5 rather than alternate formats of ps1 —
            # verify against piano-midi.de.
            'http://www.piano-midi.de/midis/borodin/bor_ps1_format4.mid',
            'http://www.piano-midi.de/midis/borodin/bor_ps1_format5.mid',
            'http://www.piano-midi.de/midis/borodin/bor_ps6_format0.mid',
            'http://www.piano-midi.de/midis/borodin/bor_ps7_format0.mid'
        ],
        [
            'http://www.piano-midi.de/midis/brahms/brahms_opus1_1_format0.mid',
            'http://www.piano-midi.de/midis/brahms/brahms_opus1_2_format0.mid',
            'http://www.piano-midi.de/midis/brahms/brahms_opus1_3_format0.mid',
            'http://www.piano-midi.de/midis/brahms/brahms_opus1_4_format0.mid',
            'http://www.piano-midi.de/midis/brahms/br_im2_format0.mid',
            'http://www.piano-midi.de/midis/brahms/br_im5_format0.mid',
            'http://www.piano-midi.de/midis/brahms/br_im6_format0.mid',
            'http://www.piano-midi.de/midis/brahms/brahms_opus117_1_format0.mid',
            'http://www.piano-midi.de/midis/brahms/brahms_opus117_2_format0.mid',
            'http://www.piano-midi.de/midis/brahms/br_rhap_format0.mid'
        ],
        [
            'http://www.piano-midi.de/midis/tchaikovsky/ty_januar_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_februar_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_maerz_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_april_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_mai_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_juni_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_juli_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_august_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_september_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_oktober_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_november_format0.mid',
            'http://www.piano-midi.de/midis/tchaikovsky/ty_dezember_format0.mid'
        ]
    ]
]
def downloadDataset(styles, dataurls):
    """Download every MIDI file in `dataurls` into its matching directory.

    styles[i][j] is the target directory for the URL list dataurls[i][j];
    the two nested lists must be parallel.  Directories are created as
    needed; downloads are quiet (`wget -q`).
    """
    import subprocess

    for style_dirs, style_urls in zip(styles, dataurls):
        for target_dir, urls in zip(style_dirs, style_urls):
            Path(target_dir).mkdir(parents=True, exist_ok=True)
            for url in urls:
                # Argument-list form avoids shell interpolation of the URL
                # (the old os.system call built a shell command string) and
                # stops shadowing the `dir` builtin.
                subprocess.run(['wget', '-q', url, '-P', target_dir])
if __name__ == '__main__':
    # Fetch the full piano-midi.de corpus when run as a script.
    downloadDataset(styles, dataurls)
697a2b04c33846e7560244fd3dc6d18b31fdd5e8 | 23,974 | py | Python | python/ambassador/ir/irhttpmapping.py | imoisharma/emissary | 5346ccb06673827a6a2e51ddaf92925f60bd9de9 | [
"Apache-2.0"
] | null | null | null | python/ambassador/ir/irhttpmapping.py | imoisharma/emissary | 5346ccb06673827a6a2e51ddaf92925f60bd9de9 | [
"Apache-2.0"
] | null | null | null | python/ambassador/ir/irhttpmapping.py | imoisharma/emissary | 5346ccb06673827a6a2e51ddaf92925f60bd9de9 | [
"Apache-2.0"
] | null | null | null | from ambassador.utils import RichStatus
from ambassador.utils import ParsedService as Service
from typing import Any, ClassVar, Dict, List, Optional, Type, Union, TYPE_CHECKING
from ..config import Config
from .irbasemapping import IRBaseMapping, normalize_service_name
from .irbasemappinggroup import IRBaseMappingGroup
from .irhttpmappinggroup import IRHTTPMappingGroup
from .irerrorresponse import IRErrorResponse
from .ircors import IRCORS
from .irretrypolicy import IRRetryPolicy
import hashlib
if TYPE_CHECKING:
from .ir import IR # pragma: no cover
# Kind of cheating here so that it's easy to json-serialize key-value pairs (including with regex)
class KeyValueDecorator (dict):
    """A dict whose entries double as attributes, so a name/value/regex
    triple stays trivially JSON-serializable while reading like an object."""

    def __init__(self, name: str, value: Optional[str]=None, regex: Optional[bool]=False) -> None:
        super().__init__()
        # These attribute writes are routed into the dict by __setattr__.
        self.name = name
        self.value = value
        self.regex = regex

    def __getattr__(self, key: str) -> Any:
        # Missing attributes surface with dict semantics (KeyError).
        return self[key]

    def __setattr__(self, key: str, value: Any) -> None:
        self[key] = value

    def _get_value(self) -> str:
        # An absent/empty value matches anything.
        if self.value:
            return self.value
        return '*'

    def length(self) -> int:
        regex_extra = 1 if self.regex else 0
        return len(self.name) + len(self._get_value()) + regex_extra

    def key(self) -> str:
        return self.name + '-' + self._get_value()
class IRHTTPMapping (IRBaseMapping):
prefix: str
headers: List[KeyValueDecorator]
add_request_headers: Dict[str, str]
add_response_headers: Dict[str, str]
method: Optional[str]
service: str
group_id: str
route_weight: List[Union[str, int]]
cors: IRCORS
retry_policy: IRRetryPolicy
error_response_overrides: Optional[IRErrorResponse]
query_parameters: List[KeyValueDecorator]
regex_rewrite: Dict[str,str]
# Keys that are present in AllowedKeys are allowed to be set from kwargs.
# If the value is True, we'll look for a default in the Ambassador module
# if the key is missing. If the value is False, a missing key will simply
# be unset.
#
# Do not include any named parameters (like 'precedence' or 'rewrite').
#
# Any key here will be copied into the mapping. Keys where the only
# processing is to set something else (like 'host' and 'method', whose
# which only need to set the ':authority' and ':method' headers) must
# _not_ be included here. Keys that need to be copied _and_ have special
# processing (like 'service', which must be copied and used to wrangle
# Linkerd headers) _do_ need to be included.
AllowedKeys: ClassVar[Dict[str, bool]] = {
"add_linkerd_headers": False,
# Do not include add_request_headers and add_response_headers
"auto_host_rewrite": False,
"bypass_auth": False,
"auth_context_extensions": False,
"bypass_error_response_overrides": False,
"case_sensitive": False,
"circuit_breakers": False,
"cluster_idle_timeout_ms": False,
"cluster_max_connection_lifetime_ms": False,
# Do not include cluster_tag
"connect_timeout_ms": False,
"cors": False,
"docs": False,
"enable_ipv4": False,
"enable_ipv6": False,
"error_response_overrides": False,
"grpc": False,
# Do not include headers
# Do not include host
# Do not include hostname
"host_redirect": False,
"host_regex": False,
"host_rewrite": False,
"idle_timeout_ms": False,
"keepalive": False,
"labels": False, # Not supported in v0; requires v1+; handled in setup
"load_balancer": False,
"metadata_labels": False,
# Do not include method
"method_regex": False,
"path_redirect": False,
"prefix_redirect": False,
"regex_redirect": False,
"redirect_response_code": False,
# Do not include precedence
"prefix": False,
"prefix_exact": False,
"prefix_regex": False,
"priority": False,
"rate_limits": False, # Only supported in v0; replaced by "labels" in v1; handled in setup
# Do not include regex_headers
"remove_request_headers": True,
"remove_response_headers": True,
"resolver": False,
"retry_policy": False,
# Do not include rewrite
"service": False, # See notes above
"shadow": False,
"stats_name": True,
"timeout_ms": False,
"tls": False,
"use_websocket": False,
"allow_upgrade": False,
"weight": False,
# Include the serialization, too.
"serialization": False,
}
    def __init__(self, ir: 'IR', aconf: Config,
                 rkey: str, # REQUIRED
                 name: str, # REQUIRED
                 location: str, # REQUIRED
                 service: str, # REQUIRED
                 namespace: Optional[str] = None,
                 metadata_labels: Optional[Dict[str, str]] = None,
                 kind: str="IRHTTPMapping",
                 apiVersion: str="getambassador.io/v3alpha1", # Not a typo! See below.
                 precedence: int=0,
                 rewrite: str="/",
                 cluster_tag: Optional[str]=None,
                 **kwargs) -> None:
        """Build an HTTP Mapping IR resource from a config resource's fields.

        Copies the allowed keys out of kwargs (falling back to lookup_default()
        where AllowedKeys permits), folds host / hostname / :authority-header
        inputs into a single host match plus KeyValueDecorator matchers,
        qualifies the service name against the active resolver, and finally
        initializes the superclass with the assembled arguments.

        Errors discovered here (e.g. '*' in an exact host match) cannot be
        posted yet -- the resource isn't initialized -- so they are stashed in
        new_args["_deferred_error"] and posted from setup().
        """
        # OK, this is a bit of a pain. We want to preserve the name and rkey and
        # such here, unlike most kinds of IRResource, so we shallow copy the keys
        # we're going to allow from the incoming kwargs.
        #
        # NOTE WELL: things that we explicitly process below should _not_ be listed in
        # AllowedKeys. The point of AllowedKeys is this loop below.
        new_args = {}
        # When we look up defaults, use lookup class "httpmapping"... and yeah, we need the
        # IR, too.
        self.default_class = "httpmapping"
        self.ir = ir
        for key, check_defaults in IRHTTPMapping.AllowedKeys.items():
            # Do we have a keyword arg for this key?
            if key in kwargs:
                # Yes, it wins.
                value = kwargs[key]
                new_args[key] = value
            elif check_defaults:
                # No value in kwargs, but we're allowed to check defaults for it.
                value = self.lookup_default(key)
                if value is not None:
                    new_args[key] = value
        # add_linkerd_headers is special, because we support setting it as a default
        # in the bare Ambassador module. We should really toss this and use the defaults
        # mechanism, but not for 1.4.3.
        if "add_linkerd_headers" not in new_args:
            # They didn't set it explicitly, so check for the older way.
            add_linkerd_headers = self.ir.ambassador_module.get('add_linkerd_headers', None)
            if add_linkerd_headers != None:
                new_args["add_linkerd_headers"] = add_linkerd_headers
        # OK. On to set up the headers (since we need them to compute our group ID).
        hdrs = []
        query_parameters = []
        regex_rewrite = kwargs.get('regex_rewrite', {})
        # Start by assuming that nothing in our arguments mentions hosts (so no host and no host_regex).
        host = None
        host_regex = False
        # Also start self.host as unspecified.
        self.host = None
        # OK. Start by looking for a :authority header match.
        #
        # NOTE(review): the loop variable 'name' below shadows the 'name'
        # parameter, so the debug logs in this loop (and the host/hostname
        # logs after it, which reuse 'name') print a header name, not the
        # Mapping name -- confirm whether that's intended.
        if 'headers' in kwargs:
            for name, value in kwargs.get('headers', {}).items():
                if value is True:
                    hdrs.append(KeyValueDecorator(name))
                else:
                    # An exact match on the :authority header is special -- treat it like
                    # they set the "host" element (but note that we'll allow the actual
                    # "host" element to override it later).
                    if name.lower() == ':authority':
                        # This is an _exact_ match, so it mustn't contain a "*" -- that's illegal in the DNS.
                        if "*" in value:
                            # We can't call self.post_error() yet, because we're not initialized yet. So we cheat a bit
                            # and defer the error for later.
                            new_args["_deferred_error"] = f":authority exact-match '{value}' contains *, which cannot match anything."
                            ir.logger.debug("IRHTTPMapping %s: self.host contains * (%s, :authority)", name, value)
                        else:
                            # No globs, just save it. (We'll end up using it as a glob later, in the Envoy
                            # config part of the world, but that's OK -- a glob with no "*" in it will always
                            # match only itself.)
                            host = value
                            # NOTE(review): self.host is still None here (it is only
                            # assigned further below), so this log prints None.
                            ir.logger.debug("IRHTTPMapping %s: self.host == %s (:authority)", name, self.host)
                        # DO NOT save the ':authority' match here -- we'll pick it up after we've checked
                        # for hostname, too.
                    else:
                        # It's not an :authority match, so we're good.
                        hdrs.append(KeyValueDecorator(name, value))
        if 'regex_headers' in kwargs:
            # DON'T do anything special with a regex :authority match: we can't
            # do host-based filtering within the IR for it anyway.
            for name, value in kwargs.get('regex_headers', {}).items():
                hdrs.append(KeyValueDecorator(name, value, regex=True))
        if 'host' in kwargs:
            # It's deliberate that we'll allow kwargs['host'] to silently override an exact :authority
            # header match.
            host = kwargs['host']
            host_regex = kwargs.get('host_regex', False)
            # If it's not a regex, it's an exact match -- make sure it doesn't contain a '*'.
            if not host_regex:
                if "*" in host:
                    # We can't call self.post_error() yet, because we're not initialized yet. So we cheat a bit
                    # and defer the error for later.
                    new_args["_deferred_error"] = f"host exact-match {host} contains *, which cannot match anything."
                    ir.logger.debug("IRHTTPMapping %s: self.host contains * (%s, host)", name, host)
                else:
                    # NOTE(review): self.host is still None at this point; see note above.
                    ir.logger.debug("IRHTTPMapping %s: self.host == %s (host)", name, self.host)
        # Finally, check for 'hostname'.
        if 'hostname' in kwargs:
            # It's deliberate that we allow kwargs['hostname'] to override anything else -- even a regex host.
            # Yell about it, though.
            if host:
                ir.logger.warning("Mapping %s in namespace %s: both host and hostname are set, using hostname and ignoring host", name, namespace)
            # No need to be so careful about "*" here, since hostname is defined to be a glob.
            host = kwargs['hostname']
            host_regex = False
            ir.logger.debug("IRHTTPMapping %s: self.host gl~ %s (hostname)", name, self.host)
        # If we have a host, include a ":authority" match. We're treating this as if it were
        # an exact match, but that's because the ":authority" match is handling specially by
        # Envoy.
        if host:
            hdrs.append(KeyValueDecorator(":authority", host, host_regex))
            # Finally, if our host isn't a regex, save it in self.host.
            if not host_regex:
                self.host = host
        if 'method' in kwargs:
            hdrs.append(KeyValueDecorator(":method", kwargs['method'], kwargs.get('method_regex', False)))
        if 'use_websocket' in new_args:
            # 'use_websocket' is legacy sugar for allow_upgrade: [websocket].
            allow_upgrade = new_args.setdefault('allow_upgrade', [])
            if 'websocket' not in allow_upgrade:
                allow_upgrade.append('websocket')
            del new_args['use_websocket']
        # Next up: figure out what headers we need to add to each request. Again, if the key
        # is present in kwargs, the kwargs value wins -- this is important to allow explicitly
        # setting a value of `{}` to override a default!
        add_request_hdrs: dict
        add_response_hdrs: dict
        if 'add_request_headers' in kwargs:
            add_request_hdrs = kwargs['add_request_headers']
        else:
            add_request_hdrs = self.lookup_default('add_request_headers', {})
        if 'add_response_headers' in kwargs:
            add_response_hdrs = kwargs['add_response_headers']
        else:
            add_response_hdrs = self.lookup_default('add_response_headers', {})
        # Remember that we may need to add the Linkerd headers, too.
        add_linkerd_headers = new_args.get('add_linkerd_headers', False)
        # XXX The resolver lookup code is duplicated from IRBaseMapping.setup --
        # needs to be fixed after 1.6.1.
        resolver_name = kwargs.get('resolver') or self.ir.ambassador_module.get('resolver', 'kubernetes-service')
        assert(resolver_name) # for mypy -- resolver_name cannot be None at this point
        resolver = self.ir.get_resolver(resolver_name)
        if resolver:
            resolver_kind = resolver.kind
        else:
            # In IRBaseMapping.setup, we post an error if the resolver is unknown.
            # Here, we just don't bother; we're only using it for service
            # qualification.
            resolver_kind = 'KubernetesBogusResolver'
        service = normalize_service_name(ir, service, namespace, resolver_kind, rkey=rkey)
        self.ir.logger.debug(f"Mapping {name} service qualified to {repr(service)}")
        svc = Service(ir.logger, service)
        if add_linkerd_headers:
            add_request_hdrs['l5d-dst-override'] = svc.hostname_port
        # XXX BRUTAL HACK HERE:
        # If we _don't_ have an origination context, but our IR has an agent_origination_ctx,
        # force TLS origination because it's the agent. I know, I know. It's a hack.
        if ('tls' not in new_args) and ir.agent_origination_ctx:
            ir.logger.debug(f"Mapping {name}: Agent forcing origination TLS context to {ir.agent_origination_ctx.name}")
            new_args['tls'] = ir.agent_origination_ctx.name
        if 'query_parameters' in kwargs:
            for name, value in kwargs.get('query_parameters', {}).items():
                if value is True:
                    query_parameters.append(KeyValueDecorator(name))
                else:
                    query_parameters.append(KeyValueDecorator(name, value))
        if 'regex_query_parameters' in kwargs:
            for name, value in kwargs.get('regex_query_parameters', {}).items():
                query_parameters.append(KeyValueDecorator(name, value, regex=True))
        if 'regex_rewrite' in kwargs:
            # regex_rewrite and rewrite are mutually exclusive; regex_rewrite wins.
            if rewrite and rewrite != "/":
                self.ir.aconf.post_notice("Cannot specify both rewrite and regex_rewrite: using regex_rewrite and ignoring rewrite")
            rewrite = ""
            rewrite_items = kwargs.get('regex_rewrite', {})
            regex_rewrite = {'pattern' : rewrite_items.get('pattern',''),
                             'substitution' : rewrite_items.get('substitution','') }
        # ...and then init the superclass.
        super().__init__(
            ir=ir, aconf=aconf, rkey=rkey, location=location, service=service,
            kind=kind, name=name, namespace=namespace, metadata_labels=metadata_labels,
            apiVersion=apiVersion, headers=hdrs, add_request_headers=add_request_hdrs, add_response_headers = add_response_hdrs,
            precedence=precedence, rewrite=rewrite, cluster_tag=cluster_tag,
            query_parameters=query_parameters,
            regex_rewrite=regex_rewrite,
            **new_args
        )
        if 'outlier_detection' in kwargs:
            self.post_error(RichStatus.fromError("outlier_detection is not supported"))
    @staticmethod
    def group_class() -> Type[IRBaseMappingGroup]:
        """Return the group class used to aggregate IRHTTPMapping resources."""
        return IRHTTPMappingGroup
def _enforce_mutual_exclusion(self, preferred, other):
if preferred in self and other in self:
self.ir.aconf.post_error(f"Cannot specify both {preferred} and {other}. Using {preferred} and ignoring {other}.", resource=self)
del self[other]
    def setup(self, ir: 'IR', aconf: Config) -> bool:
        """Finish initializing this Mapping; return False to drop it from the IR.

        Posts any error deferred from __init__, then normalizes the nested
        cors / retry_policy / error_response_overrides configs into their IR
        classes, converts v0 'rate_limits' into v1 'labels', validates
        'load_balancer', and enforces mutual exclusion among the redirect
        fields.
        """
        # First things first: handle any deferred error.
        _deferred_error = self.get("_deferred_error")
        if _deferred_error:
            self.post_error(_deferred_error)
            return False
        if not super().setup(ir, aconf):
            return False
        # If we have CORS stuff, normalize it.
        if 'cors' in self:
            self.cors = IRCORS(ir=ir, aconf=aconf, location=self.location, **self.cors)
            if self.cors:
                self.cors.referenced_by(self)
            else:
                # A falsy IRCORS means the CORS config was invalid; drop the mapping.
                return False
        # If we have RETRY_POLICY stuff, normalize it.
        if 'retry_policy' in self:
            self.retry_policy = IRRetryPolicy(ir=ir, aconf=aconf, location=self.location, **self.retry_policy)
            if self.retry_policy:
                self.retry_policy.referenced_by(self)
            else:
                return False
        # If we have error response overrides, generate an IR for that too.
        if 'error_response_overrides' in self:
            self.error_response_overrides = IRErrorResponse(self.ir, aconf,
                                                            self.get('error_response_overrides', None),
                                                            location=self.location)
            #if self.error_response_overrides.setup(self.ir, aconf):
            if self.error_response_overrides:
                self.error_response_overrides.referenced_by(self)
            else:
                return False
        # Likewise, labels is supported only in V1+:
        if 'labels' in self:
            if self.apiVersion == 'getambassador.io/v0':
                self.post_error("labels not supported in getambassador.io/v0 Mapping resources")
                return False
        if 'rate_limits' in self:
            if self.apiVersion != 'getambassador.io/v0':
                self.post_error("rate_limits supported only in getambassador.io/v0 Mapping resources")
                return False
            # Let's turn this into a set of labels instead.
            labels = []
            rlcount = 0
            for rate_limit in self.pop('rate_limits', []):
                rlcount += 1
                # Since this is a V0 Mapping, prepend the static default stuff that we were implicitly
                # forcing back in the pre-0.50 days.
                label: List[Any] = [
                    'source_cluster',
                    'destination_cluster',
                    'remote_address'
                ]
                # Next up: old rate_limit "descriptor" becomes label "generic_key".
                rate_limit_descriptor = rate_limit.get('descriptor', None)
                if rate_limit_descriptor:
                    label.append({ 'generic_key': rate_limit_descriptor })
                # Header names get turned into omit-if-not-present header dictionaries.
                rate_limit_headers = rate_limit.get('headers', [])
                for rate_limit_header in rate_limit_headers:
                    label.append({
                        rate_limit_header: {
                            'header': rate_limit_header,
                            'omit_if_not_present': True
                        }
                    })
                labels.append({
                    'v0_ratelimit_%02d' % rlcount: label
                })
            if labels:
                # Labels are scoped under the rate-limit service's domain.
                domain = 'ambassador' if not ir.ratelimit else ir.ratelimit.domain
                self['labels'] = { domain: labels }
        if self.get('load_balancer', None) is not None:
            if not self.validate_load_balancer(self['load_balancer']):
                self.post_error("Invalid load_balancer specified: {}, invalidating mapping".format(self['load_balancer']))
                return False
        # All three redirect fields are mutually exclusive.
        #
        # Prefer path_redirect over the other two. If only prefix_redirect and
        # regex_redirect are set, prefer prefix_redirect. There's no exact
        # reason for this, only to arbitrarily prefer "less fancy" features.
        self._enforce_mutual_exclusion('path_redirect', 'prefix_redirect')
        self._enforce_mutual_exclusion('path_redirect', 'regex_redirect')
        self._enforce_mutual_exclusion('prefix_redirect', 'regex_redirect')
        ir.logger.debug("Mapping %s: setup OK: host %s hostname %s regex %s",
                        self.name, self.get('host'), self.get('hostname'), self.get('host_regex'))
        return True
@staticmethod
def validate_load_balancer(load_balancer) -> bool:
lb_policy = load_balancer.get('policy', None)
is_valid = False
if lb_policy in ['round_robin', 'least_request']:
if len(load_balancer) == 1:
is_valid = True
elif lb_policy in ['ring_hash', 'maglev']:
if len(load_balancer) == 2:
if 'cookie' in load_balancer:
cookie = load_balancer.get('cookie')
if 'name' in cookie:
is_valid = True
elif 'header' in load_balancer:
is_valid = True
elif 'source_ip' in load_balancer:
is_valid = True
return is_valid
def _group_id(self) -> str:
# Yes, we're using a cryptographic hash here. Cope. [ :) ]
h = hashlib.new('sha1')
# This is an HTTP mapping.
h.update('HTTP-'.encode('utf-8'))
# method first, but of course method might be None. For calculating the
# group_id, 'method' defaults to 'GET' (for historical reasons).
method = self.get('method') or 'GET'
h.update(method.encode('utf-8'))
h.update(self.prefix.encode('utf-8'))
for hdr in self.headers:
h.update(hdr.name.encode('utf-8'))
if hdr.value is not None:
h.update(hdr.value.encode('utf-8'))
for query_parameter in self.query_parameters:
h.update(query_parameter.name.encode('utf-8'))
if query_parameter.value is not None:
h.update(query_parameter.value.encode('utf-8'))
if self.precedence != 0:
h.update(str(self.precedence).encode('utf-8'))
return h.hexdigest()
def _route_weight(self) -> List[Union[str, int]]:
len_headers = 0
len_query_parameters = 0
for hdr in self.headers:
len_headers += hdr.length()
for query_parameter in self.query_parameters:
len_query_parameters += query_parameter.length()
# For calculating the route weight, 'method' defaults to '*' (for historical reasons).
weight = [ self.precedence, len(self.prefix), len_headers, len_query_parameters, self.prefix, self.get('method', 'GET') ]
weight += [ hdr.key() for hdr in self.headers ]
weight += [ query_parameter.key() for query_parameter in self.query_parameters]
return weight
def summarize_errors(self) -> str:
errors = self.ir.aconf.errors.get(self.rkey, [])
errstr = "(no errors)"
if errors:
errstr = errors[0].get('error') or 'unknown error?'
if len(errors) > 1:
errstr += " (and more)"
return errstr
def status(self) -> Dict[str, str]:
if not self.is_active():
return { 'state': 'Inactive', 'reason': self.summarize_errors() }
else:
return { 'state': 'Running' }
| 42.133568 | 146 | 0.591474 |
9e415f76a53326c5809b7a8c508701c519ab443b | 4,324 | py | Python | demo/model_zoo/resnet/example/image_list_provider.py | lzhao4ever/Paddle-master | 5c0eb23d1c021fed88416df9eae8511d36df4372 | [
"Apache-2.0"
] | 1 | 2018-12-20T12:15:39.000Z | 2018-12-20T12:15:39.000Z | demo/model_zoo/resnet/example/image_list_provider.py | lzhao4ever/Paddle-master | 5c0eb23d1c021fed88416df9eae8511d36df4372 | [
"Apache-2.0"
] | null | null | null | demo/model_zoo/resnet/example/image_list_provider.py | lzhao4ever/Paddle-master | 5c0eb23d1c021fed88416df9eae8511d36df4372 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2016 Baidu, Inc. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.utils.image_util import *
from paddle.trainer.PyDataProvider2 import *
def hook(settings, image_size, crop_size, color, file_list, is_train, **kwargs):
    """
    Description: Init with a list of data file
    file_list is the name list of input files.
    kwargs["load_data_args"] is the value of 'load_data_args'
    which can be set in config.
    Each args is separated by a column.
    image_size: the short side the image is resized to before cropping.
    crop_size: the crop image size.
    mean_meta: the path of the meta file to store the mean image.
    mean_value: can be mean value, not a file.
              can not set mean_meta and mean_value at the same time.
    color: 'color' means a color image. Otherwise, it means a gray image.
    is_train: whether the data provider is used for training.
              Data argumentation might be different for training and testing.
    """
    settings.img_size = image_size
    settings.crop_size = crop_size
    settings.mean_img_size = settings.crop_size
    settings.color = color  # default is color
    settings.is_train = is_train
    # 'swap_channel' (if given) is the channel permutation to apply; stash the
    # permutation in settings.swap_channel and turn is_swap_channel into a
    # plain boolean flag.
    settings.is_swap_channel = kwargs.get('swap_channel', None)
    if settings.is_swap_channel is not None:
        settings.swap_channel = settings.is_swap_channel
        settings.is_swap_channel = True

    # Flattened per-image feature size: crop_size^2 pixels, x3 if color.
    if settings.color:
        settings.img_input_size = settings.crop_size * settings.crop_size * 3
    else:
        settings.img_input_size = settings.crop_size * settings.crop_size

    settings.file_list = file_list
    settings.mean_meta = kwargs.get('mean_meta', None)
    settings.mean_value = kwargs.get('mean_value', None)
    # can not specify both mean_meta and mean_value.
    assert not (settings.mean_meta and settings.mean_value)

    if not settings.mean_meta:
        # Build a constant per-channel mean image from mean_value. (A
        # redundant second kwargs.get('mean_value') fetch was removed here;
        # the value was already read into settings.mean_value above.)
        sz = settings.crop_size * settings.crop_size
        settings.img_mean = np.zeros(sz * 3, dtype=np.single)
        for idx, value in enumerate(settings.mean_value):
            settings.img_mean[idx * sz:(idx + 1) * sz] = value
        settings.img_mean = settings.img_mean.reshape(3, settings.crop_size,
                                                      settings.crop_size)
    else:
        # Load the precomputed mean image from the meta file.
        settings.img_mean = load_meta(settings.mean_meta,
                                      settings.mean_img_size,
                                      settings.crop_size, settings.color)

    settings.input_types = [
        dense_vector(settings.img_input_size),  # image feature
        integer_value(1)
    ]  # labels

    settings.logger.info('Image short side: %s', settings.img_size)
    settings.logger.info('Crop size: %s', settings.crop_size)
    settings.logger.info('Meta path: %s', settings.mean_meta)
    if settings.is_swap_channel:
        settings.logger.info('swap channel: %s', settings.swap_channel)
    settings.logger.info('DataProvider Initialization finished')
@provider(init_hook=hook, should_shuffle=False)
def processData(settings, file_list):
    """
    The main function for loading data.
    Load the batch, iterate all the images and labels in this batch.
    file_name: the batch file name.
    """
    # Each list entry is "<image path> <label>".
    path, raw_label = file_list.strip().split(' ')
    image = Image.open(path)
    image.load()
    # Resize to the configured short side before any cropping happens later.
    image = image.resize((settings.img_size, settings.img_size), Image.ANTIALIAS)
    image = np.array(image).astype(np.float32)
    if len(image.shape) == 3:
        # Reorder axes from HWC to CHW for a color image.
        image = np.swapaxes(image, 1, 2)
        image = np.swapaxes(image, 1, 0)
    # swap channel
    if settings.is_swap_channel:
        image = image[settings.swap_channel, :, :]
    feature = preprocess_img(image, settings.img_mean, settings.crop_size,
                             settings.is_train, settings.color)
    yield feature.tolist(), int(raw_label.strip())
| 41.980583 | 80 | 0.681776 |
83b3a4c336a6d38d5c1df73387fe57792d3ce0b3 | 963 | py | Python | adafruit_circuitpython_libs/adafruit-circuitpython-bundle-py-20210214/examples/matrixkeypad_simpletest.py | jacoblb64/pico_rgb_keypad_hid | 3251ca6a98ef86d9f98c54f639c4d61810601a0b | [
"MIT"
] | 47 | 2021-02-15T23:02:36.000Z | 2022-03-04T21:30:03.000Z | adafruit_circuitpython_libs/adafruit-circuitpython-bundle-py-20210214/examples/matrixkeypad_simpletest.py | jacoblb64/pico_rgb_keypad_hid | 3251ca6a98ef86d9f98c54f639c4d61810601a0b | [
"MIT"
] | 7 | 2021-02-19T20:00:08.000Z | 2022-01-14T10:51:12.000Z | adafruit_circuitpython_libs/adafruit-circuitpython-bundle-py-20210214/examples/matrixkeypad_simpletest.py | jacoblb64/pico_rgb_keypad_hid | 3251ca6a98ef86d9f98c54f639c4d61810601a0b | [
"MIT"
] | 14 | 2021-02-20T17:40:56.000Z | 2022-01-01T19:53:38.000Z | # SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
# SPDX-License-Identifier: MIT
import time
import digitalio
import board
import adafruit_matrixkeypad
# Membrane 3x4 matrix keypad - https://www.adafruit.com/product/419
cols = [digitalio.DigitalInOut(x) for x in (board.D9, board.D6, board.D5)]
rows = [digitalio.DigitalInOut(x) for x in (board.D13, board.D12, board.D11, board.D10)]

# 3x4 matrix keypad - Rows and columns are mixed up for https://www.adafruit.com/product/3845
# Use the same wiring as in the guide with the following setup lines:
# cols = [digitalio.DigitalInOut(x) for x in (board.D11, board.D13, board.D9)]
# rows = [digitalio.DigitalInOut(x) for x in (board.D12, board.D5, board.D6, board.D10)]

# Key legends, one tuple per keypad row.
keys = ((1, 2, 3), (4, 5, 6), (7, 8, 9), ("*", 0, "#"))

keypad = adafruit_matrixkeypad.Matrix_Keypad(rows, cols, keys)

# Poll forever, reporting any keys currently held down.
while True:
    # Renamed from 'keys' so the poll result no longer shadows the key-layout
    # tuple defined above.
    pressed = keypad.pressed_keys
    if pressed:
        print("Pressed: ", pressed)
    time.sleep(0.1)
| 35.666667 | 93 | 0.708204 |
fc7cdd256a0d253dd10370b9e1436f59755876d9 | 875 | py | Python | FiniteJH/proj_simplex.py | microsoft/Dr-Jekyll-and-Mr-Hyde-The-Strange-Case-of-Off-Policy-Policy-Updates | e085b10156787838a342037e6042af00f5262d5a | [
"MIT"
] | 2 | 2021-12-22T18:18:30.000Z | 2022-02-25T17:57:33.000Z | FiniteJH/proj_simplex.py | microsoft/Dr-Jekyll-and-Mr-Hyde-The-Strange-Case-of-Off-Policy-Policy-Updates | e085b10156787838a342037e6042af00f5262d5a | [
"MIT"
] | null | null | null | FiniteJH/proj_simplex.py | microsoft/Dr-Jekyll-and-Mr-Hyde-The-Strange-Case-of-Off-Policy-Policy-Updates | e085b10156787838a342037e6042af00f5262d5a | [
"MIT"
] | null | null | null | """
Implements three algorithms for projecting a vector onto the simplex: sort, pivot and bisection.
For details and references, see the following paper:
Large-scale Multiclass Support Vector Machine Training via Euclidean Projection onto the Simplex
Mathieu Blondel, Akinori Fujino, and Naonori Ueda.
ICPR 2014.
http://www.mblondel.org/publications/mblondel-icpr2014.pdf
"""
import numpy as np
def projection_simplex_sort(v_list, z=1):
    """Project each row of v_list onto the simplex {w : w >= 0, sum(w) = z}.

    Uses the O(n log n) sort-based algorithm (Blondel et al., ICPR 2014):
    sort each row descending, find the largest rank rho whose running-average
    threshold keeps the entry positive, then shift and clip.

    :param v_list: 2-D array; each row is projected independently.
    :param z: target sum of each projected row (default 1).
    :return: array of the same shape holding the projected rows.
    """
    projected = np.zeros(v_list.shape)
    for row_idx, row in enumerate(v_list):
        dim = row.shape[0]
        sorted_desc = np.sort(row)[::-1]
        partial_sums = np.cumsum(sorted_desc) - z
        ranks = np.arange(dim) + 1
        positive = sorted_desc - partial_sums / ranks > 0
        rho = ranks[positive][-1]
        theta = partial_sums[positive][-1] / float(rho)
        projected[row_idx] = np.maximum(row - theta, 0)
    return projected
c0da5566ae4efc99a0852e6d416397052d541905 | 7,010 | py | Python | ncfm/stacking.py | kirk86/kaggle | 8178ed790bc1f8fcd2cd7a01560e5f25f01c07cc | [
"MIT"
] | null | null | null | ncfm/stacking.py | kirk86/kaggle | 8178ed790bc1f8fcd2cd7a01560e5f25f01c07cc | [
"MIT"
] | null | null | null | ncfm/stacking.py | kirk86/kaggle | 8178ed790bc1f8fcd2cd7a01560e5f25f01c07cc | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Blending {RandomForests, ExtraTrees, GradientBoosting} + stretching to
[0,1]. The blending scheme is related to the idea Jose H. Solorzano
presented here:
http://www.kaggle.com/c/bioresponse/forums/t/1889/question-about-the-process-of-ensemble-learning/10950#post10950
'''You can try this: In one of the 5 folds, train the models, then use
the results of the models as 'variables' in logistic regression over
the validation data of that fold'''.
Note: if you increase the number of estimators of the classifiers,
e.g. n_estimators=1000, you get a better score/rank on the private
test set.
"""
from __future__ import division
import numpy as np
# import load_data
from sklearn.model_selection import StratifiedKFold
from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier
# from sklearn.naive_bayes import MultinomialNB
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RationalQuadratic
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
# from sklearn.ensemble import GradientBoostingClassifier
from xgboost import XGBClassifier
from lightgbm import LGBMClassifier
# from pysofia.compat import RankSVMCV
from fastFM import sgd
from sklearn.linear_model import LogisticRegression
from sklearn import metrics
# from sklearn.cross_validation import cross_val_score
from keras.utils import np_utils
import pandas as pd
from sklearn.svm import LinearSVC
def logloss(y_true, y_pred, epsilon=1.0e-15):
"""Logloss, i.e. the score of the bioresponse competition.
"""
attempt = np.clip(y_pred, epsilon, 1.0-epsilon)
return - np.mean(y_true * np.log(y_pred) +
(1.0 - y_true) * np.log(1.0 - attempt))
def ensemble(trX, trY, teX, teX_id):
    """Stacked-generalization ensemble for the NCFM fish-classification task.

    Each base classifier is trained out-of-fold to produce one meta-feature
    column; a logistic regression is then fit on those columns and its test
    predictions are written to 'submit_ensemble.csv'.

    :param trX: training inputs (any shape; flattened to 2-D here).
    :param trY: one-hot training labels (argmax'd to class indices).
    :param teX: test inputs (flattened to 2-D).
    :param teX_id: test image identifiers for the submission file.
    """
    np.random.seed(2017) # seed to shuffle the train set
    n_folds = 10
    # verbose = True
    shuffle = False
    # X, y, X_test = load_data.load()
    # X_submission is X_test
    trX = trX.reshape(trX.shape[0], np.prod(trX.shape[1:]))
    trY = np.argmax(trY, axis=1)
    teX = teX.reshape(teX.shape[0], np.prod(teX.shape[1:]))
    if shuffle:
        idx = np.random.permutation(trY.size)
        trX = trX[idx]
        trY = trY[idx]
    skf = StratifiedKFold(n_splits=n_folds)
    # Base ("level 0") classifiers whose out-of-fold predictions become the
    # meta-features for the blender.
    clfs = [
        # NOTE(review): 'kernel' is passed the RationalQuadratic *class*, not
        # an instance (RationalQuadratic()); scikit-learn expects a kernel
        # instance, so this estimator looks like it would fail at fit time --
        # confirm.
        GaussianProcessClassifier(kernel=RationalQuadratic,
                                  n_restarts_optimizer=100,
                                  max_iter_predict=1000,
                                  warm_start=True,
                                  n_jobs=-1),
        RandomForestClassifier(n_estimators=1000, n_jobs=-1,
                               criterion='gini'),
        RandomForestClassifier(n_estimators=1000, n_jobs=-1,
                               criterion='entropy'),
        ExtraTreesClassifier(n_estimators=1000, n_jobs=-1,
                             criterion='gini'),
        ExtraTreesClassifier(n_estimators=1000, n_jobs=-1,
                             criterion='entropy'),
        XGBClassifier(learning_rate=0.05, subsample=0.5,
                      max_depth=6, n_estimators=1000),
        LGBMClassifier(learning_rate=0.05, subsample=0.5,
                       max_depth=6, n_estimators=1000),
        KNeighborsClassifier(n_neighbors=10, algorithm='ball_tree',
                             p=1, leaf_size=30, n_jobs=-1),
        QuadraticDiscriminantAnalysis(reg_param=1e-2),
        # NOTE(review): LinearSVC has no predict_proba; the predict_proba
        # calls below should raise for this estimator -- confirm.
        LinearSVC(class_weight='auto', verbose=True, max_iter=10000,
                  tol=1e-6, C=1)
        # RankSVMCV(max_iter=500)
    ]
    print "Creating train and test sets for blending."
    # One meta-feature column per base classifier.
    dataset_blend_train = np.zeros((trX.shape[0], len(clfs)))
    dataset_blend_test = np.zeros((teX.shape[0], len(clfs)))
    for j, clf in enumerate(clfs):
        print j, clf
        dataset_blend_test_j = np.zeros((teX.shape[0], n_folds))
        for i, (trX_idx, valX_idx) in zip(range(n_folds),
                                          skf.split(np.zeros(len(trX)),
                                                    trY)):
            print "Fold", i
            X = trX[trX_idx]
            Y = trY[trX_idx]
            valX = trX[valX_idx]
            valY = trY[valX_idx]
            clf.fit(X, Y)
            valY_pred = clf.predict_proba(valX)
            # NOTE(review): only column 1 (the class-1 probability) is kept
            # as the meta-feature, even though trY looks multiclass (8 fish
            # classes) -- verify this is intended.
            dataset_blend_train[valX_idx, j] = valY_pred[:, 1]
            dataset_blend_test_j[:, i] = clf.predict_proba(teX)[:, 1]
            # print(metrics.classification_report(valY,
            #                                     np.argmax(valY_pred, axis=1)))
            # print(metrics.confusion_matrix(valY, np.argmax(valY_pred, axis=1)))
        dataset_blend_test[:, j] = dataset_blend_test_j.mean(1) # averaging
                                                                # the
                                                                # predictions
    print
    print "Blending."
    clf = LogisticRegression(C=10, solver='lbfgs', penalty='l2',
                             multi_class='multinomial', n_jobs=-1)
    # Creating model S like the kaggle
    # blog example on stacking. Is
    # ensembling same as stacking? Take
    # away message, in stacking we use the
    # predictions of the base models as
    # features(i.e. meta features) for the
    # stakced model. The stacked model
    # able to dicern where each model
    # performs well and where poorly.
    # It’s also important to note that the
    # meta features in row i of train_meta
    # are not dependent on the target
    # value in row i because they were
    # produced using information that
    # excluded the target_i in the base
    # models’ fitting procedure.
    clf.fit(dataset_blend_train, trY)
    # y_pred = clf.predict_proba(dataset_blend_test)[:, 1]
    y_pred = clf.predict_proba(dataset_blend_test)
    print "Linear stretch of predictions to [0,1]"
    # NOTE(review): this min/max stretch is applied globally over the whole
    # matrix, so rows no longer sum to 1 -- the outputs stop being proper
    # per-row probability distributions.
    y_pred = (y_pred - y_pred.min()) \
        / (y_pred.max() - y_pred.min())
    # print("Log loss emanuele = {}, sklearn = {}"
    #       .format(logloss(trY, y_pred), metrics.log_loss(trY, y_pred)))
    print "Saving Results."
    df = pd.DataFrame(y_pred, columns=['ALB', 'BET', 'DOL', 'LAG',
                                       'NoF', 'OTHER', 'SHARK', 'YFT'])
    df.insert(0, 'image', teX_id)
    df.to_csv('submit_ensemble.csv', index=False)
    # tmp = np.vstack([range(1, len(y_pred)+1), y_pred]).T
    # np.savetxt(fname='submission.csv', X=tmp, fmt='%d,%0.9f',
    #            header='image,ALB,BET,LOG,NoF,YFT,SHARK,OTHER', comments='')
| 43.540373 | 113 | 0.579743 |
bda1c0c1b362c0451454c49cce829f3b33031c53 | 34,791 | py | Python | flask_restx/api.py | yuvalherziger/flask-restx | 55f012a2faf96d63385a59d3bb05b03f864554e2 | [
"BSD-3-Clause"
] | 1 | 2020-09-14T15:54:35.000Z | 2020-09-14T15:54:35.000Z | flask_restx/api.py | yuvalherziger/flask-restx | 55f012a2faf96d63385a59d3bb05b03f864554e2 | [
"BSD-3-Clause"
] | null | null | null | flask_restx/api.py | yuvalherziger/flask-restx | 55f012a2faf96d63385a59d3bb05b03f864554e2 | [
"BSD-3-Clause"
] | 1 | 2020-05-25T14:22:44.000Z | 2020-05-25T14:22:44.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import difflib
import inspect
from itertools import chain
import logging
import operator
import re
import six
import sys
from collections import OrderedDict
from functools import wraps, partial
from types import MethodType
from flask import url_for, request, current_app
from flask import make_response as original_flask_make_response
from flask.helpers import _endpoint_from_view_func
from flask.signals import got_request_exception
from jsonschema import RefResolver
from werkzeug.utils import cached_property
from werkzeug.datastructures import Headers
from werkzeug.exceptions import (
HTTPException,
MethodNotAllowed,
NotFound,
NotAcceptable,
InternalServerError,
)
from werkzeug.wrappers import BaseResponse
from . import apidoc
from .mask import ParseError, MaskError
from .namespace import Namespace
from .postman import PostmanCollectionV1
from .resource import Resource
from .swagger import Swagger
from .utils import default_id, camel_to_dash, unpack
from .representations import output_json
from ._http import HTTPStatus
# Captures the angle-bracket converter segments of a URL rule (e.g. "<int:id>").
RE_RULES = re.compile("(<.*>)")
# List headers that should never be handled by Flask-RESTX
HEADERS_BLACKLIST = ("Content-Length",)
# (mimetype, renderer) pairs used until an Api customizes its representations.
DEFAULT_REPRESENTATIONS = [("application/json", output_json)]
# Module-level logger shared by this module.
log = logging.getLogger(__name__)
class Api(object):
"""
The main entry point for the application.
You need to initialize it with a Flask Application: ::
>>> app = Flask(__name__)
>>> api = Api(app)
Alternatively, you can use :meth:`init_app` to set the Flask application
after it has been constructed.
The endpoint parameter prefix all views and resources:
- The API root/documentation will be ``{endpoint}.root``
- A resource registered as 'resource' will be available as ``{endpoint}.resource``
:param flask.Flask|flask.Blueprint app: the Flask application object or a Blueprint
:param str version: The API version (used in Swagger documentation)
:param str title: The API title (used in Swagger documentation)
:param str description: The API description (used in Swagger documentation)
:param str terms_url: The API terms page URL (used in Swagger documentation)
:param str contact: A contact email for the API (used in Swagger documentation)
:param str license: The license associated to the API (used in Swagger documentation)
:param str license_url: The license page URL (used in Swagger documentation)
:param str endpoint: The API base endpoint (default to 'api).
:param str default: The default namespace base name (default to 'default')
:param str default_label: The default namespace label (used in Swagger documentation)
:param str default_mediatype: The default media type to return
:param bool validate: Whether or not the API should perform input payload validation.
:param bool ordered: Whether or not preserve order models and marshalling.
:param str doc: The documentation path. If set to a false value, documentation is disabled.
(Default to '/')
:param list decorators: Decorators to attach to every resource
:param bool catch_all_404s: Use :meth:`handle_error`
to handle 404 errors throughout your app
:param dict authorizations: A Swagger Authorizations declaration as dictionary
:param bool serve_challenge_on_401: Serve basic authentication challenge with 401
responses (default 'False')
:param FormatChecker format_checker: A jsonschema.FormatChecker object that is hooked into
the Model validator. A default or a custom FormatChecker can be provided (e.g., with custom
checkers), otherwise the default action is to not enforce any format validation.
"""
    def __init__(
        self,
        app=None,
        version="1.0",
        title=None,
        description=None,
        terms_url=None,
        license=None,
        license_url=None,
        contact=None,
        contact_url=None,
        contact_email=None,
        authorizations=None,
        security=None,
        doc="/",
        default_id=default_id,
        default="default",
        default_label="Default namespace",
        validate=None,
        tags=None,
        prefix="",
        ordered=False,
        default_mediatype="application/json",
        decorators=None,
        catch_all_404s=False,
        serve_challenge_on_401=False,
        format_checker=None,
        **kwargs
    ):
        # Swagger / OpenAPI metadata
        self.version = version
        self.title = title or "API"
        self.description = description
        self.terms_url = terms_url
        self.contact = contact
        self.contact_email = contact_email
        self.contact_url = contact_url
        self.license = license
        self.license_url = license_url
        self.authorizations = authorizations
        self.security = security
        self.default_id = default_id
        self.ordered = ordered
        # Input-payload validation default; may be overridden by app config later
        self._validate = validate
        self._doc = doc
        self._doc_view = None
        self._default_error_handler = None
        self.tags = tags or []
        # Built-in handlers for field-mask errors; user handlers are added via errorhandler()
        self.error_handlers = {
            ParseError: mask_parse_error_handler,
            MaskError: mask_error_handler,
        }
        self._schema = None
        self.models = {}
        self._refresolver = None
        self.format_checker = format_checker
        self.namespaces = []
        self.ns_paths = dict()
        # mediatype -> transformer callable, in priority order
        self.representations = OrderedDict(DEFAULT_REPRESENTATIONS)
        self.urls = {}
        self.prefix = prefix
        self.default_mediatype = default_mediatype
        self.decorators = decorators if decorators else []
        self.catch_all_404s = catch_all_404s
        self.serve_challenge_on_401 = serve_challenge_on_401
        self.blueprint_setup = None
        self.endpoints = set()
        # Resources registered before an app exists are deferred here
        self.resources = []
        self.app = None
        self.blueprint = None
        # must come after self.app initialisation to prevent __getattr__ recursion
        # in self._configure_namespace_logger
        self.default_namespace = self.namespace(
            default,
            default_label,
            endpoint="{0}-declaration".format(default),
            validate=validate,
            api=self,
            path="/",
        )
        if app is not None:
            self.app = app
            self.init_app(app)
        # super(Api, self).__init__(app, **kwargs)
    def init_app(self, app, **kwargs):
        """
        Allow to lazy register the API on a Flask application::
        >>> app = Flask(__name__)
        >>> api = Api()
        >>> api.init_app(app)
        :param flask.Flask app: the Flask application object
        :param str title: The API title (used in Swagger documentation)
        :param str description: The API description (used in Swagger documentation)
        :param str terms_url: The API terms page URL (used in Swagger documentation)
        :param str contact: A contact email for the API (used in Swagger documentation)
        :param str license: The license associated to the API (used in Swagger documentation)
        :param str license_url: The license page URL (used in Swagger documentation)
        """
        self.app = app
        # Swagger metadata set at construction time can be overridden here
        self.title = kwargs.get("title", self.title)
        self.description = kwargs.get("description", self.description)
        self.terms_url = kwargs.get("terms_url", self.terms_url)
        self.contact = kwargs.get("contact", self.contact)
        self.contact_url = kwargs.get("contact_url", self.contact_url)
        self.contact_email = kwargs.get("contact_email", self.contact_email)
        self.license = kwargs.get("license", self.license)
        self.license_url = kwargs.get("license_url", self.license_url)
        self._add_specs = kwargs.get("add_specs", True)
        # If app is a blueprint, defer the initialization
        try:
            app.record(self._deferred_blueprint_init)
        # Flask.Blueprint has a 'record' attribute, Flask.Api does not
        except AttributeError:
            self._init_app(app)
        else:
            self.blueprint = app
    def _init_app(self, app):
        """
        Perform initialization actions with the given :class:`flask.Flask` object.
        :param flask.Flask app: The flask application object
        """
        self._register_specs(self.blueprint or app)
        self._register_doc(self.blueprint or app)
        # Route all exceptions through our error router, keeping the original
        # Flask handlers as fallback
        app.handle_exception = partial(self.error_router, app.handle_exception)
        app.handle_user_exception = partial(
            self.error_router, app.handle_user_exception
        )
        # Flush resources that were registered before an app was available
        if len(self.resources) > 0:
            for resource, namespace, urls, kwargs in self.resources:
                self._register_view(app, resource, namespace, *urls, **kwargs)
        for ns in self.namespaces:
            self._configure_namespace_logger(app, ns)
        self._register_apidoc(app)
        # App config wins over the constructor default when validate was not set
        self._validate = (
            self._validate
            if self._validate is not None
            else app.config.get("RESTX_VALIDATE", False)
        )
        app.config.setdefault("RESTX_MASK_HEADER", "X-Fields")
        app.config.setdefault("RESTX_MASK_SWAGGER", True)
        app.config.setdefault("RESTX_INCLUDE_ALL_MODELS", False)
    def __getattr__(self, name):
        # Unknown attributes are proxied to the default namespace (only called
        # when normal attribute lookup fails).
        try:
            return getattr(self.default_namespace, name)
        except AttributeError:
            raise AttributeError("Api does not have {0} attribute".format(name))
def _complete_url(self, url_part, registration_prefix):
"""
This method is used to defer the construction of the final url in
the case that the Api is created with a Blueprint.
:param url_part: The part of the url the endpoint is registered with
:param registration_prefix: The part of the url contributed by the
blueprint. Generally speaking, BlueprintSetupState.url_prefix
"""
parts = (registration_prefix, self.prefix, url_part)
return "".join(part for part in parts if part)
def _register_apidoc(self, app):
conf = app.extensions.setdefault("restx", {})
if not conf.get("apidoc_registered", False):
app.register_blueprint(apidoc.apidoc)
conf["apidoc_registered"] = True
    def _register_specs(self, app_or_blueprint):
        # Expose the swagger.json view, unless add_specs was disabled
        if self._add_specs:
            endpoint = str("specs")
            self._register_view(
                app_or_blueprint,
                SwaggerView,
                self.default_namespace,
                "/swagger.json",
                endpoint=endpoint,
                resource_class_args=(self,),
            )
            self.endpoints.add(endpoint)
def _register_doc(self, app_or_blueprint):
if self._add_specs and self._doc:
# Register documentation before root if enabled
app_or_blueprint.add_url_rule(self._doc, "doc", self.render_doc)
app_or_blueprint.add_url_rule(self.prefix or "/", "root", self.render_root)
def register_resource(self, namespace, resource, *urls, **kwargs):
endpoint = kwargs.pop("endpoint", None)
endpoint = str(endpoint or self.default_endpoint(resource, namespace))
kwargs["endpoint"] = endpoint
self.endpoints.add(endpoint)
if self.app is not None:
self._register_view(self.app, resource, namespace, *urls, **kwargs)
else:
self.resources.append((resource, namespace, urls, kwargs))
return endpoint
def _configure_namespace_logger(self, app, namespace):
for handler in app.logger.handlers:
namespace.logger.addHandler(handler)
namespace.logger.setLevel(app.logger.level)
    def _register_view(self, app, resource, namespace, *urls, **kwargs):
        """Bind *resource* to *urls* on the app or blueprint as a view function."""
        endpoint = kwargs.pop("endpoint", None) or camel_to_dash(resource.__name__)
        resource_class_args = kwargs.pop("resource_class_args", ())
        resource_class_kwargs = kwargs.pop("resource_class_kwargs", {})
        # NOTE: 'view_functions' is cleaned up from Blueprint class in Flask 1.0
        if endpoint in getattr(app, "view_functions", {}):
            previous_view_class = app.view_functions[endpoint].__dict__["view_class"]
            # if you override the endpoint with a different class, avoid the
            # collision by raising an exception
            if previous_view_class != resource:
                msg = "This endpoint (%s) is already set to the class %s."
                raise ValueError(msg % (endpoint, previous_view_class.__name__))
        resource.mediatypes = self.mediatypes_method() # Hacky
        resource.endpoint = endpoint
        # Wrap the view so plain (data, code, headers) returns become responses
        resource_func = self.output(
            resource.as_view(
                endpoint, self, *resource_class_args, **resource_class_kwargs
            )
        )
        # Apply Namespace and Api decorators to a resource
        for decorator in chain(namespace.decorators, self.decorators):
            resource_func = decorator(resource_func)
        for url in urls:
            # If this Api has a blueprint
            if self.blueprint:
                # And this Api has been setup
                if self.blueprint_setup:
                    # Set the rule to a string directly, as the blueprint is already
                    # set up.
                    self.blueprint_setup.add_url_rule(
                        url, view_func=resource_func, **kwargs
                    )
                    continue
                else:
                    # Set the rule to a function that expects the blueprint prefix
                    # to construct the final url. Allows deferment of url finalization
                    # in the case that the associated Blueprint has not yet been
                    # registered to an application, so we can wait for the registration
                    # prefix
                    rule = partial(self._complete_url, url)
            else:
                # If we've got no Blueprint, just build a url with no prefix
                rule = self._complete_url(url, "")
            # Add the url to the application or blueprint
            app.add_url_rule(rule, view_func=resource_func, **kwargs)
    def output(self, resource):
        """
        Wraps a resource (as a flask view function),
        for cases where the resource does not directly return a response object
        :param resource: The resource as a flask view function
        """
        @wraps(resource)
        def wrapper(*args, **kwargs):
            resp = resource(*args, **kwargs)
            # Real response objects pass through untouched
            if isinstance(resp, BaseResponse):
                return resp
            # Otherwise interpret the value as (data, code, headers)
            data, code, headers = unpack(resp)
            return self.make_response(data, code, headers=headers)
        return wrapper
    def make_response(self, data, *args, **kwargs):
        """
        Looks up the representation transformer for the requested media
        type, invoking the transformer to create a response object. This
        defaults to default_mediatype if no transformer is found for the
        requested mediatype. If default_mediatype is None, a 406 Not
        Acceptable response will be sent as per RFC 2616 section 14.1
        :param data: Python object containing response data to be transformed
        """
        # A fallback_mediatype kwarg (used by handle_error for 406s) beats
        # the API-wide default
        default_mediatype = (
            kwargs.pop("fallback_mediatype", None) or self.default_mediatype
        )
        mediatype = request.accept_mimetypes.best_match(
            self.representations, default=default_mediatype,
        )
        if mediatype is None:
            raise NotAcceptable()
        if mediatype in self.representations:
            resp = self.representations[mediatype](data, *args, **kwargs)
            resp.headers["Content-Type"] = mediatype
            return resp
        elif mediatype == "text/plain":
            # No registered transformer: text/plain gets a str() rendering
            resp = original_flask_make_response(str(data), *args, **kwargs)
            resp.headers["Content-Type"] = "text/plain"
            return resp
        else:
            # A mediatype matched but has no transformer: server-side bug
            raise InternalServerError()
def documentation(self, func):
"""A decorator to specify a view function for the documentation"""
self._doc_view = func
return func
    def render_root(self):
        # Default root view: the documentation normally takes over "/" via
        # _register_doc; without it the bare root answers 404.
        # abort is resolved on the default namespace through __getattr__.
        self.abort(HTTPStatus.NOT_FOUND)
def render_doc(self):
"""Override this method to customize the documentation page"""
if self._doc_view:
return self._doc_view()
elif not self._doc:
self.abort(HTTPStatus.NOT_FOUND)
return apidoc.ui_for(self)
def default_endpoint(self, resource, namespace):
"""
Provide a default endpoint for a resource on a given namespace.
Endpoints are ensured not to collide.
Override this method specify a custom algorithm for default endpoint.
:param Resource resource: the resource for which we want an endpoint
:param Namespace namespace: the namespace holding the resource
:returns str: An endpoint name
"""
endpoint = camel_to_dash(resource.__name__)
if namespace is not self.default_namespace:
endpoint = "{ns.name}_{endpoint}".format(ns=namespace, endpoint=endpoint)
if endpoint in self.endpoints:
suffix = 2
while True:
new_endpoint = "{base}_{suffix}".format(base=endpoint, suffix=suffix)
if new_endpoint not in self.endpoints:
endpoint = new_endpoint
break
suffix += 1
return endpoint
def get_ns_path(self, ns):
return self.ns_paths.get(ns)
def ns_urls(self, ns, urls):
path = self.get_ns_path(ns) or ns.path
return [path + url for url in urls]
    def add_namespace(self, ns, path=None):
        """
        This method registers resources from namespace for current instance of api.
        You can use argument path for definition custom prefix url for namespace.
        :param Namespace ns: the namespace
        :param path: registration prefix of namespace
        """
        if ns not in self.namespaces:
            self.namespaces.append(ns)
            # Link back so the namespace can reach the API (e.g. for logging)
            if self not in ns.apis:
                ns.apis.append(self)
        # Associate ns with prefix-path
        if path is not None:
            self.ns_paths[ns] = path
        # Register resources
        for r in ns.resources:
            urls = self.ns_urls(ns, r.urls)
            self.register_resource(ns, r.resource, *urls, **r.kwargs)
        # Register models
        for name, definition in six.iteritems(ns.models):
            self.models[name] = definition
        if not self.blueprint and self.app is not None:
            self._configure_namespace_logger(self.app, ns)
    def namespace(self, *args, **kwargs):
        """
        A namespace factory.
        :returns Namespace: a new namespace instance
        """
        # The API's `ordered` setting is inherited unless explicitly overridden
        kwargs["ordered"] = kwargs.get("ordered", self.ordered)
        ns = Namespace(*args, **kwargs)
        self.add_namespace(ns)
        return ns
def endpoint(self, name):
if self.blueprint:
return "{0}.{1}".format(self.blueprint.name, name)
else:
return name
    @property
    def specs_url(self):
        """
        The Swagger specifications absolute url (ie. `swagger.json`)
        :rtype: str
        """
        # Requires an active request/app context (flask.url_for)
        return url_for(self.endpoint("specs"), _external=True)
    @property
    def base_url(self):
        """
        The API base absolute url
        :rtype: str
        """
        # Requires an active request/app context (flask.url_for)
        return url_for(self.endpoint("root"), _external=True)
    @property
    def base_path(self):
        """
        The API path
        :rtype: str
        """
        # Same as base_url, but relative (no scheme/host)
        return url_for(self.endpoint("root"), _external=False)
    @cached_property
    def __schema__(self):
        """
        The Swagger specifications/schema for this API
        :returns dict: the schema as a serializable dict
        """
        if not self._schema:
            try:
                self._schema = Swagger(self).as_dict()
            except Exception:
                # Log the source exception for debugging purpose
                # and return an error message
                msg = "Unable to render schema"
                log.exception(msg) # This will provide a full traceback
                # NOTE: the error dict is returned WITHOUT being cached in
                # self._schema, so a later call retries schema generation
                return {"error": msg}
        return self._schema
    @property
    def _own_and_child_error_handlers(self):
        # Merge API-level handlers with every namespace's handlers; a
        # namespace handler for the same exception type wins (applied last).
        rv = {}
        rv.update(self.error_handlers)
        for ns in self.namespaces:
            for exception, handler in six.iteritems(ns.error_handlers):
                rv[exception] = handler
        return rv
def errorhandler(self, exception):
"""A decorator to register an error handler for a given exception"""
if inspect.isclass(exception) and issubclass(exception, Exception):
# Register an error handler for a given exception
def wrapper(func):
self.error_handlers[exception] = func
return func
return wrapper
else:
# Register the default error handler
self._default_error_handler = exception
return exception
def owns_endpoint(self, endpoint):
"""
Tests if an endpoint name (not path) belongs to this Api.
Takes into account the Blueprint name part of the endpoint name.
:param str endpoint: The name of the endpoint being checked
:return: bool
"""
if self.blueprint:
if endpoint.startswith(self.blueprint.name):
endpoint = endpoint.split(self.blueprint.name + ".", 1)[-1]
else:
return False
return endpoint in self.endpoints
    def _should_use_fr_error_handler(self):
        """
        Determine if error should be handled with FR or default Flask
        The goal is to return Flask error handlers for non-FR-related routes,
        and FR errors (with the correct media type) for FR endpoints. This
        method currently handles 404 and 405 errors.
        :return: bool
        """
        adapter = current_app.create_url_adapter(request)
        try:
            adapter.match()
        except MethodNotAllowed as e:
            # Check if the other HTTP methods at this url would hit the Api
            valid_route_method = e.valid_methods[0]
            rule, _ = adapter.match(method=valid_route_method, return_rule=True)
            return self.owns_endpoint(rule.endpoint)
        except NotFound:
            return self.catch_all_404s
        except Exception:
            # Werkzeug throws other kinds of exceptions, such as Redirect
            pass
        # NOTE: a successful match (or a swallowed exception) falls through
        # and implicitly returns None, which callers treat as falsy.
    def _has_fr_route(self):
        """Encapsulating the rules for whether the request was to a Flask endpoint"""
        # 404's, 405's, which might not have a url_rule
        if self._should_use_fr_error_handler():
            return True
        # for all other errors, just check if FR dispatched the route
        if not request.url_rule:
            return False
        return self.owns_endpoint(request.url_rule.endpoint)
def error_router(self, original_handler, e):
"""
This function decides whether the error occurred in a flask-restx
endpoint or not. If it happened in a flask-restx endpoint, our
handler will be dispatched. If it happened in an unrelated view, the
app's original error handler will be dispatched.
In the event that the error occurred in a flask-restx endpoint but
the local handler can't resolve the situation, the router will fall
back onto the original_handler as last resort.
:param function original_handler: the original Flask error handler for the app
:param Exception e: the exception raised while handling the request
"""
if self._has_fr_route():
try:
return self.handle_error(e)
except Exception as f:
return original_handler(f)
return original_handler(e)
    def handle_error(self, e):
        """
        Error handler for the API transforms a raised exception into a Flask response,
        with the appropriate HTTP status code and body.
        :param Exception e: the raised Exception object
        """
        got_request_exception.send(current_app._get_current_object(), exception=e)
        # When propagate_exceptions is set, do not return the exception to the
        # client if a handler is configured for the exception.
        if (
            not isinstance(e, HTTPException)
            and current_app.propagate_exceptions
            and not isinstance(e, tuple(self.error_handlers.keys()))
        ):
            exc_type, exc_value, tb = sys.exc_info()
            # Re-raise with the original traceback when possible
            if exc_value is e:
                raise
            else:
                raise e
        include_message_in_response = current_app.config.get(
            "ERROR_INCLUDE_MESSAGE", True
        )
        default_data = {}
        headers = Headers()
        # First matching registered handler (API-level or namespace-level) wins
        for typecheck, handler in six.iteritems(self._own_and_child_error_handlers):
            if isinstance(e, typecheck):
                result = handler(e)
                default_data, code, headers = unpack(
                    result, HTTPStatus.INTERNAL_SERVER_ERROR
                )
                break
        else:
            # for-else: no registered handler matched
            if isinstance(e, HTTPException):
                code = HTTPStatus(e.code)
                if include_message_in_response:
                    default_data = {"message": getattr(e, "description", code.phrase)}
                headers = e.get_response().headers
            elif self._default_error_handler:
                result = self._default_error_handler(e)
                default_data, code, headers = unpack(
                    result, HTTPStatus.INTERNAL_SERVER_ERROR
                )
            else:
                code = HTTPStatus.INTERNAL_SERVER_ERROR
                if include_message_in_response:
                    default_data = {
                        "message": code.phrase,
                    }
        if include_message_in_response:
            # Ensure a message is present, falling back to str(e)
            default_data["message"] = default_data.get("message", str(e))
        # An exception may carry its own payload in a `data` attribute
        data = getattr(e, "data", default_data)
        fallback_mediatype = None
        if code >= HTTPStatus.INTERNAL_SERVER_ERROR:
            exc_info = sys.exc_info()
            if exc_info[1] is None:
                exc_info = None
            current_app.log_exception(exc_info)
        elif (
            code == HTTPStatus.NOT_FOUND
            and current_app.config.get("ERROR_404_HELP", True)
            and include_message_in_response
        ):
            data["message"] = self._help_on_404(data.get("message", None))
        elif code == HTTPStatus.NOT_ACCEPTABLE and self.default_mediatype is None:
            # if we are handling NotAcceptable (406), make sure that
            # make_response uses a representation we support as the
            # default mediatype (so that make_response doesn't throw
            # another NotAcceptable error).
            supported_mediatypes = list(self.representations.keys())
            fallback_mediatype = (
                supported_mediatypes[0] if supported_mediatypes else "text/plain"
            )
        # Remove blacklisted headers
        for header in HEADERS_BLACKLIST:
            headers.pop(header, None)
        resp = self.make_response(
            data, code, headers, fallback_mediatype=fallback_mediatype
        )
        if code == HTTPStatus.UNAUTHORIZED:
            resp = self.unauthorized(resp)
        return resp
    def _help_on_404(self, message=None):
        # Map "normalized" rules (placeholders stripped by RE_RULES) back to
        # the original rule strings, then fuzzy-match the requested path
        rules = dict(
            [
                (RE_RULES.sub("", rule.rule), rule.rule)
                for rule in current_app.url_map.iter_rules()
            ]
        )
        close_matches = difflib.get_close_matches(request.path, rules.keys())
        if close_matches:
            # If we already have a message, add punctuation and continue it.
            message = "".join(
                (
                    (message.rstrip(".") + ". ") if message else "",
                    "You have requested this URI [",
                    request.path,
                    "] but did you mean ",
                    " or ".join((rules[match] for match in close_matches)),
                    " ?",
                )
            )
        return message
    def as_postman(self, urlvars=False, swagger=False):
        """
        Serialize the API as Postman collection (v1)
        :param bool urlvars: whether to include or not placeholders for query strings
        :param bool swagger: whether to include or not the swagger.json specifications
        """
        return PostmanCollectionV1(self, swagger=swagger).as_dict(urlvars=urlvars)
    @property
    def payload(self):
        """Store the input payload in the current request context"""
        # Delegates to Flask; requires a JSON request body
        return request.get_json()
    @property
    def refresolver(self):
        # Lazily built jsonschema RefResolver over the API's Swagger schema
        if not self._refresolver:
            self._refresolver = RefResolver.from_schema(self.__schema__)
        return self._refresolver
    @staticmethod
    def _blueprint_setup_add_url_rule_patch(
        blueprint_setup, rule, endpoint=None, view_func=None, **options
    ):
        """
        Method used to patch BlueprintSetupState.add_url_rule for setup
        state instance corresponding to this Api instance. Exists primarily
        to enable _complete_url's function.
        :param blueprint_setup: The BlueprintSetupState instance (self)
        :param rule: A string or callable that takes a string and returns a
            string(_complete_url) that is the url rule for the endpoint
            being registered
        :param endpoint: See BlueprintSetupState.add_url_rule
        :param view_func: See BlueprintSetupState.add_url_rule
        :param **options: See BlueprintSetupState.add_url_rule
        """
        # A callable rule is the deferred partial(_complete_url, ...) form
        if callable(rule):
            rule = rule(blueprint_setup.url_prefix)
        elif blueprint_setup.url_prefix:
            rule = blueprint_setup.url_prefix + rule
        options.setdefault("subdomain", blueprint_setup.subdomain)
        if endpoint is None:
            endpoint = _endpoint_from_view_func(view_func)
        defaults = blueprint_setup.url_defaults
        if "defaults" in options:
            defaults = dict(defaults, **options.pop("defaults"))
        blueprint_setup.app.add_url_rule(
            rule,
            "%s.%s" % (blueprint_setup.blueprint.name, endpoint),
            view_func,
            defaults=defaults,
            **options
        )
    def _deferred_blueprint_init(self, setup_state):
        """
        Synchronize prefix between blueprint/api and registration options, then
        perform initialization with setup_state.app :class:`flask.Flask` object.
        When a :class:`flask_restx.Api` object is initialized with a blueprint,
        this method is recorded on the blueprint to be run when the blueprint is later
        registered to a :class:`flask.Flask` object. This method also monkeypatches
        BlueprintSetupState.add_url_rule with _blueprint_setup_add_url_rule_patch.
        :param setup_state: The setup state object passed to deferred functions
            during blueprint registration
        :type setup_state: flask.blueprints.BlueprintSetupState
        """
        self.blueprint_setup = setup_state
        # Patch only once (the patched function carries this __name__)
        if setup_state.add_url_rule.__name__ != "_blueprint_setup_add_url_rule_patch":
            setup_state._original_add_url_rule = setup_state.add_url_rule
            setup_state.add_url_rule = MethodType(
                Api._blueprint_setup_add_url_rule_patch, setup_state
            )
        if not setup_state.first_registration:
            raise ValueError("flask-restx blueprints can only be registered once.")
        self._init_app(setup_state.app)
def mediatypes_method(self):
"""Return a method that returns a list of mediatypes"""
return lambda resource_cls: self.mediatypes() + [self.default_mediatype]
    def mediatypes(self):
        """Returns a list of requested mediatypes sent in the Accept header"""
        # Sorted by the client-specified quality factor, best first
        return [
            h
            for h, q in sorted(
                request.accept_mimetypes, key=operator.itemgetter(1), reverse=True
            )
        ]
def representation(self, mediatype):
"""
Allows additional representation transformers to be declared for the
api. Transformers are functions that must be decorated with this
method, passing the mediatype the transformer represents. Three
arguments are passed to the transformer:
* The data to be represented in the response body
* The http status code
* A dictionary of headers
The transformer should convert the data appropriately for the mediatype
and return a Flask response object.
Ex::
@api.representation('application/xml')
def xml(data, code, headers):
resp = make_response(convert_data_to_xml(data), code)
resp.headers.extend(headers)
return resp
"""
def wrapper(func):
self.representations[mediatype] = func
return func
return wrapper
def unauthorized(self, response):
"""Given a response, change it to ask for credentials"""
if self.serve_challenge_on_401:
realm = current_app.config.get("HTTP_BASIC_AUTH_REALM", "flask-restx")
challenge = '{0} realm="{1}"'.format("Basic", realm)
response.headers["WWW-Authenticate"] = challenge
return response
    def url_for(self, resource, **values):
        """
        Generates a URL to the given resource.
        Works like :func:`flask.url_for`.
        """
        endpoint = resource.endpoint
        # Qualify with the blueprint name when the Api is blueprint-mounted
        if self.blueprint:
            endpoint = "{0}.{1}".format(self.blueprint.name, endpoint)
        return url_for(endpoint, **values)
class SwaggerView(Resource):
    """Render the Swagger specifications as JSON"""
    def get(self):
        schema = self.api.__schema__
        # Schema generation failures surface as {"error": ...} -> report 500
        return (
            schema,
            HTTPStatus.INTERNAL_SERVER_ERROR if "error" in schema else HTTPStatus.OK,
        )
    def mediatypes(self):
        # The specifications are always served as JSON
        return ["application/json"]
def mask_parse_error_handler(error):
    """When a mask can't be parsed"""
    message = "Mask parse error: {0}".format(error)
    return {"message": message}, HTTPStatus.BAD_REQUEST
def mask_error_handler(error):
    """When any error occurs on mask"""
    message = "Mask error: {0}".format(error)
    return {"message": message}, HTTPStatus.BAD_REQUEST
| 37.449946 | 99 | 0.627145 |
27cd1e83c9c88595423cc7bb43a415f9dd8a2b43 | 345 | py | Python | zonepy/__init__.py | brclark-usgs/zonepy | d857fbace05855a2f20dc2c515885b2ce87f3999 | [
"BSD-3-Clause"
] | null | null | null | zonepy/__init__.py | brclark-usgs/zonepy | d857fbace05855a2f20dc2c515885b2ce87f3999 | [
"BSD-3-Clause"
] | 12 | 2018-01-31T21:36:07.000Z | 2020-03-10T14:43:29.000Z | zonepy/__init__.py | brclark-usgs/zonepy | d857fbace05855a2f20dc2c515885b2ce87f3999 | [
"BSD-3-Clause"
] | null | null | null | '''
Zonal Analysis
Developed by Brian Clark, Katherine Knierim, and Leslie Duncan. Portions of
this code were modified from Copyright 2013 Matthew Perry, which were
licensed under BSD-3 and included in this repo.
'''
__name__ = 'zonepy'
__author__ = 'Brian Clark, Katherine Kneirim, and Leslie Duncan'
# imports
from .zn import ZoneClass
| 21.5625 | 76 | 0.765217 |
be2ca34c062881367d144bc04d9b9a326e48c904 | 386 | py | Python | presentation/controller_group.py | NinjaGeeks/cli-python-IOTNinja | 444b6f84587580d7659c60773a636188654be074 | [
"MIT"
] | null | null | null | presentation/controller_group.py | NinjaGeeks/cli-python-IOTNinja | 444b6f84587580d7659c60773a636188654be074 | [
"MIT"
] | null | null | null | presentation/controller_group.py | NinjaGeeks/cli-python-IOTNinja | 444b6f84587580d7659c60773a636188654be074 | [
"MIT"
] | null | null | null | from base_controllers import ControllerGroup
# As soon as possible
class VerticalControllerGroup(ControllerGroup):
    # Controller group laid out vertically.
    def __init__(self, cg_id):
        super().__init__(cg_id)
        # NOTE(review): name mangling rewrites this call to
        # _VerticalControllerGroup__define_type, which this class does not
        # define; if __define_type lives on ControllerGroup it is mangled to
        # _ControllerGroup__define_type there, so this call would raise
        # AttributeError. Confirm the intended method name on the base class.
        self.__define_type("verticalGroup")
class HorizontalControllerGroup(ControllerGroup):
    # Controller group laid out horizontally.
    def __init__(self, cg_id):
        super().__init__(cg_id)
        # NOTE(review): same name-mangling concern as VerticalControllerGroup:
        # this resolves to _HorizontalControllerGroup__define_type, which is
        # not defined here -- verify against the base class implementation.
        self.__define_type("horizontalGroup")
| 24.125 | 49 | 0.733161 |
406df48942fe7468e5c68daadba76bc4c1c17b5e | 30,352 | py | Python | dvaapp/tasks.py | pune-lug/DeepVideoAnalytics | 2650037040dca49b0f537df576af123dae8cef97 | [
"Apache-2.0"
] | null | null | null | dvaapp/tasks.py | pune-lug/DeepVideoAnalytics | 2650037040dca49b0f537df576af123dae8cef97 | [
"Apache-2.0"
] | null | null | null | dvaapp/tasks.py | pune-lug/DeepVideoAnalytics | 2650037040dca49b0f537df576af123dae8cef97 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
import subprocess,sys,shutil,os,glob,time,logging
from django.conf import settings
from dva.celery import app
from .models import Video, Frame, Detection, TEvent, Query, IndexEntries,QueryResults, Annotation, VLabel, Export, VDNDataset, S3Export, S3Import, Clusters, ClusterCodes
from dvalib import entity
from dvalib import detector
from dvalib import indexer
from collections import defaultdict
import calendar
import requests
from PIL import Image
from scipy import misc
import json
import celery
import zipfile
from . import serializers
import boto3
from botocore.exceptions import ClientError
from dvalib import clustering
def process_video_next(video_id, current_task_name):
    """Queue every follow-up task configured for *current_task_name* against
    *video_id*, routing each task to its configured celery queue."""
    if current_task_name not in settings.POST_OPERATION_TASKS:
        return
    for follow_up in settings.POST_OPERATION_TASKS[current_task_name]:
        app.send_task(follow_up, args=[video_id, ], queue=settings.TASK_NAMES_TO_QUEUE[follow_up])
class IndexerTask(celery.Task):
    """Celery base task caching heavyweight visual indexers and clusterers as
    class-level singletons shared by all tasks in the worker process."""
    _visual_indexer = None
    _clusterer = None
    @property
    def visual_indexer(self):
        """Lazily build and cache the {algorithm: indexer} mapping."""
        if IndexerTask._visual_indexer is None:
            IndexerTask._visual_indexer = {'inception': indexer.InceptionIndexer(),
                                           'facenet': indexer.FacenetIndexer(),
                                           'alexnet': indexer.AlexnetIndexer()}
        return IndexerTask._visual_indexer
    @property
    def clusterer(self):
        """Lazily initialize the per-algorithm clusterer slots (filled by load_clusterer)."""
        if IndexerTask._clusterer is None:
            IndexerTask._clusterer = {'inception': None,'facenet': None,'alexnet': None}
        return IndexerTask._clusterer
    def refresh_index(self,index_name):
        """Load into memory every IndexEntries for *index_name* not already loaded."""
        index_entries = IndexEntries.objects.all()
        visual_index = self.visual_indexer[index_name]
        for index_entry in index_entries:
            if index_entry.pk not in visual_index.loaded_entries and index_entry.algorithm == index_name:
                fname = "{}/{}/indexes/{}".format(settings.MEDIA_ROOT, index_entry.video_id, index_entry.features_file_name)
                vectors = indexer.np.load(fname)
                # NOTE(review): file() is the Python 2 builtin -- this module
                # appears to target py2; use open() if ported to py3.
                vector_entries = json.load(file("{}/{}/indexes/{}".format(settings.MEDIA_ROOT, index_entry.video_id, index_entry.entries_file_name)))
                logging.info("Starting {} in {}".format(index_entry.video_id, visual_index.name))
                start_index = visual_index.findex
                try:
                    visual_index.load_index(vectors, vector_entries)
                except:
                    # Best-effort load: a corrupt entry is logged and skipped
                    logging.info("ERROR Failed to load {} ".format(index_entry.video_id))
                visual_index.loaded_entries[index_entry.pk] = indexer.IndexRange(start=start_index,end=visual_index.findex-1)
                # Fixed: the format string previously had only three
                # placeholders, silently dropping the range start/end args.
                logging.info("finished {} in {}, current shape {}, range {} to {}".format(index_entry.video_id,
                                                                                 visual_index.name,
                                                                                 visual_index.index.shape,
                                                                                 visual_index.loaded_entries[index_entry.pk].start,
                                                                                 visual_index.loaded_entries[index_entry.pk].end,
                                                                                 ))
    def load_clusterer(self,algorithm):
        """Load the most recent completed Clusters model for *algorithm*, if any."""
        dc = Clusters.objects.all().filter(completed=True,indexer_algorithm=algorithm).last()
        if dc:
            model_file_name = "{}/clusters/{}.proto".format(settings.MEDIA_ROOT,dc.pk)
            IndexerTask._clusterer[algorithm] = clustering.Clustering(fnames=[],m=None,v=None,sub=None,
                                                                      n_components=None,
                                                                      model_proto_filename=model_file_name,dc=dc)
            logging.warning("loading clusterer {}".format(model_file_name))
            IndexerTask._clusterer[algorithm].load()
        else:
            # Without a trained clusterer, queries fall back to exact search
            logging.warning("No clusterer found switching to exact search for {}".format(algorithm))
@app.task(name="inception_index_by_id",base=IndexerTask)
def inception_index_by_id(video_id):
    """Index every frame of the video with the Inception indexer, record an
    IndexEntries row, and bracket the work with a TEvent for bookkeeping."""
    start = TEvent()
    start.video_id = video_id
    start.started = True
    start.operation = inception_index_by_id.name
    start.save()
    start_time = time.time()
    dv = Video.objects.get(id=video_id)
    video = entity.WVideo(dv, settings.MEDIA_ROOT)
    frames = Frame.objects.all().filter(video=dv)
    visual_index = inception_index_by_id.visual_indexer['inception']
    index_name, index_results, feat_fname, entries_fname = video.index_frames(frames,visual_index)
    i = IndexEntries()
    i.video = dv
    i.count = len(index_results)
    i.contains_frames = True
    i.detection_name = 'Frame'
    i.algorithm = index_name
    # Only file names are stored; paths are reconstructed from MEDIA_ROOT
    i.entries_file_name = entries_fname.split('/')[-1]
    i.features_file_name = feat_fname.split('/')[-1]
    i.save()
    # Trigger any follow-up tasks configured for this operation
    process_video_next(video_id, start.operation)
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
@app.task(name="inception_index_ssd_detection_by_id",base=IndexerTask)
def inception_index_ssd_detection_by_id(video_id):
    """Index SSD detection crops (at least 50x50 px) of the video with the
    Inception indexer and record an IndexEntries row, bracketed by a TEvent."""
    start = TEvent()
    start.video_id = video_id
    start.started = True
    start.operation = inception_index_ssd_detection_by_id.name
    start.save()
    start_time = time.time()
    dv = Video.objects.get(id=video_id)
    video = entity.WVideo(dv, settings.MEDIA_ROOT)
    # Only reasonably sized SSD detections are worth indexing
    detections = Detection.objects.all().filter(video=dv,object_name__startswith='SSD_',w__gte=50,h__gte=50)
    logging.info("Indexing {} SSD detections".format(detections.count()))
    visual_index = inception_index_ssd_detection_by_id.visual_indexer['inception']
    index_name, index_results, feat_fname, entries_fname = video.index_detections(detections,'SSD',visual_index)
    i = IndexEntries()
    i.video = dv
    i.count = len(index_results)
    i.contains_detections = True
    i.detection_name = 'SSD'
    i.algorithm = index_name
    # Only file names are stored; paths are reconstructed from MEDIA_ROOT
    i.entries_file_name = entries_fname.split('/')[-1]
    i.features_file_name = feat_fname.split('/')[-1]
    i.save()
    process_video_next(video_id, start.operation)
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
@app.task(name="alexnet_index_by_id",base=IndexerTask)
def alexnet_index_by_id(video_id):
    """Index every frame of the video with the Alexnet indexer; mirrors
    inception_index_by_id except for the indexer used."""
    start = TEvent()
    start.video_id = video_id
    start.started = True
    start.operation = alexnet_index_by_id.name
    start.save()
    start_time = time.time()
    dv = Video.objects.get(id=video_id)
    video = entity.WVideo(dv, settings.MEDIA_ROOT)
    frames = Frame.objects.all().filter(video=dv)
    visual_index = alexnet_index_by_id.visual_indexer['alexnet']
    index_name, index_results, feat_fname, entries_fname = video.index_frames(frames,visual_index)
    i = IndexEntries()
    i.video = dv
    i.count = len(index_results)
    i.contains_frames = True
    i.detection_name = 'Frame'
    i.algorithm = index_name
    # Only file names are stored; paths are reconstructed from MEDIA_ROOT
    i.entries_file_name = entries_fname.split('/')[-1]
    i.features_file_name = feat_fname.split('/')[-1]
    i.save()
    process_video_next(video_id, start.operation)
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
def query_approximate(q, n, visual_index, clusterer):
    """Approximate nearest-neighbour search using precomputed cluster codes.

    Applies the visual indexer to the query image, asks the clusterer for the
    ``n`` closest entries and resolves each returned searcher index to its
    ClusterCodes row.

    :param q: query wrapper exposing ``local_path`` to the query image
    :param n: number of results requested
    :param visual_index: indexer used to embed the query image
    :param clusterer: clusterer exposing ``apply`` and its ``dc`` Clusters row
    :return: dict mapping the index name to a ranked result list
    """
    vector = visual_index.apply(q.local_path)
    results = {visual_index.name: []}
    coarse, fine, results_indexes = clusterer.apply(vector, n)
    for i, k in enumerate(results_indexes[0]):
        e = ClusterCodes.objects.get(searcher_index=k.id, clusters=clusterer.dc)
        # The two original branches were identical except for 'type';
        # an entry is a detection crop when detection_id is set, else a frame.
        results[visual_index.name].append({
            'rank': i + 1,
            # NOTE(review): 'dist' is the result position, not a real distance.
            'dist': i,
            'detection_primary_key': e.detection_id,
            'frame_index': e.frame.frame_index,
            'frame_primary_key': e.frame_id,
            'video_primary_key': e.video_id,
            'type': 'detection' if e.detection_id else 'frame',
        })
    return results
@app.task(name="inception_query_by_image",base=IndexerTask)
def inception_query_by_image(query_id):
    """Answer an image query against the Inception index.

    Uses the approximate (clustered) search when the query requests it and a
    clusterer is available; otherwise falls back to exact search over the
    refreshed index. Results are stored as QueryResults rows and returned.

    :param query_id: primary key of the Query to answer
    :return: results dict keyed by algorithm name
    """
    dq = Query.objects.get(id=query_id)
    start = TEvent()
    start.video_id = Video.objects.get(parent_query=dq).pk
    start.started = True
    start.operation = inception_query_by_image.name
    start.save()
    start_time = time.time()
    inception = inception_query_by_image.visual_indexer['inception']
    Q = entity.WQuery(dquery=dq, media_dir=settings.MEDIA_ROOT,visual_index=inception)
    exact = True # by default run exact search
    if dq.approximate:
        # Lazily load the clusterer on first approximate query.
        if inception_query_by_image.clusterer['inception'] is None:
            inception_query_by_image.load_clusterer('inception')
        clusterer = inception_query_by_image.clusterer['inception']
        if clusterer:
            results = query_approximate(Q,dq.count,inception,clusterer)
            exact = False
    if exact:
        inception_query_by_image.refresh_index('inception')
        results = Q.find(dq.count)
    dq.results = True
    dq.results_metadata = json.dumps(results)
    for algo,rlist in results.iteritems():
        for r in rlist:
            qr = QueryResults()
            qr.query = dq
            # Approximate results may or may not carry a detection reference.
            if 'detection_primary_key' in r:
                qr.detection_id = r['detection_primary_key']
            qr.frame_id = r['frame_primary_key']
            qr.video_id = r['video_primary_key']
            qr.algorithm = algo
            qr.rank = r['rank']
            qr.distance = r['dist']
            qr.save()
    dq.save()
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
    return results
@app.task(name="alexnet_query_by_image",base=IndexerTask)
def alexnet_query_by_image(query_id):
    """Answer an image query with exact search over the AlexNet index.

    Unlike the inception variant there is no approximate path; the index is
    always refreshed and searched exactly. Top 10 results are stored as
    QueryResults rows and returned.

    :param query_id: primary key of the Query to answer
    :return: results dict keyed by algorithm name
    """
    dq = Query.objects.get(id=query_id)
    start = TEvent()
    start.video_id = Video.objects.get(parent_query=dq).pk
    start.started = True
    start.operation = alexnet_query_by_image.name
    start.save()
    start_time = time.time()
    alexnet_query_by_image.refresh_index('alexnet')
    alexnet = alexnet_query_by_image.visual_indexer['alexnet']
    Q = entity.WQuery(dquery=dq, media_dir=settings.MEDIA_ROOT,visual_index=alexnet)
    results = Q.find(10)
    dq.results = True
    dq.results_metadata = json.dumps(results)
    for algo,rlist in results.iteritems():
        for r in rlist:
            qr = QueryResults()
            qr.query = dq
            qr.frame_id = r['frame_primary_key']
            qr.video_id = r['video_primary_key']
            qr.algorithm = algo
            qr.rank = r['rank']
            qr.distance = r['dist']
            qr.save()
    dq.save()
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
    return results
@app.task(name="facenet_query_by_image",base=IndexerTask)
def facenet_query_by_image(query_id):
    """Answer an image query against the Facenet (face) index.

    Supports approximate search via a clusterer like the inception task.
    Every result is expected to reference a face Detection row, whose frame
    is used for the stored QueryResults.

    :param query_id: primary key of the Query to answer
    :return: results dict keyed by algorithm name
    """
    dq = Query.objects.get(id=query_id)
    start = TEvent()
    start.video_id = Video.objects.get(parent_query=dq).pk
    start.started = True
    start.operation = facenet_query_by_image.name
    start.save()
    start_time = time.time()
    facenet = facenet_query_by_image.visual_indexer['facenet']
    Q = entity.WQuery(dquery=dq, media_dir=settings.MEDIA_ROOT,visual_index=facenet)
    exact = True
    if dq.approximate:
        # Lazily load the clusterer on first approximate query.
        if facenet_query_by_image.clusterer['facenet'] is None:
            facenet_query_by_image.load_clusterer('facenet')
        clusterer = facenet_query_by_image.clusterer['facenet']
        if clusterer:
            results = query_approximate(Q,dq.count,facenet,clusterer)
            exact = False
    if exact:
        facenet_query_by_image.refresh_index('facenet')
        results = Q.find(dq.count)
    for algo,rlist in results.iteritems():
        for r in rlist:
            qr = QueryResults()
            qr.query = dq
            # Face results always refer to a detection crop; resolve it to
            # recover the owning frame.
            dd = Detection.objects.get(pk=r['detection_primary_key'])
            qr.detection = dd
            qr.frame_id = dd.frame_id
            qr.video_id = r['video_primary_key']
            qr.algorithm = algo
            qr.rank = r['rank']
            qr.distance = r['dist']
            qr.save()
    dq.results = True
    dq.save()
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
    return results
def set_directory_labels(frames,dv):
    """Create labels and full-frame annotations from dataset directory names.

    Each path component of a named frame's subdirectory (after the first)
    becomes a label; every frame under that directory gets a full-frame
    Annotation pointing at the DIRECTORY-sourced VLabel.

    :param frames: iterable of extracted frame wrappers (``name``, ``subdir``,
        ``primary_key`` attributes)
    :param dv: Video model instance the frames belong to
    """
    labels_to_frame = defaultdict(set)
    for f in frames:
        if f.name:
            for l in f.subdir.split('/')[1:]:
                if l.strip():
                    labels_to_frame[l].add(f.primary_key)
    for l in labels_to_frame:
        label_object, created = VLabel.objects.get_or_create(label_name=l,source=VLabel.DIRECTORY,video=dv)
        # Also ensure a UI-sourced twin exists; its instance is not used here.
        _, created = VLabel.objects.get_or_create(label_name=l, source=VLabel.UI,video=dv)
        for fpk in labels_to_frame[l]:
            a = Annotation()
            a.full_frame = True
            a.video = dv
            a.frame_id = fpk
            a.label_parent = label_object
            a.label = l
            a.save()
@app.task(name="extract_frames_by_id")
def extract_frames(video_id,rescale=True):
    """Extract frames from a video (or dataset zip) and register them.

    Gathers video metadata for non-dataset videos, extracts (optionally
    rescaled) frames to disk, creates a Frame row per extracted frame and
    derives directory-based labels for dataset frames.

    :param video_id: primary key of the Video to process
    :param rescale: whether to rescale frames (disabled via RESCALE_DISABLE)
    :return: 0 on success
    """
    start = TEvent()
    start.video_id = video_id
    start.started = True
    start.operation = extract_frames.name
    start.save()
    start_time = time.time()
    dv = Video.objects.get(id=video_id)
    v = entity.WVideo(dvideo=dv, media_dir=settings.MEDIA_ROOT)
    time.sleep(3) # otherwise ffprobe randomly fails
    if not dv.dataset:
        v.get_metadata()
        dv.metadata = v.metadata
        dv.length_in_seconds = v.duration
        dv.height = v.height
        dv.width = v.width
        dv.save()
    # Environment override to keep original frame resolution.
    if 'RESCALE_DISABLE' in os.environ:
        rescale = False
    frames = v.extract_frames(rescale)
    dv.frames = len(frames)
    dv.save()
    for f in frames:
        df = Frame()
        df.frame_index = f.frame_index
        df.video = dv
        if f.name:
            # Truncate to the model's field length; flatten subdir separators.
            df.name = f.name[:150]
            df.subdir = f.subdir.replace('/',' ')
        df.save()
        f.primary_key = df.pk
    set_directory_labels(frames,dv)
    process_video_next(video_id,start.operation)
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
    if dv.dataset:
        # The source zip has been fully extracted; reclaim the disk space.
        os.remove("{}/{}/video/{}.zip".format(settings.MEDIA_ROOT, dv.pk, dv.pk))
    return 0
@app.task(name="perform_yolo_detection_by_id")
def perform_yolo_detection_by_id(video_id):
    """Run YOLO detection for a video via a fabric subprocess.

    The detector runs in a separate process (``fab yolo_detect:<id>``) so a
    GPU crash cannot take down the worker; failure is recorded on the TEvent
    and re-raised.

    :param video_id: primary key of the Video to process
    :return: 0 on success
    """
    start = TEvent()
    start.video_id = video_id
    start.started = True
    start.operation = perform_yolo_detection_by_id.name
    start.save()
    start_time = time.time()
    detector = subprocess.Popen(['fab','yolo_detect:{}'.format(video_id)],cwd=os.path.join(os.path.abspath(__file__).split('tasks.py')[0],'../'))
    detector.wait()
    if detector.returncode != 0:
        start.errored = True
        start.error_message = "fab yolo_detect failed with return code {}".format(detector.returncode)
        start.seconds = time.time() - start_time
        start.save()
        # Python 2 raise syntax preserved; module targets Python 2.
        raise ValueError,start.error_message
    process_video_next(video_id,start.operation)
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
    return 0
@app.task(name="perform_ssd_detection_by_id")
def perform_ssd_detection_by_id(video_id):
    """Run SSD detection for a video via a fabric subprocess.

    Same isolation pattern as the YOLO task: the detector runs out of
    process and a non-zero return code is recorded and re-raised.

    :param video_id: primary key of the Video to process
    :return: 0 on success
    """
    start = TEvent()
    start.video_id = video_id
    start.started = True
    start.operation = perform_ssd_detection_by_id.name
    start.save()
    start_time = time.time()
    detector = subprocess.Popen(['fab','ssd_detect:{}'.format(video_id)],cwd=os.path.join(os.path.abspath(__file__).split('tasks.py')[0],'../'))
    detector.wait()
    if detector.returncode != 0:
        start.errored = True
        start.error_message = "fab ssd_detect failed with return code {}".format(detector.returncode)
        start.seconds = time.time() - start_time
        start.save()
        # Python 2 raise syntax preserved; module targets Python 2.
        raise ValueError, start.error_message
    process_video_next(video_id,start.operation)
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
    return 0
@app.task(name="perform_face_detection_indexing_by_id")
def perform_face_detection_indexing_by_id(video_id):
    """Run face detection + indexing for a video via a fabric subprocess.

    Delegates to ``fab perform_face_detection:<id>``; failures are recorded
    on the TEvent and re-raised.

    :param video_id: primary key of the Video to process
    :return: 0 on success
    """
    start = TEvent()
    start.video_id = video_id
    start.started = True
    start.operation = perform_face_detection_indexing_by_id.name
    start.save()
    start_time = time.time()
    face_detector = subprocess.Popen(['fab','perform_face_detection:{}'.format(video_id)],cwd=os.path.join(os.path.abspath(__file__).split('tasks.py')[0],'../'))
    face_detector.wait()
    if face_detector.returncode != 0:
        start.errored = True
        start.error_message = "fab perform_face_detection failed with return code {}".format(face_detector.returncode)
        start.seconds = time.time() - start_time
        start.save()
        # Python 2 raise syntax preserved; module targets Python 2.
        raise ValueError, start.error_message
    process_video_next(video_id,start.operation)
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
    return 0
def perform_face_indexing(video_id):
    """Detect faces with MTCNN and index the aligned crops with Facenet.

    For every frame of the video: detect faces, store a Detection row and
    the aligned crop image per face, then build a Facenet index over all
    crops and record it as an IndexEntries row.

    :param video_id: primary key of the Video to process
    """
    face_indexer = indexer.FacenetIndexer()
    dv = Video.objects.get(id=video_id)
    video = entity.WVideo(dv, settings.MEDIA_ROOT)
    frames = Frame.objects.all().filter(video=dv)
    wframes = [entity.WFrame(video=video, frame_index=df.frame_index, primary_key=df.pk) for df in frames]
    # Map frame image path back to the Frame primary key for detections.
    input_paths = {f.local_path():f.primary_key for f in wframes}
    faces_dir = '{}/{}/detections'.format(settings.MEDIA_ROOT,video_id)
    indexes_dir = '{}/{}/indexes'.format(settings.MEDIA_ROOT,video_id)
    face_detector = detector.FaceDetector()
    aligned_paths = face_detector.detect(wframes)
    logging.info(len(aligned_paths))
    faces = []
    faces_to_pk = {}
    count = 0
    for path,v in aligned_paths.iteritems():
        for scaled_img,bb in v:
            d = Detection()
            d.video = dv
            d.confidence = 100.0
            d.frame_id = input_paths[path]
            d.object_name = "mtcnn_face"
            left, top, right, bottom = bb[0], bb[1], bb[2], bb[3]
            d.y = top
            d.x = left
            d.w = right-left
            d.h = bottom-top
            d.save()
            face_path = '{}/{}.jpg'.format(faces_dir,d.pk)
            # NOTE(review): face_path already contains faces_dir, so this
            # join looks redundant — confirm os.path.join collapses it.
            output_filename = os.path.join(faces_dir,face_path)
            misc.imsave(output_filename, scaled_img)
            faces.append(face_path)
            faces_to_pk[face_path] = d.pk
            count += 1
    # Refresh first so concurrent updates to the counter are not clobbered.
    dv.refresh_from_db()
    dv.detections = dv.detections + count
    dv.save()
    path_count, emb_array, entries,feat_fname, entries_fname = face_indexer.index_faces(faces,faces_to_pk,indexes_dir,video_id)
    i = IndexEntries()
    i.video = dv
    i.count = len(entries)
    i.contains_frames = False
    i.contains_detections = True
    i.detection_name = "Face"
    i.algorithm = 'facenet'
    i.entries_file_name = entries_fname.split('/')[-1]
    i.features_file_name = feat_fname.split('/')[-1]
    i.save()
@app.task(name="export_video_by_id")
def export_video_by_id(video_id):
    """Export a video's media directory plus serialized DB rows as a zip.

    Copies the video directory into exports/, dumps the serialized video
    (table_data.json) next to it, zips the tree and removes the staging
    copy. Failures of the zip subprocess are recorded and re-raised.

    :param video_id: primary key of the Video to export
    :return: name of the produced zip file
    """
    start = TEvent()
    start.video_id = video_id
    start.started = True
    start.operation = export_video_by_id.name
    start.save()
    start_time = time.time()
    video_obj = Video.objects.get(pk=video_id)
    export = Export()
    export.video = video_obj
    file_name = '{}_{}.dva_export.zip'.format(video_id, int(calendar.timegm(time.gmtime())))
    export.file_name = file_name
    export.save()
    try:
        os.mkdir("{}/{}".format(settings.MEDIA_ROOT,'exports'))
    except:
        # Best effort: the exports directory usually exists already.
        pass
    outdirname = "{}/exports/{}".format(settings.MEDIA_ROOT,video_id)
    if os.path.isdir(outdirname):
        # Remove any leftovers from a previous failed export.
        shutil.rmtree(outdirname)
    shutil.copytree('{}/{}'.format(settings.MEDIA_ROOT,video_id),"{}/exports/{}".format(settings.MEDIA_ROOT,video_id))
    a = serializers.VideoExportSerializer(instance=video_obj)
    # ``file`` is the Python 2 builtin open().
    with file("{}/exports/{}/table_data.json".format(settings.MEDIA_ROOT,video_id),'w') as output:
        json.dump(a.data,output)
    zipper = subprocess.Popen(['zip',file_name,'-r','{}'.format(video_id)],cwd='{}/exports/'.format(settings.MEDIA_ROOT))
    zipper.wait()
    if zipper.returncode != 0:
        start.errored = True
        start.error_message = "Could not zip {}".format(zipper.returncode)
        start.seconds = time.time() - start_time
        start.save()
        # Python 2 raise syntax preserved; module targets Python 2.
        raise ValueError, start.error_message
    shutil.rmtree("{}/exports/{}".format(settings.MEDIA_ROOT,video_id))
    export.completed = True
    export.save()
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
    return export.file_name
@app.task(name="import_video_by_id")
def import_video_by_id(video_id):
    """Import a previously exported video archive into this deployment.

    For VDN datasets that were not uploaded directly: either delegates to an
    S3 import task (requester-pays buckets) or downloads the archive over
    HTTP. The zip is then extracted, flattened one directory level, and the
    serialized rows in table_data.json are imported.

    :param video_id: primary key of the Video to import
    :return: 0 when delegated to the S3 import task, otherwise None
    """
    start = TEvent()
    start.video_id = video_id
    start.started = True
    start.operation = import_video_by_id.name
    start.save()
    start_time = time.time()
    video_obj = Video.objects.get(pk=video_id)
    if video_obj.vdn_dataset and not video_obj.uploaded:
        output_filename = "{}/{}/{}.zip".format(settings.MEDIA_ROOT,video_obj.pk,video_obj.pk)
        if video_obj.vdn_dataset.aws_requester_pays:
            # Requester-pays buckets need boto credentials; hand off to the
            # dedicated S3 import task and finish this one.
            s3import = S3Import()
            s3import.video = video_obj
            s3import.key = video_obj.vdn_dataset.aws_key
            s3import.region = video_obj.vdn_dataset.aws_region
            s3import.bucket = video_obj.vdn_dataset.aws_bucket
            s3import.requester_pays = True
            s3import.save()
            task_name = "import_video_from_s3"
            app.send_task(task_name, args=[s3import.pk, ], queue=settings.TASK_NAMES_TO_QUEUE[task_name])
            start.completed = True
            start.seconds = time.time() - start_time
            start.save()
            return 0
        else:
            # Dropbox share links need ?dl=1 to serve the raw file.
            if 'www.dropbox.com' in video_obj.vdn_dataset.download_url and not video_obj.vdn_dataset.download_url.endswith('?dl=1'):
                r = requests.get(video_obj.vdn_dataset.download_url+'?dl=1')
            else:
                r = requests.get(video_obj.vdn_dataset.download_url)
            with open(output_filename, 'wb') as f:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
                r.close()
            video_obj.uploaded = True
            video_obj.save()
    zipf = zipfile.ZipFile("{}/{}/{}.zip".format(settings.MEDIA_ROOT, video_id, video_id), 'r')
    zipf.extractall("{}/{}/".format(settings.MEDIA_ROOT, video_id))
    zipf.close()
    video_root_dir = "{}/{}/".format(settings.MEDIA_ROOT, video_id)
    old_key = None
    # The export zips contain one top-level directory; lift its contents up.
    for k in os.listdir(video_root_dir):
        unzipped_dir = "{}{}".format(video_root_dir, k)
        if os.path.isdir(unzipped_dir):
            for subdir in os.listdir(unzipped_dir):
                shutil.move("{}/{}".format(unzipped_dir,subdir),"{}".format(video_root_dir))
            shutil.rmtree(unzipped_dir)
            break
    with open("{}/{}/table_data.json".format(settings.MEDIA_ROOT, video_id)) as input_json:
        video_json = json.load(input_json)
    serializers.import_video_json(video_obj,video_json,video_root_dir)
    source_zip = "{}/{}.zip".format(video_root_dir, video_obj.pk)
    os.remove(source_zip)
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
def perform_export(s3_export):
    """Sync a video's media directory to an S3 bucket.

    Creates the bucket if needed, refuses to overwrite an existing export
    (keyed by table_data.json presence), dumps the serialized video rows and
    runs ``aws s3 sync``.

    :param s3_export: S3Export model instance describing bucket/key/region
    :return: (returncode, error_message) tuple; (-1, msg) if the key exists
    """
    s3 = boto3.resource('s3')
    if s3_export.region == 'us-east-1':
        # us-east-1 rejects an explicit LocationConstraint.
        s3.create_bucket(Bucket=s3_export.bucket)
    else:
        s3.create_bucket(Bucket=s3_export.bucket, CreateBucketConfiguration={'LocationConstraint': s3_export.region})
    time.sleep(20) # wait for it to create the bucket
    path = "{}/{}/".format(settings.MEDIA_ROOT,s3_export.video.pk)
    a = serializers.VideoExportSerializer(instance=s3_export.video)
    exists = False
    try:
        s3.Object(s3_export.bucket,'{}/table_data.json'.format(s3_export.key).replace('//','/')).load()
    except ClientError as e:
        if e.response['Error']['Code'] == "404":
            exists = False
        else:
            raise
    else:
        # The marker object loaded, so this key was already exported.
        return -1,"Error key already exists"
    # ``file`` is the Python 2 builtin open().
    with file("{}/{}/table_data.json".format(settings.MEDIA_ROOT,s3_export.video.pk),'w') as output:
        json.dump(a.data,output)
    upload = subprocess.Popen(args=["aws", "s3", "sync", ".", "s3://{}/{}/".format(s3_export.bucket,s3_export.key)],cwd=path)
    upload.communicate()
    upload.wait()
    s3_export.completed = True
    s3_export.save()
    return upload.returncode,""
@app.task(name="backup_video_to_s3")
def backup_video_to_s3(s3_export_id):
    """Celery wrapper around perform_export for backups.

    Records success/failure of the export on a TEvent row.

    :param s3_export_id: primary key of the S3Export describing the target
    """
    s3_export = S3Export.objects.get(pk=s3_export_id)
    start = TEvent()
    start.video_id = s3_export.video_id
    start.started = True
    start.operation = backup_video_to_s3.name
    start.save()
    start_time = time.time()
    returncode, errormsg = perform_export(s3_export)
    if returncode == 0:
        start.completed = True
    else:
        start.errored = True
        start.error_message = errormsg
    start.seconds = time.time() - start_time
    start.save()
@app.task(name="push_video_to_vdn_s3")
def push_video_to_vdn_s3(s3_export_id):
    """Celery wrapper around perform_export for publishing to VDN.

    Identical flow to backup_video_to_s3; kept as a separate task so the
    two operations are distinguishable in TEvent history and task routing.

    :param s3_export_id: primary key of the S3Export describing the target
    """
    s3_export = S3Export.objects.get(pk=s3_export_id)
    start = TEvent()
    start.video_id = s3_export.video_id
    start.started = True
    start.operation = push_video_to_vdn_s3.name
    start.save()
    start_time = time.time()
    returncode, errormsg = perform_export(s3_export)
    if returncode == 0:
        start.completed = True
    else:
        start.errored = True
        start.error_message = errormsg
    start.seconds = time.time() - start_time
    start.save()
def download_dir(client, resource, dist, local, bucket):
    """Recursively download an S3 prefix to a local directory (requester pays).

    Taken from http://stackoverflow.com/questions/31918960/boto3-to-download-all-files-from-a-s3-bucket
    :param client: boto3 S3 client (used for listing)
    :param resource: boto3 S3 resource (used for downloading)
    :param dist: S3 key prefix to download
    :param local: local destination directory
    :param bucket: S3 bucket name
    :return: None
    """
    paginator = client.get_paginator('list_objects')
    for result in paginator.paginate(Bucket=bucket, Delimiter='/', Prefix=dist, RequestPayer='requester'):
        # Recurse into "subdirectories" (common prefixes) first.
        if result.get('CommonPrefixes') is not None:
            for subdir in result.get('CommonPrefixes'):
                download_dir(client, resource, subdir.get('Prefix'), local, bucket)
        if result.get('Contents') is not None:
            for ffile in result.get('Contents'):
                if not os.path.exists(os.path.dirname(local + os.sep + ffile.get('Key'))):
                    os.makedirs(os.path.dirname(local + os.sep + ffile.get('Key')))
                resource.meta.client.download_file(bucket, ffile.get('Key'), local + os.sep + ffile.get('Key'),
                                                   ExtraArgs={'RequestPayer':'requester'})
@app.task(name="import_video_from_s3")
def import_video_from_s3(s3_import_id):
    """Import a video's exported directory tree from S3.

    Uses the recursive boto3 downloader for requester-pays buckets (then
    flattens the key directory) or ``aws s3 cp --recursive`` otherwise,
    and imports the serialized rows from table_data.json.

    :param s3_import_id: primary key of the S3Import describing the source
    """
    s3_import= S3Import.objects.get(pk=s3_import_id)
    start = TEvent()
    start.video_id = s3_import.video_id
    start.started = True
    start.operation = import_video_from_s3.name
    start.save()
    start_time = time.time()
    path = "{}/{}/".format(settings.MEDIA_ROOT,s3_import.video.pk)
    if s3_import.requester_pays:
        client = boto3.client('s3')
        resource = boto3.resource('s3')
        download_dir(client, resource,s3_import.key,path,s3_import.bucket)
        # Lift the downloaded key directory's contents up one level.
        for filename in os.listdir(os.path.join(path,s3_import.key)):
            shutil.move(os.path.join(path,s3_import.key, filename), os.path.join(path, filename))
        os.rmdir(os.path.join(path,s3_import.key))
    else:
        command = ["aws", "s3", "cp", "s3://{}/{}/".format(s3_import.bucket,s3_import.key),'.','--recursive']
        command_exec = " ".join(command)
        download = subprocess.Popen(args=command,cwd=path)
        download.communicate()
        download.wait()
        if download.returncode != 0:
            start.errored = True
            start.error_message = "return code for '{}' was {}".format(command_exec,download.returncode)
            start.seconds = time.time() - start_time
            start.save()
            # Python 2 raise syntax preserved; module targets Python 2.
            raise ValueError,start.error_message
    with open("{}/{}/table_data.json".format(settings.MEDIA_ROOT, s3_import.video.pk)) as input_json:
        video_json = json.load(input_json)
    serializers.import_video_json(s3_import.video,video_json,path)
    s3_import.completed = True
    s3_import.save()
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
@app.task(name="perform_clustering")
def perform_clustering(cluster_task_id,test=False):
    """Build an approximate-search clustering over selected index entries.

    Loads the feature files of the IndexEntries referenced by the Clusters
    row, trains the clustering/coder, and stores one ClusterCodes row per
    indexed entry (coarse/fine codes plus searchable text forms).

    :param cluster_task_id: primary key of the Clusters row describing the job
    :param test: run the clusterer in test mode (smaller/faster)
    """
    start = TEvent()
    start.started = True
    start.operation = perform_clustering.name
    start.save()
    start_time = time.time()
    clusters_dir = "{}/clusters/".format(settings.MEDIA_ROOT)
    if not os.path.isdir(clusters_dir):
        os.mkdir(clusters_dir)
    dc = Clusters.objects.get(pk=cluster_task_id)
    fnames = []
    for ipk in dc.included_index_entries_pk:
        k = IndexEntries.objects.get(pk=ipk)
        fnames.append("{}/{}/indexes/{}".format(settings.MEDIA_ROOT, k.video.pk, k.features_file_name))
    cluster_proto_filename = "{}{}.proto".format(clusters_dir,dc.pk)
    c = clustering.Clustering(fnames, dc.components,cluster_proto_filename,m=dc.m,v=dc.v,sub=dc.sub,test_mode=test)
    c.cluster()
    for e in c.entries:
        cc = ClusterCodes()
        cc.video_id = e['video_primary_key']
        # An entry is either a detection crop or a whole frame.
        if 'detection_primary_key' in e:
            cc.detection_id = e['detection_primary_key']
            cc.frame_id = Detection.objects.get(pk=cc.detection_id).frame_id
        else:
            cc.frame_id = e['frame_primary_key']
        cc.clusters = dc
        cc.coarse = e['coarse']
        cc.fine = e['fine']
        # Space-joined text forms allow substring search over the codes.
        cc.coarse_text = " ".join(map(str,e['coarse']))
        cc.fine_text = " ".join(map(str,e['fine']))
        cc.searcher_index = e['index']
        cc.save()
    c.save()
    dc.completed = True
    dc.save()
    start.completed = True
    start.seconds = time.time() - start_time
    start.save()
977dc631c0f1b8c43252a6d0f6882afb8779fd6e | 1,642 | py | Python | lit_nlp/components/pca.py | noahcb/lit | aae2af6b63003d1346e99995aa6213be2112c352 | [
"Apache-2.0"
] | 3 | 2020-11-27T12:47:44.000Z | 2022-01-13T21:17:02.000Z | lit_nlp/components/pca.py | screwdriver66/lit | a40ef90b514383fb78b3f8742aea31135445693e | [
"Apache-2.0"
] | 4 | 2021-09-20T22:29:08.000Z | 2022-02-27T14:30:20.000Z | lit_nlp/components/pca.py | screwdriver66/lit | a40ef90b514383fb78b3f8742aea31135445693e | [
"Apache-2.0"
] | 1 | 2020-08-14T23:06:34.000Z | 2020-08-14T23:06:34.000Z | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Lint as: python3
"""Implementation of PCA as a dimensionality reduction model."""
from absl import logging
from lit_nlp.components import projection
import numpy as np
from sklearn import decomposition
class PCAModel(projection.ProjectorModel):
  """LIT model API implementation for PCA."""

  def __init__(self, **pca_kw):
    self._pca = decomposition.PCA(**pca_kw)
    self._fitted = False

  ##
  # Training methods
  def fit_transform(self, inputs):
    """Fit the PCA on the input vectors and yield their projections."""
    vectors = [example["x"] for example in inputs]
    if not vectors:
      return []
    stacked = np.stack(vectors)
    logging.info("PCA input x_train: %s", str(stacked.shape))
    projected = self._pca.fit_transform(stacked)
    self._fitted = True
    return ({"z": row} for row in projected)

  ##
  # LIT model API
  def predict_minibatch(self, inputs, **unused_kw):
    """Project inputs with the fitted PCA; emit zeros when not yet fitted."""
    if not self._fitted:
      return ({"z": [0, 0, 0]} for _ in inputs)
    stacked = np.stack([example["x"] for example in inputs])
    projected = self._pca.transform(stacked)
    return ({"z": row} for row in projected)
| 32.196078 | 80 | 0.663216 |
e0b6dc04eb022709724cc9f092c1b75ad30cbb74 | 1,340 | py | Python | app/tests/api/event_source/test_delete.py | hollyfoxx/ace2-ams-api | 08ecf3f3dc8ac9abd224465731458950d4f78b7d | [
"Apache-2.0"
] | null | null | null | app/tests/api/event_source/test_delete.py | hollyfoxx/ace2-ams-api | 08ecf3f3dc8ac9abd224465731458950d4f78b7d | [
"Apache-2.0"
] | null | null | null | app/tests/api/event_source/test_delete.py | hollyfoxx/ace2-ams-api | 08ecf3f3dc8ac9abd224465731458950d4f78b7d | [
"Apache-2.0"
] | null | null | null | import uuid
from fastapi import status
"""
NOTE: There are no tests for the foreign key constraints. The DELETE endpoint will need to be updated once the endpoints
are in place in order to account for this.
"""
#
# INVALID TESTS
#
def test_delete_invalid_uuid(client_valid_access_token):
    # A non-UUID path segment must be rejected by request validation.
    response = client_valid_access_token.delete("/api/event/source/1")
    assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
def test_delete_nonexistent_uuid(client_valid_access_token):
    # A syntactically valid but unknown UUID must yield 404.
    response = client_valid_access_token.delete(f"/api/event/source/{uuid.uuid4()}")
    assert response.status_code == status.HTTP_404_NOT_FOUND
#
# VALID TESTS
#
def test_delete(client_valid_access_token):
    # Create the object
    created = client_valid_access_token.post("/api/event/source/", json={"value": "test"})
    assert created.status_code == status.HTTP_201_CREATED

    location = created.headers["Content-Location"]

    # Read it back
    assert client_valid_access_token.get(location).status_code == status.HTTP_200_OK

    # Delete it
    assert client_valid_access_token.delete(location).status_code == status.HTTP_204_NO_CONTENT

    # Make sure it is gone
    assert client_valid_access_token.get(location).status_code == status.HTTP_404_NOT_FOUND
| 27.916667 | 120 | 0.760448 |
03bd24670967db664f55da988dc7352ec5d07f36 | 410 | py | Python | pyGRBz/cli.py | dcorre/pyGRBz | 4955e9454a19fcc409649ad623c31d5bec66cc64 | [
"MIT"
] | 2 | 2020-05-21T15:06:48.000Z | 2021-08-17T07:22:09.000Z | pyGRBz/cli.py | dcorre/pyGRBz | 4955e9454a19fcc409649ad623c31d5bec66cc64 | [
"MIT"
] | null | null | null | pyGRBz/cli.py | dcorre/pyGRBz | 4955e9454a19fcc409649ad623c31d5bec66cc64 | [
"MIT"
] | 3 | 2021-07-29T10:42:16.000Z | 2022-03-11T07:15:53.000Z | # -*- coding: utf-8 -*-
"""Console script for pyGRBz."""
import sys
import click
@click.command()
def main(args=None):
    """Console script for pyGRBz."""
    placeholder = ("Replace this message by putting your code into "
                   "pyGRBz.cli.main")
    click.echo(placeholder)
    click.echo("See click documentation at http://click.pocoo.org/")
    return 0


if __name__ == "__main__":
    sys.exit(main())  # pragma: no cover
| 21.578947 | 68 | 0.634146 |
d2de1612fa593c61b2f64bec3a3f0918cd41df5c | 18,442 | py | Python | pjrpc/server/dispatcher.py | dapper91/pjrpc | cbf5c9e072935f9e59d306244c67499a4ad3e0f5 | [
"Unlicense"
] | 10 | 2020-03-15T06:41:58.000Z | 2022-03-17T08:55:53.000Z | pjrpc/server/dispatcher.py | dapper91/pjrpc | cbf5c9e072935f9e59d306244c67499a4ad3e0f5 | [
"Unlicense"
] | 41 | 2019-11-16T09:57:54.000Z | 2022-03-31T17:34:13.000Z | pjrpc/server/dispatcher.py | dapper91/pjrpc | cbf5c9e072935f9e59d306244c67499a4ad3e0f5 | [
"Unlicense"
] | 1 | 2022-03-17T08:21:28.000Z | 2022-03-17T08:21:28.000Z | import asyncio
import collections
import functools as ft
import json
import itertools as it
import logging
from typing import Any, Callable, Dict, ItemsView, KeysView, List, Optional, Type, Iterator, Iterable, Union, ValuesView
import pjrpc
from pjrpc.common import v20, BatchRequest, BatchResponse, Request, Response, UNSET, UnsetType
from pjrpc.server import utils
from . import validators
logger = logging.getLogger(__package__)
default_validator = validators.base.BaseValidator()
class Method:
    """
    JSON-RPC method wrapper. Stores method itself and some metainformation.

    :param method: method
    :param name: method name
    :param context: context name
    """

    def __init__(self, method: Callable, name: Optional[str] = None, context: Optional[Any] = None):
        self.method = method
        self.name = name if name else method.__name__
        self.context = context

        meta = utils.set_meta(method, method_name=self.name, context_name=context)
        self.validator = meta.get('validator', default_validator)
        self.validator_args = meta.get('validator_args', {})

    def bind(self, params: Optional[Union[list, dict]], context: Optional[Any] = None) -> Callable:
        """Validate params and return a partial of the wrapped method."""
        excluded = (self.context,) if self.context else ()
        bound_params = self.validator.validate_method(
            self.method, params, exclude=excluded, **self.validator_args
        )
        if self.context is not None:
            bound_params[self.context] = context
        return ft.partial(self.method, **bound_params)

    def copy(self, **kwargs) -> 'Method':
        """Return a copy of the wrapper, optionally overriding name/context."""
        merged = {'name': self.name, 'context': self.context}
        merged.update(kwargs)
        return Method(method=self.method, **merged)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Method):
            return NotImplemented
        return (self.method, self.name, self.context) == (other.method, other.name, other.context)
class ViewMethod(Method):
    """
    View method.

    :param view_cls: view class
    :param method_name: view class method name
    :param name: method name
    :param context: context name
    """

    def __init__(
        self,
        view_cls: Type['ViewMixin'],
        method_name: str,
        name: Optional[str] = None,
        context: Optional[Any] = None,
    ):
        super().__init__(getattr(view_cls, method_name), name or method_name, context)
        self.view_cls = view_cls
        self.method_name = method_name

    def bind(self, params: Optional[Union[list, dict]], context: Optional[Any] = None) -> Callable:
        """Instantiate the view, validate params, return a bound partial."""
        # Pass the application context to the view only when configured.
        instance = self.view_cls(context) if self.context else self.view_cls()
        bound = getattr(instance, self.method_name)
        bound_params = self.validator.validate_method(bound, params, **self.validator_args)
        return ft.partial(bound, **bound_params)

    def copy(self, **kwargs) -> 'ViewMethod':
        """Return a copy of the view method, optionally overriding name/context."""
        merged = {'name': self.name, 'context': self.context}
        merged.update(kwargs)
        return ViewMethod(view_cls=self.view_cls, method_name=self.method_name, **merged)
class ViewMixin:
    """
    Simple class based method handler mixin. Exposes all public methods.
    """

    @classmethod
    def __methods__(cls):
        # Yield every public callable attribute of the class.
        for attr_name in dir(cls):
            if attr_name.startswith('_'):
                continue
            candidate = getattr(cls, attr_name)
            if callable(candidate):
                yield candidate
class MethodRegistry:
    """
    Method registry.

    :param prefix: method name prefix to be used for naming containing methods
    """

    def __init__(self, prefix: Optional[str] = None):
        self._prefix = prefix
        # Maps full (prefixed) method name -> Method wrapper.
        self._registry: Dict[str, Method] = {}

    def __iter__(self) -> Iterator[str]:
        """
        Returns registry method iterator.
        """
        return iter(self._registry)

    def __getitem__(self, item: str) -> Method:
        """
        Returns a method from the registry by name.

        :param item: method name
        :returns: found method
        :raises: KeyError
        """
        return self._registry[item]

    def items(self) -> ItemsView[str, Method]:
        return self._registry.items()

    def keys(self) -> KeysView[str]:
        return self._registry.keys()

    def values(self) -> ValuesView[Method]:
        return self._registry.values()

    def get(self, item: str) -> Optional[Method]:
        """
        Returns a method from the registry by name.

        :param item: method name
        :returns: found method or `None`
        """
        return self._registry.get(item)

    def add(
        self, maybe_method: Optional[Callable] = None, name: Optional[str] = None, context: Optional[Any] = None,
    ) -> Callable:
        """
        Decorator adding decorated method to the registry.

        :param maybe_method: method or `None`
        :param name: method name to be used instead of `__name__` attribute
        :param context: parameter name to be used as an application context
        :returns: decorated method or decorator
        """

        def decorator(method: Callable) -> Callable:
            full_name = '.'.join(filter(None, (self._prefix, name or method.__name__)))
            self.add_methods(Method(method, full_name, context))
            return method

        # Supports both @add and @add(name=..., context=...) usage.
        if maybe_method is None:
            return decorator
        else:
            return decorator(maybe_method)

    def add_methods(self, *methods: Union[Callable, Method]) -> None:
        """
        Adds methods to the registry.

        :param methods: methods to be added. Each one can be an instance of :py:class:`pjrpc.server.Method`
                        or plain method
        """

        for method in methods:
            if isinstance(method, Method):
                self._add_method(method)
            else:
                self.add(method)

    def view(
        self, maybe_view: Optional[Type[ViewMixin]] = None, context: Optional[Any] = None, prefix: Optional[str] = None,
    ) -> Union[ViewMixin, Callable]:
        """
        Methods view decorator.

        :param maybe_view: view class instance or `None`
        :param context: application context name
        :param prefix: view methods prefix
        :return: decorator or decorated view
        """

        def decorator(view: Type[ViewMixin]) -> Type[ViewMixin]:
            for method in view.__methods__():
                full_name = '.'.join(filter(None, (self._prefix, prefix, method.__name__)))
                self._add_method(ViewMethod(view, method.__name__, full_name, context))

            return view

        # maybe_view's type depends on the usage of the decorator. It's a View
        # if it's used as `@view` but ``None`` if used as `@view()`.
        if maybe_view is None:
            return decorator
        else:
            return decorator(maybe_view)

    def merge(self, other: 'MethodRegistry') -> None:
        """
        Merges two registries.

        :param other: registry to be merged in the current one
        """

        for name, method in other.items():
            # Re-prefix the incoming method name with this registry's prefix.
            if self._prefix:
                name = f'{self._prefix}.{name}'
            self._add_method(method.copy(name=name))

    def _add_method(self, method: Method) -> None:
        # Last registration wins; a duplicate name is only logged, not rejected.
        if method.name in self._registry:
            logger.warning(f"method '{method.name}' already registered")

        self._registry[method.name] = method
class JSONEncoder(pjrpc.JSONEncoder):
    """
    Server JSON encoder. All custom server encoders should be inherited from it.
    """

    def default(self, o: Any) -> Any:
        # Serialize validation errors as the list of their message arguments.
        if isinstance(o, validators.base.ValidationError):
            return list(o.args)
        return super().default(o)
class Dispatcher:
    """
    Method dispatcher.
    :param request_class: JSON-RPC request class
    :param response_class: JSON-RPC response class
    :param batch_request: JSON-RPC batch request class
    :param batch_response: JSON-RPC batch response class
    :param json_loader: request json loader
    :param json_dumper: response json dumper
    :param json_encoder: response json encoder
    :param json_decoder: request json decoder
    :param middlewares: request middlewares
    :param error_handlers: request error handlers
    """
    def __init__(
        self,
        *,
        request_class: Type[Request] = v20.Request,
        response_class: Type[Response] = v20.Response,
        batch_request: Type[BatchRequest] = v20.BatchRequest,
        batch_response: Type[BatchResponse] = v20.BatchResponse,
        json_loader: Callable = json.loads,
        json_dumper: Callable = json.dumps,
        json_encoder: Type[JSONEncoder] = JSONEncoder,
        json_decoder: Optional[Type[json.JSONDecoder]] = None,
        middlewares: Iterable[Callable] = (),
        error_handlers: Optional[Dict[Union[None, int, Exception], List[Callable]]] = None,
    ):
        self._json_loader = json_loader
        self._json_dumper = json_dumper
        self._json_encoder = json_encoder
        self._json_decoder = json_decoder
        self._request_class = request_class
        self._response_class = response_class
        self._batch_request = batch_request
        self._batch_response = batch_response
        self._middlewares = list(middlewares)
        # B006 fix: a mutable ``{}`` default would be shared by every
        # Dispatcher instance; default to None and build a fresh dict here.
        self._error_handlers = error_handlers if error_handlers is not None else {}
        self._registry = MethodRegistry()

    @property
    def registry(self) -> MethodRegistry:
        """The method registry backing this dispatcher."""
        return self._registry

    def add(self, method: Callable, name: Optional[str] = None, context: Optional[Any] = None) -> None:
        """
        Adds method to the registry.
        :param method: method
        :param name: method name
        :param context: application context name
        """
        self._registry.add(method, name, context)

    def add_methods(self, *methods: Union[MethodRegistry, Method, Callable]) -> None:
        """
        Adds methods to the registry.
        :param methods: method list. Each method may be an instance of :py:class:`pjrpc.server.MethodRegistry`,
            :py:class:`pjrpc.server.Method` or plain function
        """
        for method in methods:
            if isinstance(method, MethodRegistry):
                self._registry.merge(method)
            elif isinstance(method, Method):
                self._registry.add_methods(method)
            else:
                self._registry.add(method)

    def view(self, view: Type[ViewMixin]) -> None:
        """
        Adds class based view to the registry.
        :param view: view to be added
        """
        self._registry.view(view)

    def dispatch(self, request_text: str, context: Optional[Any] = None) -> Optional[str]:
        """
        Deserializes request, dispatches it to the required method and serializes the result.
        :param request_text: request text representation
        :param context: application context (if supported)
        :return: response text representation (``None`` for notifications)
        """
        logger.debug("request received: %s", request_text)
        try:
            request_json = self._json_loader(request_text, cls=self._json_decoder)
            if isinstance(request_json, (list, tuple)):
                request = self._batch_request.from_json(request_json)
            else:
                request = self._request_class.from_json(request_json)
        except json.JSONDecodeError as e:
            response = self._response_class(id=None, error=pjrpc.exceptions.ParseError(data=str(e)))
        except (pjrpc.exceptions.DeserializationError, pjrpc.exceptions.IdentityError) as e:
            response = self._response_class(id=None, error=pjrpc.exceptions.InvalidRequestError(data=str(e)))
        else:
            if isinstance(request, collections.abc.Iterable):
                # Batch request: handle each sub-request, dropping UNSET
                # results (notifications produce no response). The loop
                # variable is renamed so it no longer shadows ``request``.
                response = self._batch_response(
                    *filter(
                        lambda resp: resp is not UNSET,
                        (self._handle_request(sub_request, context) for sub_request in request),
                    )
                )
            else:
                response = self._handle_request(request, context)
        if response is not UNSET:
            response_text = self._json_dumper(response.to_json(), cls=self._json_encoder)
            logger.debug("response sent: %s", response_text)
            return response_text

    def _handle_request(self, request: Request, context: Optional[Any]) -> Union[UnsetType, Response]:
        """Run a single request through the middleware chain, mapping errors to responses."""
        try:
            handler = self._handle_rpc_request
            # Wrap the base handler so middlewares execute in registration order.
            for middleware in reversed(self._middlewares):
                handler = ft.partial(middleware, handler=handler)
            return handler(request, context)
        except pjrpc.exceptions.JsonRpcError as e:
            logger.info("method execution error %s(%r): %r", request.method, request.params, e)
            error = e
        except Exception as e:
            logger.exception("internal server error: %r", e)
            error = pjrpc.exceptions.InternalError()
        # Let the generic (key None) and code-specific handlers transform the error.
        for handler in it.chain(self._error_handlers.get(None, []), self._error_handlers.get(error.code, [])):
            error = handler(request, context, error)
        if request.id is None:
            # Notifications never receive a response, not even an error one.
            return UNSET
        return self._response_class(id=request.id, error=error)

    def _handle_rpc_request(self, request: Request, context: Optional[Any]) -> Union[UnsetType, Response]:
        """Execute the requested method and build a success response (UNSET for notifications)."""
        result = self._handle_rpc_method(request.method, request.params, context)
        if request.id is None:
            return UNSET
        return self._response_class(id=request.id, result=result)

    def _handle_rpc_method(self, method_name: str, params: Optional[Union[list, dict]], context: Optional[Any]) -> Any:
        """Look up, bind and call a registered method, translating failures to JSON-RPC errors."""
        method = self._registry.get(method_name)
        if method is None:
            raise pjrpc.exceptions.MethodNotFoundError(data=f"method '{method_name}' not found")
        try:
            method = method.bind(params, context=context)
        except validators.ValidationError as e:
            raise pjrpc.exceptions.InvalidParamsError(data=e) from e
        try:
            return method()
        except pjrpc.exceptions.JsonRpcError:
            raise
        except Exception as e:
            logger.exception("method unhandled exception %s(%r): %r", method_name, params, e)
            raise pjrpc.exceptions.ServerError() from e
class AsyncDispatcher(Dispatcher):
    """
    Asynchronous method dispatcher.
    """
    async def dispatch(self, request_text: str, context: Optional[Any] = None) -> Optional[str]:
        """
        Deserializes request, dispatches it to the required method and serializes the result.
        :param request_text: request text representation
        :param context: application context (if supported)
        :return: response text representation (``None`` for notifications)
        """
        logger.debug("request received: %s", request_text)
        try:
            request_json = self._json_loader(request_text, cls=self._json_decoder)
            if isinstance(request_json, (list, tuple)):
                request = self._batch_request.from_json(request_json)
            else:
                request = self._request_class.from_json(request_json)
        except json.JSONDecodeError as e:
            response = self._response_class(id=None, error=pjrpc.exceptions.ParseError(data=str(e)))
        except (pjrpc.exceptions.DeserializationError, pjrpc.exceptions.IdentityError) as e:
            response = self._response_class(id=None, error=pjrpc.exceptions.InvalidRequestError(data=str(e)))
        else:
            # Bug fix: ``collections.Iterable`` was deprecated and removed in
            # Python 3.10; use ``collections.abc.Iterable``, consistent with
            # the synchronous ``Dispatcher.dispatch``.
            if isinstance(request, collections.abc.Iterable):
                # Batch request: run all sub-requests concurrently and drop
                # UNSET results (notifications produce no response).
                response = self._batch_response(
                    *filter(
                        lambda resp: resp is not UNSET, await asyncio.gather(
                            *(self._handle_request(sub_request, context) for sub_request in request)
                        ),
                    )
                )
            else:
                response = await self._handle_request(request, context)
        if response is not UNSET:
            response_text = self._json_dumper(response.to_json(), cls=self._json_encoder)
            logger.debug("response sent: %s", response_text)
            return response_text

    async def _handle_request(self, request: Request, context: Optional[Any]) -> Union[UnsetType, Response]:
        """Run a single request through the middleware chain, mapping errors to responses."""
        try:
            handler = self._handle_rpc_request
            for middleware in reversed(self._middlewares):
                handler = ft.partial(middleware, handler=handler)
            return await handler(request, context)
        except pjrpc.exceptions.JsonRpcError as e:
            logger.info("method execution error %s(%r): %r", request.method, request.params, e)
            error = e
        except Exception as e:
            logger.exception("internal server error: %r", e)
            error = pjrpc.exceptions.InternalError()
        for handler in it.chain(self._error_handlers.get(None, []), self._error_handlers.get(error.code, [])):
            # Error handlers are awaited here, so they may be coroutines.
            error = await handler(request, context, error)
        if request.id is None:
            # Notifications never receive a response, not even an error one.
            return UNSET
        return self._response_class(id=request.id, error=error)

    async def _handle_rpc_request(self, request: Request, context: Optional[Any]) -> Union[UnsetType, Response]:
        """Execute the requested method and build a success response (UNSET for notifications)."""
        result = await self._handle_rpc_method(request.method, request.params, context)
        if request.id is None:
            return UNSET
        return self._response_class(id=request.id, result=result)

    async def _handle_rpc_method(
        self, method_name: str, params: Optional[Union[list, dict]], context: Optional[Any],
    ) -> Any:
        """Look up, bind and call a registered method, awaiting coroutine results."""
        method = self._registry.get(method_name)
        if method is None:
            raise pjrpc.exceptions.MethodNotFoundError(data=f"method '{method_name}' not found")
        try:
            method = method.bind(params, context=context)
        except validators.ValidationError as e:
            raise pjrpc.exceptions.InvalidParamsError(data=e) from e
        try:
            result = method()
            if asyncio.iscoroutine(result):
                result = await result
            return result
        except pjrpc.exceptions.JsonRpcError:
            raise
        except Exception as e:
            logger.exception("method unhandled exception %s(%r): %r", method_name, params, e)
            raise pjrpc.exceptions.ServerError() from e
| 34.406716 | 120 | 0.627155 |
b5728ee175df5e72fafca8262595e7e197bee6e4 | 25,285 | py | Python | pocs/observatory.py | AstroHuntsman/POCS | c582bef36b7f2c4da9b8f36144f1e2a10df3b5cc | [
"MIT"
] | 1 | 2018-01-04T16:06:39.000Z | 2018-01-04T16:06:39.000Z | pocs/observatory.py | AstroHuntsman/POCS | c582bef36b7f2c4da9b8f36144f1e2a10df3b5cc | [
"MIT"
] | 49 | 2017-02-13T02:59:18.000Z | 2018-02-12T04:38:56.000Z | pocs/observatory.py | AstroHuntsman/POCS | c582bef36b7f2c4da9b8f36144f1e2a10df3b5cc | [
"MIT"
] | 1 | 2019-04-16T04:52:16.000Z | 2019-04-16T04:52:16.000Z | import os
from collections import OrderedDict
from datetime import datetime
from glob import glob
from astroplan import Observer
from astropy import units as u
from astropy.coordinates import EarthLocation
from astropy.coordinates import get_moon
from astropy.coordinates import get_sun
from . import PanBase
from .images import Image
from .scheduler.constraint import Duration
from .scheduler.constraint import MoonAvoidance
from .utils import current_time
from .utils import error
from .utils import images as img_utils
from .utils import list_connected_cameras
from .utils import load_module
class Observatory(PanBase):
    def __init__(self, *args, **kwargs):
        """Main Observatory class
        Starts up the observatory. Reads config file, sets up location,
        dates, mount, cameras, and weather station
        """
        super().__init__(*args, **kwargs)
        self.logger.info('Initializing observatory')
        # Setup information about site location
        self.logger.info('\tSetting up location')
        self.location = None  # site details dict (lat/lon/elevation/...), filled by _setup_location
        self.earth_location = None  # astropy EarthLocation built from the site config
        self.observer = None  # astroplan Observer at the site location
        self._setup_location()
        self.logger.info('\tSetting up mount')
        self.mount = None  # created (but not connected) by _create_mount
        self._create_mount()
        self.logger.info('\tSetting up cameras')
        self.cameras = OrderedDict()  # camera name -> camera instance, filled by _create_cameras
        self._primary_camera = None
        self._create_cameras(**kwargs)
        self.logger.info('\tSetting up scheduler')
        self.scheduler = None
        self._create_scheduler()
        # Offset between the latest exposure and the pointing image; set by analyze_recent
        self.current_offset_info = None
        self._image_dir = self.config['directories']['images']
        self.logger.info('\t Observatory initialized')
# Properties
##########################################################################
@property
def is_dark(self):
horizon = self.location.get('twilight_horizon', -18 * u.degree)
t0 = current_time()
is_dark = self.observer.is_night(t0, horizon=horizon)
if not is_dark:
sun_pos = self.observer.altaz(t0, target=get_sun(t0)).alt
self.logger.debug("Sun {:.02f} > {}".format(sun_pos, horizon))
return is_dark
    @property
    def sidereal_time(self):
        """Local sidereal time at the site for the current time."""
        return self.observer.local_sidereal_time(current_time())
    @property
    def primary_camera(self):
        """The camera currently designated as primary (may be None before setup)."""
        return self._primary_camera
    @primary_camera.setter
    def primary_camera(self, cam):
        # Mark the camera object itself as primary before storing it.
        cam.is_primary = True
        self._primary_camera = cam
    @property
    def current_observation(self):
        """The observation the scheduler is currently working on (may be None)."""
        return self.scheduler.current_observation
    @current_observation.setter
    def current_observation(self, new_observation):
        # Storage is delegated to the scheduler so both stay in sync.
        self.scheduler.current_observation = new_observation
##########################################################################
# Methods
##########################################################################
    def power_down(self):
        """Power down the observatory.
        Disconnects the mount; no other hardware is shut down here.
        """
        self.logger.debug("Shutting down observatory")
        self.mount.disconnect()
    def status(self):
        """Get status information for various parts of the observatory
        Returns:
            dict: status for the mount, the current observation and the
            observer site. Best-effort: on any failure a partial (possibly
            empty) dict is returned and a warning is logged.
        """
        status = {}
        try:
            t = current_time()
            # Drop sub-second precision from the local timestamp string.
            local_time = str(datetime.now()).split('.')[0]
            if self.mount.is_initialized:
                status['mount'] = self.mount.status()
                status['mount']['current_ha'] = self.observer.target_hour_angle(
                    t, self.mount.get_current_coordinates())
                if self.mount.has_target:
                    status['mount']['mount_target_ha'] = self.observer.target_hour_angle(
                        t, self.mount.get_target_coordinates())
            if self.current_observation:
                status['observation'] = self.current_observation.status()
                status['observation']['field_ha'] = self.observer.target_hour_angle(
                    t, self.current_observation.field)
            status['observer'] = {
                'siderealtime': str(self.sidereal_time),
                'utctime': t,
                'localtime': local_time,
                'local_evening_astro_time': self.observer.twilight_evening_astronomical(t, which='next'),
                'local_morning_astro_time': self.observer.twilight_morning_astronomical(t, which='next'),
                'local_sun_set_time': self.observer.sun_set_time(t),
                'local_sun_rise_time': self.observer.sun_rise_time(t),
                'local_moon_alt': self.observer.moon_altaz(t).alt,
                'local_moon_illumination': self.observer.moon_illumination(t),
                'local_moon_phase': self.observer.moon_phase(t),
            }
        except Exception as e:  # pragma: no cover
            # Status is informational only, so never let a failure propagate.
            self.logger.warning("Can't get observatory status: {}".format(e))
        return status
    def get_observation(self, *args, **kwargs):
        """Gets the next observation from the scheduler
        All args and kwargs are forwarded to the scheduler's
        `get_observation` call.
        Returns:
            observation (pocs.scheduler.observation.Observation): An
                object that represents the observation to be made
        Raises:
            error.NoObservation: If no valid observation is found
        """
        self.logger.debug("Getting observation for observatory")
        self.scheduler.get_observation(*args, **kwargs)
        if self.scheduler.current_observation is None:
            raise error.NoObservation("No valid observations found")
        return self.current_observation
    def cleanup_observations(self):
        """Cleanup observation list
        Loops through the `observed_list` performing cleanup tasks. Resets
        `observed_list` when done
        """
        for seq_time, observation in self.scheduler.observed_list.items():
            self.logger.debug("Housekeeping for {}".format(observation))
            for cam_name, camera in self.cameras.items():
                self.logger.debug('Cleanup for camera {} [{}]'.format(
                    cam_name, camera.uid))
                # Images live under:
                # <image dir>/fields/<field name>/<camera uid>/<sequence time>/
                dir_name = "{}/fields/{}/{}/{}/".format(
                    self.config['directories']['images'],
                    observation.field.field_name,
                    camera.uid,
                    seq_time,
                )
                img_utils.clean_observation_dir(dir_name)
        self.logger.debug('Cleanup finished')
        self.scheduler.reset_observed_list()
    def observe(self):
        """Take individual images for the current observation
        This method gets the current observation and takes the next
        corresponding exposure.
        Returns:
            dict: camera name -> event that signals when that camera's
            exposure is done processing.
        """
        # Get observatory metadata
        headers = self.get_standard_headers()
        # All cameras share a similar start time
        headers['start_time'] = current_time(flatten=True)
        # List of camera events to wait for to signal exposure is done
        # processing
        camera_events = dict()
        # Take exposure with each camera
        for cam_name, camera in self.cameras.items():
            self.logger.debug("Exposing for camera: {}".format(cam_name))
            try:
                # Start the exposures
                cam_event = camera.take_observation(
                    self.current_observation, headers)
                camera_events[cam_name] = cam_event
            except Exception as e:
                # A failing camera is logged and skipped; the others continue.
                self.logger.error("Problem waiting for images: {}".format(e))
        return camera_events
    def analyze_recent(self):
        """Analyze the most recent exposure
        Compares the most recent exposure to the reference pointing image and
        determines the offset between the two.
        Returns:
            Offset information as returned by `Image.compute_offset`
            (has `delta_ra`, `delta_dec` and `magnitude` attributes), or
            None if the analysis failed.
        """
        # Clear the offset info
        self.current_offset_info = None
        pointing_image = self.current_observation.pointing_image
        try:
            # Get the image to compare
            image_id, image_path = self.current_observation.last_exposure
            current_image = Image(image_path, location=self.earth_location)
            solve_info = current_image.solve_field()
            self.logger.debug("Solve Info: {}".format(solve_info))
            # Get the offset between the two
            self.current_offset_info = current_image.compute_offset(
                pointing_image)
            self.logger.debug('Offset Info: {}'.format(
                self.current_offset_info))
            # Update the observation info with the offsets
            self.db.observations.update({'data.image_id': image_id}, {
                '$set': {
                    'offset_info': {
                        'd_ra': self.current_offset_info.delta_ra.value,
                        'd_dec': self.current_offset_info.delta_dec.value,
                        'magnitude': self.current_offset_info.magnitude.value,
                        'unit': 'arcsec'
                    }
                },
            })
        except error.SolveError:
            self.logger.warning("Can't solve field, skipping")
        except Exception as e:
            self.logger.warning("Problem in analyzing: {}".format(e))
        return self.current_offset_info
    def update_tracking(self):
        """Update tracking with rate adjustment
        Converts the current RA/Dec offsets into millisecond move durations
        and issues `move_ms_<direction>` commands to the mount.
        Returns:
            tuple or None: ((ra_direction, ra_offset), (dec_direction, dec_offset))
            when offset info is available, otherwise None.
        """
        if self.current_offset_info is not None:
            dec_offset = self.current_offset_info.delta_dec
            dec_ms = self.mount.get_ms_offset(dec_offset)
            if dec_offset >= 0:
                dec_direction = 'north'
            else:
                dec_direction = 'south'
            ra_offset = self.current_offset_info.delta_ra
            ra_ms = self.mount.get_ms_offset(ra_offset)
            if ra_offset >= 0:
                ra_direction = 'west'
            else:
                ra_direction = 'east'
            # Scale factors for the correction durations.
            # NOTE(review): unclear why Dec uses 1.5x — confirm against mount docs.
            dec_correction = abs(dec_ms.value) * 1.5
            ra_correction = abs(ra_ms.value) * 1.
            # Clamp to the 5-digit field used by the '{:05.0f}' command format.
            max_time = 99999
            if dec_correction > max_time:
                dec_correction = max_time
            if ra_correction > max_time:
                ra_correction = max_time
            self.logger.info("Adjusting Dec: {} {:0.2f} ms {:0.2f}".format(
                dec_direction, dec_correction, dec_offset))
            # NOTE(review): the `<= max_time` test is always true after clamping above.
            if dec_correction >= 1. and dec_correction <= max_time:
                self.mount.query('move_ms_{}'.format(
                    dec_direction), '{:05.0f}'.format(dec_correction))
            self.logger.info("Adjusting RA: {} {:0.2f} ms {:0.2f}".format(
                ra_direction, ra_correction, ra_offset))
            if ra_correction >= 1. and ra_correction <= max_time:
                self.mount.query('move_ms_{}'.format(
                    ra_direction), '{:05.0f}'.format(ra_correction))
            return ((ra_direction, ra_offset), (dec_direction, dec_offset))
    def get_standard_headers(self, observation=None):
        """Get a set of standard headers
        Args:
            observation (`~pocs.scheduler.observation.Observation`, optional): The
                observation to use for header values. If None is given, use the `current_observation`
        Returns:
            dict: The standard headers
        """
        if observation is None:
            observation = self.current_observation
        # NOTE(review): `assert` is removed under `python -O`; the logger call is
        # the AssertionError message expression and only fires when it fails.
        assert observation is not None, self.logger.warning(
            "No observation, can't get headers")
        field = observation.field
        self.logger.debug("Getting headers for : {}".format(observation))
        t0 = current_time()
        moon = get_moon(t0, self.observer.location)
        headers = {
            'airmass': self.observer.altaz(t0, field).secz.value,
            'creator': "POCSv{}".format(self.__version__),
            'elevation': self.location.get('elevation').value,
            'ha_mnt': self.observer.target_hour_angle(t0, field).value,
            'latitude': self.location.get('latitude').value,
            'longitude': self.location.get('longitude').value,
            'moon_fraction': self.observer.moon_illumination(t0),
            'moon_separation': field.coord.separation(moon).value,
            'observer': self.config.get('name', ''),
            'origin': 'Project PANOPTES',
            'tracking_rate_ra': self.mount.tracking_rate,
        }
        # Add observation metadata
        headers.update(observation.status())
        return headers
def autofocus_cameras(self, camera_list=None, coarse=False):
"""
Perform autofocus on all cameras with focus capability, or a named subset of these. Optionally will
perform a coarse autofocus first, otherwise will just fine tune focus.
Args:
camera_list (list, optional): list containing names of cameras to autofocus.
coarse (bool, optional): Whether to performan a coarse autofocus before fine tuning, default False
Returns:
dict of str:threading_Event key:value pairs, containing camera names and corresponding Events which
will be set when the camera completes autofocus
"""
if camera_list:
# Have been passed a list of camera names, extract dictionary
# containing only cameras named in the list
cameras = {cam_name: self.cameras[
cam_name] for cam_name in camera_list if cam_name in self.cameras.keys()}
if cameras == {}:
self.logger.warning(
"Passed a list of camera names ({}) but no matches found".format(camera_list))
else:
# No cameras specified, will try to autofocus all cameras from
# self.cameras
cameras = self.cameras
autofocus_events = dict()
# Start autofocus with each camera
for cam_name, camera in cameras.items():
self.logger.debug("Autofocusing camera: {}".format(cam_name))
try:
assert camera.focuser.is_connected
except AttributeError:
self.logger.debug(
'Camera {} has no focuser, skipping autofocus'.format(cam_name))
except AssertionError:
self.logger.debug(
'Camera {} focuser not connected, skipping autofocus'.format(cam_name))
else:
try:
# Start the autofocus
autofocus_event = camera.autofocus(coarse=coarse)
except Exception as e:
self.logger.error(
"Problem running autofocus: {}".format(e))
else:
autofocus_events[cam_name] = autofocus_event
return autofocus_events
##########################################################################
# Private Methods
##########################################################################
    def _setup_location(self):
        """
        Sets up the site and location details for the observatory
        Note:
            These items are read from the 'location' config directive and include:
            * name
            * latitude
            * longitude
            * timezone
            * pressure
            * elevation
            * horizon
        """
        self.logger.debug('Setting up site details of observatory')
        try:
            config_site = self.config.get('location')
            name = config_site.get('name', 'Nameless Location')
            latitude = config_site.get('latitude')
            longitude = config_site.get('longitude')
            timezone = config_site.get('timezone')
            utc_offset = config_site.get('utc_offset')
            # Pressure is configured as a bare number and interpreted as bar.
            pressure = config_site.get('pressure', 0.680) * u.bar
            elevation = config_site.get('elevation', 0 * u.meter)
            horizon = config_site.get('horizon', 30 * u.degree)
            twilight_horizon = config_site.get(
                'twilight_horizon', -18 * u.degree)
            self.location = {
                'name': name,
                'latitude': latitude,
                'longitude': longitude,
                'elevation': elevation,
                'timezone': timezone,
                'utc_offset': utc_offset,
                'pressure': pressure,
                'horizon': horizon,
                'twilight_horizon': twilight_horizon,
            }
            self.logger.debug("Location: {}".format(self.location))
            # Create an EarthLocation for the mount
            self.earth_location = EarthLocation(
                lat=latitude, lon=longitude, height=elevation)
            self.observer = Observer(
                location=self.earth_location, name=name, timezone=timezone)
        except Exception:
            # Any missing/invalid config value surfaces as a single PanError.
            raise error.PanError(msg='Bad site information')
def _create_mount(self, mount_info=None):
"""Creates a mount object.
Details for the creation of the mount object are held in the
configuration file or can be passed to the method.
This method ensures that the proper mount type is loaded.
Note:
This does not actually make a serial connection to the mount. To do so,
call the 'mount.connect()' explicitly.
Args:
mount_info (dict): Configuration items for the mount.
Returns:
pocs.mount: Returns a sub-class of the mount type
"""
if mount_info is None:
mount_info = self.config.get('mount')
model = mount_info.get('model')
port = mount_info.get('port')
if 'mount' in self.config.get('simulator', []):
model = 'simulator'
driver = 'simulator'
mount_info['simulator'] = True
else:
model = mount_info.get('brand')
driver = mount_info.get('driver')
if model != 'bisque':
port = mount_info.get('port')
if port is None or len(glob(port)) == 0:
msg = "Mount port ({}) not available. Use --simulator=mount for simulator. Exiting.".format(port)
raise error.PanError(msg=msg, exit=True)
self.logger.debug('Creating mount: {}'.format(model))
module = load_module('pocs.mount.{}'.format(driver))
# Make the mount include site information
self.mount = module.Mount(location=self.earth_location)
self.logger.debug('Mount created')
def _create_cameras(self, **kwargs):
"""Creates a camera object(s)
Loads the cameras via the configuration.
Creates a camera for each camera item listed in the config. Ensures the
appropriate camera module is loaded.
Note: We are currently only operating with one camera and the `take_pic.sh`
script automatically discovers the ports.
Note:
This does not actually make a usb connection to the camera. To do so,
call the 'camear.connect()' explicitly.
Args:
**kwargs (dict): Can pass a camera_config object that overrides the info in
the configuration file. Can also pass `auto_detect`(bool) to try and
automatically discover the ports.
Returns:
list: A list of created camera objects.
Raises:
error.CameraNotFound: Description
error.PanError: Description
"""
if kwargs.get('camera_info') is None:
camera_info = self.config.get('cameras')
self.logger.debug("Camera config: \n {}".format(camera_info))
a_simulator = 'camera' in self.config.get('simulator', [])
if a_simulator:
self.logger.debug("Using simulator for camera")
ports = list()
# Lookup the connected ports if not using a simulator
auto_detect = kwargs.get(
'auto_detect', camera_info.get('auto_detect', False))
if not a_simulator and auto_detect:
self.logger.debug("Auto-detecting ports for cameras")
try:
ports = list_connected_cameras()
except Exception as e:
self.logger.warning(e)
if len(ports) == 0:
raise error.PanError(
msg="No cameras detected. Use --simulator=camera for simulator.")
else:
self.logger.debug("Detected Ports: {}".format(ports))
for cam_num, camera_config in enumerate(camera_info.get('devices', [])):
cam_name = 'Cam{:02d}'.format(cam_num)
if not a_simulator:
camera_model = camera_config.get('model')
# Assign an auto-detected port. If none are left, skip
if auto_detect:
try:
camera_port = ports.pop()
except IndexError:
self.logger.warning(
"No ports left for {}, skipping.".format(cam_name))
continue
else:
try:
camera_port = camera_config['port']
except KeyError:
raise error.CameraNotFound(
msg="No port specified and auto_detect=False")
camera_focuser = camera_config.get('focuser', None)
camera_readout = camera_config.get('readout_time', 6.0)
else:
# Set up a simulated camera with fully configured simulated
# focuser
camera_model = 'simulator'
camera_port = '/dev/camera/simulator'
camera_focuser = {'model': 'simulator',
'focus_port': '/dev/ttyFAKE',
'initial_position': 20000,
'autofocus_range': (40, 80),
'autofocus_step': (10, 20),
'autofocus_seconds': 0.1,
'autofocus_size': 500}
camera_readout = 0.5
camera_set_point = camera_config.get('set_point', None)
camera_filter = camera_config.get('filter_type', None)
self.logger.debug('Creating camera: {}'.format(camera_model))
try:
module = load_module('pocs.camera.{}'.format(camera_model))
self.logger.debug('Camera module: {}'.format(module))
except ImportError:
raise error.CameraNotFound(msg=camera_model)
else:
# Create the camera object
cam = module.Camera(name=cam_name,
model=camera_model,
port=camera_port,
set_point=camera_set_point,
filter_type=camera_filter,
focuser=camera_focuser,
readout_time=camera_readout)
is_primary = ''
if camera_info.get('primary', '') == cam.uid:
self.primary_camera = cam
is_primary = ' [Primary]'
self.logger.debug("Camera created: {} {} {}".format(
cam.name, cam.uid, is_primary))
self.cameras[cam_name] = cam
# If no camera was specified as primary use the first
if self.primary_camera is None:
self.primary_camera = self.cameras['Cam00']
if len(self.cameras) == 0:
raise error.CameraNotFound(
msg="No cameras available. Exiting.", exit=True)
self.logger.debug("Cameras created")
    def _create_scheduler(self):
        """ Sets up the scheduler that will be used by the observatory
        Reads the scheduler type and fields file from the 'scheduler' config
        section and sets `self.scheduler`.
        Raises:
            error.NotFound: if the fields file is missing or the scheduler
                module cannot be imported.
        """
        scheduler_config = self.config.get('scheduler', {})
        scheduler_type = scheduler_config.get('type', 'dispatch')
        # Read the targets from the file
        fields_file = scheduler_config.get('fields_file', 'simple.yaml')
        fields_path = os.path.join(self.config['directories'][
            'targets'], fields_file)
        self.logger.debug('Creating scheduler: {}'.format(fields_path))
        if os.path.exists(fields_path):
            try:
                # Load the required module
                module = load_module(
                    'pocs.scheduler.{}'.format(scheduler_type))
                # Simple constraint for now
                constraints = [MoonAvoidance(), Duration(30 * u.deg)]
                # Create the Scheduler instance
                self.scheduler = module.Scheduler(
                    self.observer, fields_file=fields_path, constraints=constraints)
                self.logger.debug("Scheduler created")
            except ImportError as e:
                raise error.NotFound(msg=e)
        else:
            raise error.NotFound(
                msg="Fields file does not exist: {}".format(fields_file))
| 36.966374 | 117 | 0.565395 |
502d3062441c74fa6be5920c100e63aa98d96463 | 387 | py | Python | Exercises/ex010_conversao.py | monique-tukaj/cursoemvideo-python | 79e1fd58ab5fc7d89970ff0586761767104de0e5 | [
"MIT"
] | null | null | null | Exercises/ex010_conversao.py | monique-tukaj/cursoemvideo-python | 79e1fd58ab5fc7d89970ff0586761767104de0e5 | [
"MIT"
] | null | null | null | Exercises/ex010_conversao.py | monique-tukaj/cursoemvideo-python | 79e1fd58ab5fc7d89970ff0586761767104de0e5 | [
"MIT"
] | null | null | null | #crie um programa que leia quanto dinheiro uma pessoa tem na carteira
# ...and show how many euros they can buy.
# Current exchange rate (BRL per EUR).
valor = float(input('Digite a quantidade de dinheiro que deseja trocar em euro: R$'))
conversao = valor / 6.26
print('Com R$ {:.2f} poderá comprar €{:.2f}.'.format(valor, conversao))
#pesquisar demais moedas e tentar construir um site de conversao de moedas. | 43 | 85 | 0.751938 |
d5e9dd01365a041dfd8f6df34941df56228397f3 | 7,476 | py | Python | control/tests/sisotool_test.py | berezhko/python-control | 78ec3eedd5a4a5f3d8409eec7c7f7e787793b357 | [
"BSD-3-Clause"
] | 1 | 2022-01-05T14:20:43.000Z | 2022-01-05T14:20:43.000Z | control/tests/sisotool_test.py | berezhko/python-control | 78ec3eedd5a4a5f3d8409eec7c7f7e787793b357 | [
"BSD-3-Clause"
] | null | null | null | control/tests/sisotool_test.py | berezhko/python-control | 78ec3eedd5a4a5f3d8409eec7c7f7e787793b357 | [
"BSD-3-Clause"
] | null | null | null | """sisotool_test.py"""
from control.exception import ControlMIMONotImplemented
import matplotlib.pyplot as plt
import numpy as np
from numpy.testing import assert_array_almost_equal
import pytest
from control.sisotool import sisotool, rootlocus_pid_designer
from control.rlocus import _RLClickDispatcher
from control.xferfcn import TransferFunction
from control.statesp import StateSpace
from control import c2d
@pytest.mark.usefixtures("mplcleanup")
class TestSisotool:
"""These are tests for the sisotool in sisotool.py."""
@pytest.fixture
def tsys(self, request):
"""Return a generic SISO transfer function"""
dt = getattr(request, 'param', 0)
return TransferFunction([1000], [1, 25, 100, 0], dt)
@pytest.fixture
def sys222(self):
"""2-states square system (2 inputs x 2 outputs)"""
A222 = [[4., 1.],
[2., -3]]
B222 = [[5., 2.],
[-3., -3.]]
C222 = [[2., -4],
[0., 1.]]
D222 = [[3., 2.],
[1., -1.]]
return StateSpace(A222, B222, C222, D222)
@pytest.fixture
def sys221(self):
"""2-states, 2 inputs x 1 output"""
A222 = [[4., 1.],
[2., -3]]
B222 = [[5., 2.],
[-3., -3.]]
C221 = [[0., 1.]]
D221 = [[1., -1.]]
return StateSpace(A222, B222, C221, D221)
def test_sisotool(self, tsys):
sisotool(tsys, Hz=False)
fig = plt.gcf()
ax_mag, ax_rlocus, ax_phase, ax_step = fig.axes[:4]
# Check the initial root locus plot points
initial_point_0 = (np.array([-22.53155977]), np.array([0.]))
initial_point_1 = (np.array([-1.23422011]), np.array([-6.54667031]))
initial_point_2 = (np.array([-1.23422011]), np.array([6.54667031]))
assert_array_almost_equal(ax_rlocus.lines[0].get_data(),
initial_point_0, 4)
assert_array_almost_equal(ax_rlocus.lines[1].get_data(),
initial_point_1, 4)
assert_array_almost_equal(ax_rlocus.lines[2].get_data(),
initial_point_2, 4)
# Check the step response before moving the point
step_response_original = np.array(
[0. , 0.0216, 0.1271, 0.3215, 0.5762, 0.8522, 1.1114, 1.3221,
1.4633, 1.5254])
assert_array_almost_equal(
ax_step.lines[0].get_data()[1][:10], step_response_original, 4)
bode_plot_params = {
'omega': None,
'dB': False,
'Hz': False,
'deg': True,
'omega_limits': None,
'omega_num': None,
'sisotool': True,
'fig': fig,
'margins': True
}
# Move the rootlocus to another point
event = type('test', (object,), {'xdata': 2.31206868287,
'ydata': 15.5983051046,
'inaxes': ax_rlocus.axes})()
_RLClickDispatcher(event=event, sys=tsys, fig=fig,
ax_rlocus=ax_rlocus, sisotool=True, plotstr='-',
bode_plot_params=bode_plot_params, tvect=None)
# Check the moved root locus plot points
moved_point_0 = (np.array([-29.91742755]), np.array([0.]))
moved_point_1 = (np.array([2.45871378]), np.array([-15.52647768]))
moved_point_2 = (np.array([2.45871378]), np.array([15.52647768]))
assert_array_almost_equal(ax_rlocus.lines[-3].get_data(),
moved_point_0, 4)
assert_array_almost_equal(ax_rlocus.lines[-2].get_data(),
moved_point_1, 4)
assert_array_almost_equal(ax_rlocus.lines[-1].get_data(),
moved_point_2, 4)
# Check if the bode_mag line has moved
bode_mag_moved = np.array(
[69.0065, 68.6749, 68.3448, 68.0161, 67.6889, 67.3631, 67.0388,
66.7159, 66.3944, 66.0743])
assert_array_almost_equal(ax_mag.lines[0].get_data()[1][10:20],
bode_mag_moved, 4)
# Check if the step response has changed
step_response_moved = np.array(
[0. , 0.0237, 0.1596, 0.4511, 0.884 , 1.3985, 1.9031, 2.2922,
2.4676, 2.3606])
assert_array_almost_equal(
ax_step.lines[0].get_data()[1][:10], step_response_moved, 4)
@pytest.mark.parametrize('tsys', [0, True],
indirect=True, ids=['ctime', 'dtime'])
def test_sisotool_tvect(self, tsys):
# test supply tvect
tvect = np.linspace(0, 1, 10)
sisotool(tsys, tvect=tvect)
fig = plt.gcf()
ax_rlocus, ax_step = fig.axes[1], fig.axes[3]
# Move the rootlocus to another point and confirm same tvect
event = type('test', (object,), {'xdata': 2.31206868287,
'ydata': 15.5983051046,
'inaxes': ax_rlocus.axes})()
_RLClickDispatcher(event=event, sys=tsys, fig=fig,
ax_rlocus=ax_rlocus, sisotool=True, plotstr='-',
bode_plot_params=dict(), tvect=tvect)
assert_array_almost_equal(tvect, ax_step.lines[0].get_data()[0])
    def test_sisotool_mimo(self, sys222, sys221):
        """Square MIMO systems are accepted; non-square ones raise."""
        # a 2x2 should not raise an error:
        sisotool(sys222)
        # but 2 input, 1 output should
        with pytest.raises(ControlMIMONotImplemented):
            sisotool(sys221)
@pytest.mark.usefixtures("mplcleanup")
class TestPidDesigner:
    """Smoke tests for `rootlocus_pid_designer` over many argument
    combinations and plant types."""
    @pytest.fixture
    def plant(self, request):
        """Indirectly-parametrized plant: continuous TF, discretized TF,
        or a 2-state state-space system."""
        plants = {
            'syscont':TransferFunction(1,[1, 3, 0]),
            'sysdisc1':c2d(TransferFunction(1,[1, 3, 0]), .1),
            'syscont221':StateSpace([[-.3, 0],[1,0]],[[-1,],[.1,]], [0, -.3], 0)}
        return plants[request.param]

    # test permutations of system construction without plotting
    @pytest.mark.parametrize('plant', ('syscont', 'sysdisc1', 'syscont221'), indirect=True)
    @pytest.mark.parametrize('gain', ('P', 'I', 'D'))
    @pytest.mark.parametrize('sign', (1,))
    @pytest.mark.parametrize('input_signal', ('r', 'd'))
    @pytest.mark.parametrize('Kp0', (0,))
    @pytest.mark.parametrize('Ki0', (1.,))
    @pytest.mark.parametrize('Kd0', (0.1,))
    @pytest.mark.parametrize('tau', (0.01,))
    @pytest.mark.parametrize('C_ff', (0, 1,))
    @pytest.mark.parametrize('derivative_in_feedback_path', (True, False,))
    @pytest.mark.parametrize("kwargs", [{'plot':False},])
    def test_pid_designer_1(self, plant, gain, sign, input_signal, Kp0, Ki0, Kd0, tau, C_ff,
            derivative_in_feedback_path, kwargs):
        """Construction must not raise for any parameter combination
        (plotting disabled)."""
        rootlocus_pid_designer(plant, gain, sign, input_signal, Kp0, Ki0, Kd0, tau, C_ff,
            derivative_in_feedback_path, **kwargs)

    # test creation of sisotool plot
    # input from reference or disturbance
    @pytest.mark.skip("Bode plot is incorrect; generates spurious warnings")
    @pytest.mark.parametrize('plant', ('syscont', 'syscont221'), indirect=True)
    @pytest.mark.parametrize("kwargs", [
        {'input_signal':'r', 'Kp0':0.01, 'derivative_in_feedback_path':True},
        {'input_signal':'d', 'Kp0':0.01, 'derivative_in_feedback_path':True},])
    def test_pid_designer_2(self, plant, kwargs):
        """Plot-producing path; skipped upstream (see skip reason)."""
        rootlocus_pid_designer(plant, **kwargs)
| 41.303867 | 92 | 0.570225 |
670cfe3b54c4dfee3548161a276452149bd4c3a3 | 409 | py | Python | rabbitmq/send.py | maxis1314/pyutils | 7e0666c650209155b3da186d09c54cf14825df1e | [
"Apache-2.0"
] | 2 | 2017-06-12T07:58:51.000Z | 2018-03-08T09:43:35.000Z | rabbitmq/send.py | maxis1314/pyutils | 7e0666c650209155b3da186d09c54cf14825df1e | [
"Apache-2.0"
] | 1 | 2017-06-10T02:05:52.000Z | 2017-07-04T03:57:28.000Z | rabbitmq/send.py | maxis1314/pyutils | 7e0666c650209155b3da186d09c54cf14825df1e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import pika
import sys
# Open a blocking AMQP connection to a RabbitMQ broker on localhost,
# default virtual host "/".
connection = pika.BlockingConnection(pika.ConnectionParameters(
        host='localhost',virtual_host='/'))
channel = connection.channel()
#channel.queue_declare(queue='hello')
# Publish one message taken from the command line:
#   argv[1] = exchange name, argv[2] = routing key, argv[3] = message body.
channel.basic_publish(exchange=sys.argv[1],
                      routing_key=sys.argv[2],
                      body=sys.argv[3])
# NOTE(review): the confirmation text is fixed even though the actual body
# comes from argv[3].
print(" [x] Sent 'Hello World!'")
connection.close()
| 25.5625 | 63 | 0.662592 |
d6570f0e27ff65076fc68cff27c367f59a7fe4f7 | 6,667 | py | Python | test/functional/txn_doublespend.py | wolfoxonly/bwb | aae01441cdc171ff7bbdc161b74b4eeb2f1b5a10 | [
"MIT"
] | null | null | null | test/functional/txn_doublespend.py | wolfoxonly/bwb | aae01441cdc171ff7bbdc161b74b4eeb2f1b5a10 | [
"MIT"
] | null | null | null | test/functional/txn_doublespend.py | wolfoxonly/bwb | aae01441cdc171ff7bbdc161b74b4eeb2f1b5a10 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bwbcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there is a double-spend conflict."""
from test_framework.test_framework import BwbcoinTestFramework
from test_framework.util import *
class TxnMallTest(BwbcoinTestFramework):
    """Check wallet/account balances when a pre-signed double-spend of the
    same coins is later mined on the other half of a split network."""
    def set_test_params(self):
        # Four nodes: 0/1 on one side of the split, 2/3 on the other.
        self.num_nodes = 4
    def add_options(self, parser):
        parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
                          help="Test double-spend of 1-confirmed transaction")
    def setup_network(self):
        """Start with the network split between nodes 1 and 2."""
        # Start with split network:
        super().setup_network()
        disconnect_nodes(self.nodes[1], 2)
        disconnect_nodes(self.nodes[2], 1)
    def run_test(self):
        """Fund two accounts, spend from them, then confirm a conflicting
        pre-signed transaction and verify all balances/confirmations."""
        # All nodes should start with 1,250 BTC:
        starting_balance = 1250
        for i in range(4):
            assert_equal(self.nodes[i].getbalance(), starting_balance)
            self.nodes[i].getnewaddress("")  # bug workaround, coins generated assigned to first getnewaddress!
        # Assign coins to foo and bar accounts:
        node0_address_foo = self.nodes[0].getnewaddress("foo")
        fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 1219)
        fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
        node0_address_bar = self.nodes[0].getnewaddress("bar")
        fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 29)
        fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
        assert_equal(self.nodes[0].getbalance(""),
                     starting_balance - 1219 - 29 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
        # Coins are sent to node1_address
        node1_address = self.nodes[1].getnewaddress("from0")
        # First: use raw transaction API to send 1240 BTC to node1_address,
        # but don't broadcast:
        doublespend_fee = Decimal('-.02')
        rawtx_input_0 = {}
        rawtx_input_0["txid"] = fund_foo_txid
        rawtx_input_0["vout"] = find_output(self.nodes[0], fund_foo_txid, 1219)
        rawtx_input_1 = {}
        rawtx_input_1["txid"] = fund_bar_txid
        rawtx_input_1["vout"] = find_output(self.nodes[0], fund_bar_txid, 29)
        inputs = [rawtx_input_0, rawtx_input_1]
        change_address = self.nodes[0].getnewaddress()
        outputs = {}
        outputs[node1_address] = 1240
        outputs[change_address] = 1248 - 1240 + doublespend_fee
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        doublespend = self.nodes[0].signrawtransaction(rawtx)
        assert_equal(doublespend["complete"], True)
        # Create two spends using 1 50 BTC coin each
        txid1 = self.nodes[0].sendfrom("foo", node1_address, 40, 0)
        txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
        # Have node0 mine a block:
        if (self.options.mine_block):
            self.nodes[0].generate(1)
            sync_blocks(self.nodes[0:2])
        tx1 = self.nodes[0].gettransaction(txid1)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Node0's balance should be starting balance, plus 50BTC for another
        # matured block, minus 40, minus 20, and minus transaction fees:
        expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
        if self.options.mine_block: expected += 50
        expected += tx1["amount"] + tx1["fee"]
        expected += tx2["amount"] + tx2["fee"]
        assert_equal(self.nodes[0].getbalance(), expected)
        # foo and bar accounts should be debited:
        assert_equal(self.nodes[0].getbalance("foo", 0), 1219+tx1["amount"]+tx1["fee"])
        assert_equal(self.nodes[0].getbalance("bar", 0), 29+tx2["amount"]+tx2["fee"])
        if self.options.mine_block:
            assert_equal(tx1["confirmations"], 1)
            assert_equal(tx2["confirmations"], 1)
            # Node1's "from0" balance should be both transaction amounts:
            assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"]+tx2["amount"]))
        else:
            assert_equal(tx1["confirmations"], 0)
            assert_equal(tx2["confirmations"], 0)
        # Now give doublespend and its parents to miner:
        self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
        self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
        doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
        # ... mine a block...
        self.nodes[2].generate(1)
        # Reconnect the split network, and sync chain:
        connect_nodes(self.nodes[1], 2)
        self.nodes[2].generate(1)  # Mine another block to make sure we sync
        sync_blocks(self.nodes)
        assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2)
        # Re-fetch transaction info:
        tx1 = self.nodes[0].gettransaction(txid1)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Both transactions should be conflicted
        assert_equal(tx1["confirmations"], -2)
        assert_equal(tx2["confirmations"], -2)
        # Node0's total balance should be starting balance, plus 100BTC for
        # two more matured blocks, minus 1240 for the double-spend, plus fees (which are
        # negative):
        expected = starting_balance + 100 - 1240 + fund_foo_tx["fee"] + fund_bar_tx["fee"] + doublespend_fee
        assert_equal(self.nodes[0].getbalance(), expected)
        assert_equal(self.nodes[0].getbalance("*"), expected)
        # Final "" balance is starting_balance - amount moved to accounts - doublespend + subsidies +
        # fees (which are negative)
        assert_equal(self.nodes[0].getbalance("foo"), 1219)
        assert_equal(self.nodes[0].getbalance("bar"), 29)
        assert_equal(self.nodes[0].getbalance(""), starting_balance
                                                   -1219
                                                   - 29
                                                   -1240
                                                   + 100
                                                   + fund_foo_tx["fee"]
                                                   + fund_bar_tx["fee"]
                                                   + doublespend_fee)
        # Node1's "from0" account balance should be just the doublespend:
        assert_equal(self.nodes[1].getbalance("from0"), 1240)
# Allow running this functional test directly as a script.
if __name__ == '__main__':
    TxnMallTest().main()
| 46.298611 | 111 | 0.60597 |
459b171e051f15e853b5f24e850d1f61ebb89896 | 8,369 | py | Python | espaloma/data/md.py | jstaker7/espaloma | d80d280acd608dc04c93966afe15cc3cb74f65a8 | [
"MIT"
] | null | null | null | espaloma/data/md.py | jstaker7/espaloma | d80d280acd608dc04c93966afe15cc3cb74f65a8 | [
"MIT"
] | null | null | null | espaloma/data/md.py | jstaker7/espaloma | d80d280acd608dc04c93966afe15cc3cb74f65a8 | [
"MIT"
] | null | null | null | # =============================================================================
# IMPORTS
# =============================================================================
import numpy as np
import torch
from openforcefield.typing.engines.smirnoff import ForceField
from simtk import openmm, unit
from simtk.openmm.app import Simulation
from simtk.unit.quantity import Quantity
from espaloma.units import *
import espaloma as esp
# =============================================================================
# CONSTANTS
# =============================================================================
# simulation specs (defaults fed to openmm.LangevinIntegrator below)
TEMPERATURE = 500 * unit.kelvin  # elevated T; also used by the vacuum sampler defaults
STEP_SIZE = 1 * unit.femtosecond  # integrator time step
COLLISION_RATE = 1 / unit.picosecond  # Langevin collision (friction) rate
# =============================================================================
# MODULE FUNCTIONS
# =============================================================================
def subtract_nonbonded_force(
    g, forcefield="test_forcefields/smirnoff99Frosst.offxml",
):
    """Subtract the nonbonded contribution from a graph's reference targets.

    Zeroes every bonded force constant (angles, bonds, torsions) in the
    parametrized OpenMM system, so the energies/forces evaluated afterwards
    contain only the remaining (nonbonded) terms; those values are then
    subtracted in place from the graph's ``u_ref`` energies and
    ``u_ref_prime`` derivatives.

    Parameters
    ----------
    g : espaloma graph carrying an OpenFF molecule (``g.mol``), per-snapshot
        coordinates in ``g.nodes["n1"].data["xyz"]`` and reference targets on
        ``g.heterograph``.
    forcefield : str or openforcefield ``ForceField``
        SMIRNOFF force field; a string is loaded via ``ForceField``.

    Returns
    -------
    g : the same graph, modified in place.
    """
    # get the forcefield from str
    if isinstance(forcefield, str):
        forcefield = ForceField(forcefield)
    # partial charge
    g.mol.assign_partial_charges("gasteiger")  # faster
    # parametrize topology
    topology = g.mol.to_topology()
    # create openmm system
    system = forcefield.create_openmm_system(
        topology, charge_from_molecules=[g.mol],
    )
    # use langevin integrator, although it's not super useful here
    integrator = openmm.LangevinIntegrator(
        TEMPERATURE, COLLISION_RATE, STEP_SIZE
    )
    # create simulation (only used as a container for the Context here)
    simulation = Simulation(
        topology=topology, system=system, integrator=integrator
    )
    # get forces
    forces = list(system.getForces())
    # loop through forces, zeroing every bonded force constant
    for force in forces:
        name = force.__class__.__name__
        # turn off angle
        if "Angle" in name:
            for idx in range(force.getNumAngles()):
                id1, id2, id3, angle, k = force.getAngleParameters(idx)
                force.setAngleParameters(idx, id1, id2, id3, angle, 0.0)
        elif "Bond" in name:
            for idx in range(force.getNumBonds()):
                id1, id2, length, k = force.getBondParameters(idx)
                force.setBondParameters(
                    idx, id1, id2, length, 0.0,
                )
        elif "Torsion" in name:
            for idx in range(force.getNumTorsions()):
                (
                    id1,
                    id2,
                    id3,
                    id4,
                    periodicity,
                    phase,
                    k,
                ) = force.getTorsionParameters(idx)
                force.setTorsionParameters(
                    idx, id1, id2, id3, id4, periodicity, phase, 0.0,
                )
        # Push the zeroed parameters into the live Context.
        # NOTE(review): called for *every* force, not only the modified ones —
        # confirm all force classes present expose updateParametersInContext.
        force.updateParametersInContext(simulation.context)
    # the snapshots, transposed so axis 0 iterates snapshots
    # (run() below stores xyz as (n_atoms, n_snapshots, 3))
    xs = (
        Quantity(
            g.nodes["n1"].data["xyz"].detach().numpy(),
            esp.units.DISTANCE_UNIT,
        )
        .value_in_unit(unit.nanometer)
        .transpose((1, 0, 2))
    )
    # loop through the snapshots, collecting energy and forces per snapshot
    energies = []
    derivatives = []
    for x in xs:
        simulation.context.setPositions(x)
        state = simulation.context.getState(
            getEnergy=True, getParameters=True, getForces=True,
        )
        energy = state.getPotentialEnergy().value_in_unit(
            esp.units.ENERGY_UNIT,
        )
        derivative = state.getForces(asNumpy=True).value_in_unit(
            esp.units.FORCE_UNIT,
        )
        energies.append(energy)
        derivatives.append(derivative)
    # put energies to a tensor of shape (1, n_snapshots)
    energies = torch.tensor(
        energies, dtype=torch.get_default_dtype(),
    ).flatten()[None, :]
    derivatives = torch.tensor(
        np.stack(derivatives, axis=1), dtype=torch.get_default_dtype(),
    )
    # subtract the energies
    # NOTE(review): OpenMM's getForces returns forces (-dU/dx); assumes
    # `u_ref_prime` follows the same sign convention — confirm.
    g.heterograph.apply_nodes(
        lambda node: {"u_ref": node.data["u_ref"] - energies}, ntype="g",
    )
    g.heterograph.apply_nodes(
        lambda node: {"u_ref_prime": node.data["u_ref_prime"] - derivatives},
        ntype="n1",
    )
    return g
# =============================================================================
# MODULE CLASSES
# =============================================================================
class MoleculeVacuumSimulation(object):
    """ Simulate a single molecule system in vacuum.

    Parameters
    ----------
    forcefield : `str` or openforcefield `ForceField`
        SMIRNOFF force field used to parametrize the molecule.
    n_samples : `int`
        Number of samples to collect.
    n_steps_per_sample : `int`
        Number of steps between each sample.
    temperature : `float * unit.kelvin`
        Temperature for the simulation.
    collision_rate : `float / unit.picosecond`
        Collision rate.
    step_size : `float * unit.femtosecond`
        Time step.

    Methods
    -------
    simulation_from_graph : Create simulation from molecule.
    run : Run the simulation.
    """
    def __init__(
        self,
        forcefield="test_forcefields/smirnoff99Frosst.offxml",
        n_samples=100,
        n_steps_per_sample=1000,
        temperature=TEMPERATURE,
        collision_rate=COLLISION_RATE,
        step_size=STEP_SIZE,
    ):
        self.n_samples = n_samples
        self.n_steps_per_sample = n_steps_per_sample
        self.temperature = temperature
        self.collision_rate = collision_rate
        self.step_size = step_size
        if isinstance(forcefield, str):
            self.forcefield = ForceField(forcefield)
        else:
            # TODO: type assertion
            self.forcefield = forcefield

    def simulation_from_graph(self, g):
        """ Create an OpenMM simulation from a molecule graph:
        parametrize, generate one conformer, minimize and thermalize. """
        # assign partial charge
        g.mol.assign_partial_charges("gasteiger")  # faster
        # parameterize topology
        topology = g.mol.to_topology()
        # create openmm system
        system = self.forcefield.create_openmm_system(
            topology,
            # TODO:
            # figure out whether `sqm` should be so slow
            charge_from_molecules=[g.mol],
        )
        # use langevin integrator
        integrator = openmm.LangevinIntegrator(
            self.temperature, self.collision_rate, self.step_size
        )
        # initialize simulation
        simulation = Simulation(
            topology=topology, system=system, integrator=integrator
        )
        # full package namespace needed for utils.RDKitToolkitWrapper below
        import openforcefield
        # get conformer
        g.mol.generate_conformers(
            toolkit_registry=openforcefield.utils.RDKitToolkitWrapper(),
        )
        # put conformer in simulation
        simulation.context.setPositions(g.mol.conformers[0])
        # minimize energy
        simulation.minimizeEnergy()
        # set velocities
        simulation.context.setVelocitiesToTemperature(self.temperature)
        return simulation

    def run(self, g, in_place=True):
        """ Collect samples from simulation.

        Parameters
        ----------
        g : `esp.Graph`
            Input graph.
        in_place : `bool`
            If True, write the samples into the graph's atom nodes and
            return the graph; otherwise return the raw samples tensor.

        Returns
        -------
        samples : `torch.Tensor`, `shape=(n_samples, n_nodes, 3)`
            Samples (when `in_place=False`).
        graph : `esp.Graph`
            Modified graph (when `in_place=True`).
        """
        # build simulation
        simulation = self.simulation_from_graph(g)
        # initialize empty list for samples.
        samples = []
        # loop through number of samples
        for _ in range(self.n_samples):
            # run MD for `self.n_steps_per_sample` steps
            simulation.step(self.n_steps_per_sample)
            # append samples to `samples`
            samples.append(
                simulation.context.getState(getPositions=True)
                .getPositions(asNumpy=True)
                .value_in_unit(DISTANCE_UNIT)
            )
        # put samples into an array
        samples = np.array(samples)
        # put samples into tensor
        samples = torch.tensor(samples, dtype=torch.float32)
        if in_place is True:
            # stored as (n_nodes, n_samples, 3) on the atom ("n1") nodes
            g.heterograph.nodes["n1"].data["xyz"] = samples.permute(1, 0, 2)
            # require gradient for force matching
            g.heterograph.nodes["n1"].data["xyz"].requires_grad = True
            return g
        return samples
5c5887db7a5bd54d36c42b5539a4328f8ad387cd | 11,736 | py | Python | prototypes/test/ukanren.py | fkberthold/tweedle | 6c6d21ad72143c75df860202556e4dc553a8b315 | [
"MIT"
] | 1 | 2019-01-22T06:42:51.000Z | 2019-01-22T06:42:51.000Z | prototypes/test/ukanren.py | fkberthold/tweedle | 6c6d21ad72143c75df860202556e4dc553a8b315 | [
"MIT"
] | null | null | null | prototypes/test/ukanren.py | fkberthold/tweedle | 6c6d21ad72143c75df860202556e4dc553a8b315 | [
"MIT"
] | null | null | null | import os.path
import sys
import unittest
from microkanren.ukanren import *
from microkanren.macro import macros, conj, disj, goal, call
# Pre-built example states.
# NOTE(review): none of these names are referenced by the tests below —
# possibly leftovers from an earlier version; confirm before removing.
empty_state = State()
one_value = State({LVar(0):'hi'})
two_values = State({LVar(0):'hi', LVar(1):'bye'})
value_reference = State({LVar(0):LVar(1), LVar(1):'bye'})  # var 0 bound to var 1
identical_values = State({LVar(0):'hi', LVar(1):'hi'})
# `@goal` plus the `with disj:` MacroPy block define a relation that
# succeeds when x unifies with either 'tea' or 'cake'.
@goal
def isTeaOrCake(x):
    with disj:
        Eq(x, 'tea')
        Eq(x, 'cake')
class Test_Fixtures(unittest.TestCase):
    """Base class providing shared fixtures: two fresh logic variables and
    several sample collections used by the subclasses below."""
    @classmethod
    def setUpClass(cls):
        # Created once per class; the vars' ids depend on the global
        # LVar.nextId counter at the time the class is set up.
        cls.var1 = LVar()
        cls.var2 = LVar()
        cls.dinner_party = ['The Walrus', 'The Carpenter']
        cls.tea_party = ['Mad Hatter', 'March Hare', 'The Dormouse']
        cls.combatants = {'Dee', 'Dum', 'Raven'}
        cls.changes = {'drink me':'smaller', 'eat me':'bigger'}
class Test_LVar(Test_Fixtures):
    """Logic-variable id allocation, naming and equality semantics."""
    def test_increment_id(self):
        oldId = LVar.nextId
        newVar = LVar()
        newId = LVar.nextId
        # The new var takes the pre-increment id; the counter advances by one.
        self.assertEqual(oldId, newVar.id)
        self.assertEqual(oldId + 1, newId)
    def test_var_without_name(self):
        oldId = LVar.nextId
        newVar = LVar()
        self.assertIsNone(newVar.name)
        self.assertEqual(newVar.id, oldId)
        # Unnamed vars render as just their id.
        self.assertEqual(str(newVar), "%i" % newVar.id)
    def test_var_with_name(self):
        oldId = LVar.nextId
        newVar = LVar("White Rabbit")
        self.assertEqual(newVar.name, "White Rabbit")
        self.assertEqual(newVar.id, oldId)
        # Named vars render as "name(id)".
        self.assertEqual(str(newVar), "%s(%i)" % (newVar.name, newVar.id))
    def test_vars_only_equal_if_id_equal(self):
        # Names do not participate in equality; distinct ids => unequal.
        newVar1 = LVar("Red Rose")
        newVar2 = LVar("White Rose")
        self.assertNotEqual(newVar1, newVar2)
class Test_State(Test_Fixtures):
    """Construction, validity flag, length and repr of State."""
    def test_valid_empty(self):
        newState = State()
        self.assertEqual(newState.substitution, {})
        self.assertTrue(newState.valid)
        self.assertEqual(len(newState), 0)
        self.assertEqual(str(newState), "Substitutions:\n")
    def test_valid_with_sub(self):
        var = LVar()
        newState = State({var: "Alice"})
        self.assertEqual(newState.substitution, {var: "Alice"})
        self.assertTrue(newState.valid)
        self.assertEqual(len(newState), 1)
        self.assertEqual(str(newState), "Substitutions:\n %s: %s\n" % (str(var), "'Alice'"))
    def test_invalid(self):
        newState = State(valid=False)
        self.assertFalse(newState.valid)
        self.assertEqual(str(newState), "State Invalid")
class Test_Fail(Test_Fixtures):
    """Fail yields no states regardless of the input state's validity."""
    def test_valid_state_fails(self):
        newState = State()
        result = list(Fail().run(newState))
        self.assertEqual(result, [])
    def test_invalid_state_fails(self):
        newState = State(valid=False)
        result = list(Fail().run(newState))
        self.assertEqual(result, [])
class Test_Succeed(Test_Fixtures):
    """Succeed passes a valid state through unchanged; invalid states yield
    nothing."""
    def test_valid_state_succeeds(self):
        newState = State()
        result = list(Succeed().run(newState))
        self.assertEqual(result, [newState])
    def test_invalid_state_fails(self):
        newState = State(valid=False)
        result = list(Succeed().run(newState))
        self.assertEqual(result, [])
class Test_Eq(Test_Fixtures):
    """Unification via Eq: literals, variables, lists, dicts and sets —
    including ambiguous cases that branch into multiple states."""
    def test_repr(self):
        result = Eq('Roses','Roses')
        self.assertEqual(str(result), "Eq('Roses','Roses')")
    def test_tautology(self):
        result = list(Eq(0,0).run())
        self.assertEqual(result, [State()])
    def test_same_variable(self):
        # Unifying a var with itself adds no bindings.
        result = list(Eq(self.var1, self.var1).run())
        self.assertEqual(result, [State()])
    def test_not_eq(self):
        result = list(Eq('Red Roses','White Roses').run())
        self.assertEqual(result, [])
    def test_var_on_left(self):
        result = list(Eq(self.var1, 'Roses').run())
        self.assertEqual(result[0][self.var1], 'Roses')
    def test_var_on_right(self):
        result = list(Eq('Roses', self.var1).run())
        self.assertEqual(result[0][self.var1], 'Roses')
    def test_var_on_both(self):
        # Var-var unification binds one var to the other.
        result = list(Eq(self.var1, self.var2).run())
        self.assertEqual(result[0][self.var1], self.var2)
    def test_is_list(self):
        result = list(Eq(self.var1, self.tea_party).run())
        self.assertEqual(result[0][self.var1], self.tea_party)
    def test_in_list_head(self):
        result = list(Eq([self.var1, 'March Hare', 'The Dormouse'], self.tea_party).run())
        self.assertEqual(result[0][self.var1], 'Mad Hatter')
    def test_in_list_tail(self):
        result = list(Eq(['Mad Hatter', self.var1, 'The Dormouse'], self.tea_party).run())
        self.assertEqual(result[0][self.var1], 'March Hare')
    def test_is_dictionary(self):
        result = list(Eq(self.var1, self.changes).run())
        self.assertEqual(result[0][self.var1], self.changes)
    def test_is_dictionary_key(self):
        result = list(Eq({self.var1:'smaller', 'eat me':'bigger'}, self.changes).run())
        self.assertEqual(result[0][self.var1], 'drink me')
    def test_is_dictionary_value(self):
        result = list(Eq({'drink me':self.var1, 'eat me':'bigger'}, self.changes).run())
        self.assertEqual(result[0][self.var1], 'smaller')
    def test_is_dictionary_ambiguous_key(self):
        # Two vars keying identical values: both assignments are produced.
        result = list(Eq({self.var1:'what', self.var2:'what'}, {'I eat':'what', 'I see':'what'}).run())
        self.assertEqual(len(result), 2)
        self.assertIn(State({self.var1:'I eat', self.var2:'I see'}), result)
        self.assertIn(State({self.var1:'I see', self.var2:'I eat'}), result)
    def test_is_set(self):
        result = list(Eq(self.var1, self.combatants).run())
        self.assertEqual(result[0][self.var1], self.combatants)
    def test_in_set(self):
        result = list(Eq({'Dee', self.var1, 'Dum'}, self.combatants).run())
        self.assertEqual(result[0][self.var1], 'Raven')
    def test_is_set_ambiguous(self):
        # Two vars over a two-element set: both pairings are produced.
        result = list(Eq({self.var1, self.var2}, {'Dee', 'Dum'}).run())
        self.assertEqual(len(result), 2)
        self.assertIn(State({self.var1:'Dee', self.var2:'Dum'}), result)
        self.assertIn(State({self.var1:'Dum', self.var2:'Dee'}), result)
class Test_Conj(Test_Fixtures):
    """Conjunction: Conj(...), the `&` operator and the `with conj` macro.
    Names passed to conj(...) (e.g. `traveller`) are bound by the macro."""
    def test_empty(self):
        # NOTE(review): local `goal` shadows the imported `goal` macro
        # inside this method — harmless but easy to misread.
        goal = Conj()
        result = list(goal.run())
        self.assertEqual(str(goal), "EMPTY CONJ")
        # An empty conjunction is vacuously true.
        self.assertEqual(result, [State()])
    def test_just_one_valid(self):
        result = list(Conj(Eq('Alice','Alice')).run())
        self.assertEqual(result, [State()])
    def test_just_one_invalid(self):
        result = list(Conj(Eq('Alice','Dinah')).run())
        self.assertEqual(result, [])
    def test_two_without_branching(self):
        with conj(traveller), conj as testConj:
            Eq(traveller, self.var1)
            Eq(traveller, 'Alice')
        result = list(testConj.run())
        self.assertEqual(len(result), 1)
        # Binding propagates through the intermediate macro-local var.
        self.assertEqual(result[0][self.var1], 'Alice')
    def test_two_amper_to_string(self):
        testConj = Eq(self.var1, 'Alice') & Eq(self.var2, 'Dinah')
        self.assertEqual(str(testConj), "Eq(%s,'Alice') & Eq(%s,'Dinah')" % (str(self.var1), str(self.var2)))
    def test_two_amper_without_branching(self):
        testConj = Fresh(lambda cat: Eq(cat, self.var1) & Eq(cat, 'Cheshire'))
        result = list(testConj.run())
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0][self.var1], 'Cheshire')
    def test_two_with_branching(self):
        # The inner disj splits the conjunction into two solution states.
        with conj(rattle, dum), conj as testConj:
            with disj:
                Eq(rattle, 'spoiled')
                Eq(rattle, 'spoilt')
            Eq(rattle, self.var2)
            Eq(dum, 'angry')
            Eq(dum, self.var1)
        result = list(testConj.run())
        self.assertEqual(len(result), 2)
        for r in result:
            self.assertEqual(r[self.var1], 'angry')
        isSpoiled = [st for st in result if st[self.var2] == 'spoiled']
        isSpoilt = [st for st in result if st[self.var2] == 'spoilt']
        self.assertEqual(len(isSpoiled), 1)
        self.assertEqual(len(isSpoilt), 1)
class Test_Disj(Test_Fixtures):
    """Disjunction: Disj(...), the `|` operator, the `with disj` macro, and
    limiting the result stream with run(results=...)."""
    def test_empty(self):
        goal = Disj()
        result = list(goal.run())
        self.assertEqual(str(goal), "EMPTY DISJ")
        # An empty disjunction has no way to succeed.
        self.assertEqual(result, [])
    def test_just_one_valid(self):
        result = list(Disj(Eq('Queen','Queen')).run())
        self.assertEqual(result, [State()])
    def test_just_one_invalid(self):
        result = list(Disj(Eq('Pawn','Queen')).run())
        self.assertEqual(result, [])
    def test_two(self):
        with disj as testDisj:
            Eq(self.var1, 'Walrus')
            Eq(self.var1, 'Carpenter')
        result = list(testDisj.run())
        self.assertEqual(len(result), 2)
        hasWalrus = [st for st in result if st[self.var1] == 'Walrus']
        hasCarpenter = [st for st in result if st[self.var1] == 'Carpenter']
        self.assertEqual(len(hasWalrus), 1)
        self.assertEqual(len(hasCarpenter), 1)
    def test_two_operator(self):
        testDisj = (Eq(self.var1, 'Walrus') | Eq(self.var1, 'Carpenter'))
        self.assertEqual(str(testDisj), "Eq(%s,'Walrus') | Eq(%s,'Carpenter')" % (str(self.var1), str(self.var1)))
        result = list(testDisj.run())
        self.assertEqual(len(result), 2)
        has3 = [st for st in result if st[self.var1] == 'Walrus']
        has4 = [st for st in result if st[self.var1] == 'Carpenter']
        self.assertEqual(len(has3), 1)
        self.assertEqual(len(has4), 1)
    def test_get_first(self):
        with disj as testDisj:
            Eq(self.var1, 'Walrus')
            Eq(self.var1, 'Carpenter')
        # results=1 truncates the stream; the first branch wins.
        result = list(testDisj.run(results=1))
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0][self.var1], 'Walrus')
class Test_Fresh(Test_Fixtures):
    """Fresh introduces new logic variables named after the lambda's
    parameters."""
    def test_empty_fresh(self):
        result = list(Fresh(lambda: Eq('King','King')).run())
        self.assertEqual(result, [State()])
    def test_one_fresh(self):
        next_var_id = LVar.nextId
        goal = Fresh(lambda x: Eq(x,'King'))
        # The fresh var is allocated eagerly, taking the next id.
        self.assertEqual(str(goal), "Fresh [x(%i)]: <class 'microkanren.ukanren.Eq'>" % next_var_id)
        result = list(goal.run())
        self.assertEqual(len(result), 1)
        self.assertEqual(len(result[0].substitution), 1)
        # The single binding belongs to the lambda parameter 'x'.
        var = list(result[0].substitution.keys())[0]
        self.assertEqual(var.name, 'x')
class Test_Call(Test_Fixtures):
    """Call wraps a thunk/goal; the `with call(x)` macro binds fresh names."""
    def test_empty_call(self):
        result = list(Call(lambda: Eq('Knight','Knight')).run())
        self.assertEqual(result, [State()])
    def test_call_block(self):
        # `x` is a macro-bound fresh variable, not a Python name.
        with call(x), call as callTest:
            with disj:
                Eq(x, 'Treacle')
                Eq(x, 'Honey')
        result = list(callTest.run())
        self.assertEqual(len(result), 2)
        # Recover the fresh var by its name to inspect both branches.
        var = [key for key in result[0].substitution.keys() if key.name == 'x'][0]
        hasTreacle = [st for st in result if st[var] == 'Treacle']
        hasHoney = [st for st in result if st[var] == 'Honey']
        self.assertEqual(len(hasTreacle), 1)
        self.assertEqual(len(hasHoney), 1)
class Test_Goal(Test_Fixtures):
    """End-to-end check of the `@goal` macro via isTeaOrCake."""
    def test_one_var(self):
        result = list(isTeaOrCake(self.var1).run())
        self.assertEqual(len(result), 2)
        hasTea = [st for st in result if st[self.var1] == 'tea']
        hasCake = [st for st in result if st[self.var1] == 'cake']
        self.assertEqual(len(hasTea), 1)
        self.assertEqual(len(hasCake), 1)
# Guard: this suite only works when imported (the macro machinery is not
# active when the module is run directly), so explain instead of running.
if __name__ == "__main__":
    print("This test suite depends on macros to execute, so can't be")
    print("run independently. Please either run it through `run_tests.py`")
    print("above, or by importing it in a separate file.")
| 36.222222 | 114 | 0.612304 |
888b46443b3519711ba20f6d47fb4776e6321675 | 1,880 | py | Python | python/b_a_variables_declare.py | IVBakker/learn | e0a4249459798b2c4e715a14a801ca53feb3cc64 | [
"MIT"
] | 1 | 2019-07-09T15:22:28.000Z | 2019-07-09T15:22:28.000Z | python/b_a_variables_declare.py | IVBakker/learn | e0a4249459798b2c4e715a14a801ca53feb3cc64 | [
"MIT"
] | null | null | null | python/b_a_variables_declare.py | IVBakker/learn | e0a4249459798b2c4e715a14a801ca53feb3cc64 | [
"MIT"
] | null | null | null | # Showing message while your code is running is really important
# It helps debugging and understanding what's happening
print(f"HELLO")
# START
# "XX = <VALUE>" announces that we need a new variable called XX holding that value.
# The name of a variable is only for the coder, it makes no changes to the program
print("") # SPACING
# NOTHING: None is Python's "no value" marker
my_var_nothing = None
# BOOLEAN: only two possible values, True and False
my_var_bool = True
my_var_bool = False
print("") # SPACING
# INTEGERS
my_var_int = 1234 # my_var_int contains the value 1234 inside, an int (integer)
my_var_int = 1235 # now my_var_int contains the value 1235; there's no way to get back the previous value 1234
print(f"my_var_int: {my_var_int}")
my_var_int=10.5 # these are "float" numbers, decimal numbers
print(f"my_var_int: {my_var_int}")
# STRING
my_var_string = "This is a string" # in Python "TEXT" and 'TEXT' is the same. It's for strings
print(f"my_var_string: {my_var_string}")
print(f"my_var_string[0]: {my_var_string[0]}") # strings can be indexed, starting at 0
print(f"my_var_string[1]: {my_var_string[1]}")
# print(f"my_var_int[1]: {my_var_int[1]}") # BREAKS: numbers are not indexable
print("") # SPACING
########################
# CONTAINERS
########################
# ARRAY (Python calls these "lists"); items may mix types
my_var_array = [1, "TOTO", 7, 4, 5, "HELLO"]
print(f"my_var_array: {my_var_array}")
print(f"my_var_array[0]: {my_var_array[0]}")
print(f"my_var_array[1]: {my_var_array[1]}")
print(f"my_var_array[2]: {my_var_array[2]}")
print(f"my_var_array[-1]: {my_var_array[-1]}") # negative indices count from the end
# print(f"my_var_array[100]: {my_var_array[100]}") # BREAKS: index out of range
print("") # SPACING
# Dictionary/dict/object: maps keys to values; keys may mix types
my_var_dict = {
    1: "int_1",
    "key_1": "value_1",
    "Francois": "Regnoult"
}
print(f"my_var_dict: {my_var_dict}")
print(f"my_var_dict['key_1']: {my_var_dict['key_1']}")
my_accessor = "key_1"
print(f"my_var_dict[my_accessor]: {my_var_dict[my_accessor]}")
# Reading a missing key with [] raises KeyError and stops the program;
# .get() returns None (or a chosen default) instead.
print(my_var_dict.get("key_2"))
# print(my_var_dict["key_2"]) # BREAKS: KeyError, "key_2" is not in the dict
# print(f"my_var_dict['key_2']: {my_var_dict['key_2']}") # BREAKS
| 28.484848 | 101 | 0.697872 |
61cb229713f1b1827e5988834b461f31ae09c00e | 1,548 | py | Python | manga_py/providers/comico_jp.py | tgaugry/manga-py | d0be627e5425020053b4fb117a9b3f0d79b20c9e | [
"MIT"
] | null | null | null | manga_py/providers/comico_jp.py | tgaugry/manga-py | d0be627e5425020053b4fb117a9b3f0d79b20c9e | [
"MIT"
] | null | null | null | manga_py/providers/comico_jp.py | tgaugry/manga-py | d0be627e5425020053b4fb117a9b3f0d79b20c9e | [
"MIT"
] | null | null | null | from sys import stderr
from manga_py.provider import Provider
from .helpers.std import Std
class ComicoJp(Provider, Std):
    """comico.jp provider.  Helper attributes (self.re, self.http_post,
    self.json, self.chapter, ...) are presumably supplied by the
    Provider/Std base classes — not defined here."""
    def get_chapter_index(self) -> str:
        """Chapter index: "<chapter_id>-<articleNo>" when the chapter URL
        carries an articleNo, otherwise just the chapter id."""
        idx = self.re.search(r'articleNo=(\d+)', self.chapter)
        if idx:
            return '{}-{}'.format(self.chapter_id, idx.group(1))
        return str(self.chapter_id)

    def get_content(self):
        """Fetch the article list via the site API; [] when the titleNo is
        missing from the URL or the response is not parseable."""
        title_no = self.re.search(r'\.\w{2,7}/.+titleNo=(\d+)', self.get_url())
        if title_no:
            content = self.http_post('{}/api/getArticleList.nhn'.format(self.domain), data={
                'titleNo': title_no.group(1)
            })
            try:
                # TypeError covers a None/non-string response body.
                return self.json.loads(content).get('result', {}).get('list', [])
            except TypeError:
                pass
        return []

    def get_manga_name(self):
        """Manga title: page <title> text truncated at the last '|'."""
        content = self.http_get(self.get_url())
        name = self.text_content(content, 'title')
        return name[:name.rfind('|')].strip(' \n\t\r')

    def get_chapters(self):
        """URLs of freely readable chapters only (freeFlg == 'Y'), reversed
        — presumably so the API's newest-first order becomes oldest-first."""
        # TODO: see i['freeFlg'] Y = true, W = false #19
        items = [i['articleDetailUrl'] for i in self.content if i['freeFlg'] == 'Y']
        self.log('Free chapters count: %d' % len(items), file=stderr)
        return items[::-1]

    def get_files(self):
        """src URLs of the current chapter's page images."""
        items = self.html_fromstring(self.chapter, '.comic-image._comicImage > img.comic-image__image')
        return [i.get('src') for i in items]

    def get_cover(self):
        # No cover support for this provider.
        pass

    def book_meta(self) -> dict:
        # todo meta
        pass

# Entry point name expected by the manga_py provider loader.
main = ComicoJp
| 30.352941 | 103 | 0.575581 |
015f2b948c2141df821e06d4ac45c8147c3da327 | 392 | py | Python | flask_react_template/server/config/base.py | victor-iyiola/flask-react-template | 7b6d0363499908204fb3db0f519c8dfbf0a805d9 | [
"MIT"
] | 1 | 2018-01-31T17:19:09.000Z | 2018-01-31T17:19:09.000Z | flask_react_template/server/config/base.py | victor-iyiola/flask-react-template | 7b6d0363499908204fb3db0f519c8dfbf0a805d9 | [
"MIT"
] | null | null | null | flask_react_template/server/config/base.py | victor-iyiola/flask-react-template | 7b6d0363499908204fb3db0f519c8dfbf0a805d9 | [
"MIT"
] | null | null | null | """
@author Victor I. Afolabi
A.I. Engineer & Software developer
javafolabi@gmail.com
Created on 31 January, 2018 @ 4:56 PM.
Copyright © 2018. Victor. All rights reserved.
"""
from passlib.hash import sha256_crypt
from flask_react_template.settings import APP_NAME
class Config(object):
    """Base application configuration flags and secret key."""
    DEBUG = False
    TESTING = False
    # NOTE(review): the secret key is a salted hash of the public APP_NAME,
    # recomputed on every import — with sha256_crypt's per-call salt it will
    # differ across restarts (invalidating e.g. signed sessions), and its
    # input is guessable.  A persisted random secret would be safer; confirm.
    SECRET_KEY = sha256_crypt.encrypt(APP_NAME)
| 20.631579 | 50 | 0.721939 |
c8e0bcc54e2d2e2e536050d607b16d13b8dd71ed | 1,002 | py | Python | _filter.py | prajjwal1/nlp_simple_qa | 340282feb69b133567be833a8ec19171a9ab1fd6 | [
"Apache-2.0"
] | null | null | null | _filter.py | prajjwal1/nlp_simple_qa | 340282feb69b133567be833a8ec19171a9ab1fd6 | [
"Apache-2.0"
] | 5 | 2021-11-30T01:10:42.000Z | 2021-11-30T01:11:09.000Z | _filter.py | prajjwal1/nlp_simple_qa | 340282feb69b133567be833a8ec19171a9ab1fd6 | [
"Apache-2.0"
] | null | null | null | import pickle
import re
def filter_query(query):
    """Remove stop-word tokens and quote/punctuation characters from *query*.

    Filtering is purely lexical and case-sensitive: whole whitespace-separated
    tokens matching a stop word are dropped, then individual punctuation and
    quote characters are deleted from whatever text remains.

    Args:
        query: raw query string.

    Returns:
        The cleaned query string (may be empty).
    """
    # Whole tokens to drop.  A set gives O(1) membership tests and removes
    # the duplicate "the" entry the original list carried.
    words_to_remove = {"how", "when", "what", "was", "the", "of", "for",
                       "whence", "\""}
    # Characters deleted anywhere in the remaining text, including several
    # Unicode quotation-mark variants.
    chars_to_remove = ['"', '?', '.', "‟", "„", "“", "’", "”"]
    query = ' '.join(tok for tok in query.split() if tok not in words_to_remove)
    # (A dead `s = re.sub(r'\W+', '', query)` assignment was removed here:
    # its result was never used.)
    for char in chars_to_remove:
        query = query.replace(char, "")
    return query
def standardize_query(query):
    """Normalise *query* for lookup by lower-casing it."""
    return query.lower()
def filter_query_retrieval(query):
    """Return the least-frequent token of the cleaned *query*.

    Loads token frequencies from ``dict.pkl`` (a pickled mapping of
    token -> count) and picks the token of the filtered query with the
    lowest count, i.e. the rarest -- and thus most discriminative -- word.
    Raises ``KeyError`` if a token is absent from the frequency table.
    """
    query = standardize_query(query)
    with open("dict.pkl", "rb") as handle:
        # NOTE(review): pickle.load on an untrusted file can execute
        # arbitrary code -- acceptable for a local cache, verify provenance.
        token_freqs = pickle.load(handle)
    tokens = filter_query(query).split()
    counts = [token_freqs[tok] for tok in tokens]
    rarest_idx = counts.index(min(counts))
    return tokens[rarest_idx]
c1ef1485dd7f15be6728ac720df77a08cc43235d | 104,929 | bzl | Python | dotnet/stdlib.net/net47/generated.bzl | nolen777/rules_mono | b49c210478c2240fcc7be655c9fc37d751610fb1 | [
"Apache-2.0"
] | null | null | null | dotnet/stdlib.net/net47/generated.bzl | nolen777/rules_mono | b49c210478c2240fcc7be655c9fc37d751610fb1 | [
"Apache-2.0"
] | null | null | null | dotnet/stdlib.net/net47/generated.bzl | nolen777/rules_mono | b49c210478c2240fcc7be655c9fc37d751610fb1 | [
"Apache-2.0"
] | null | null | null | load("@rules_mono//dotnet/private:rules/stdlib.bzl", "net_stdlib")
def define_stdlib(context_data):
net_stdlib(
name = "accessibility.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Accessibility.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Accessibility.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "custommarshalers.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/CustomMarshalers.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/CustomMarshalers.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "isymwrapper.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/ISymWrapper.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/ISymWrapper.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "microsoft.activities.build.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Activities.Build.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Activities.Build.dll",
deps = [
":mscorlib.dll",
":xamlbuildtask.dll",
":system.xaml.dll",
":system.dll",
":microsoft.build.utilities.v4.0.dll",
":microsoft.build.framework.dll",
":system.activities.dll",
":system.runtime.serialization.dll",
]
)
net_stdlib(
name = "microsoft.build.conversion.v4.0.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Conversion.v4.0.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Conversion.v4.0.dll",
deps = [
":mscorlib.dll",
":microsoft.build.dll",
":system.dll",
":microsoft.build.engine.dll",
":system.core.dll",
]
)
net_stdlib(
name = "microsoft.build.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.dll",
deps = [
":mscorlib.dll",
":system.dll",
":microsoft.build.framework.dll",
":system.core.dll",
":microsoft.build.engine.dll",
]
)
net_stdlib(
name = "microsoft.build.engine.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Engine.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Engine.dll",
deps = [
":mscorlib.dll",
":system.dll",
":microsoft.build.framework.dll",
]
)
net_stdlib(
name = "microsoft.build.framework.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Framework.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Framework.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":system.dll",
]
)
net_stdlib(
name = "microsoft.build.tasks.v4.0.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Tasks.v4.0.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Tasks.v4.0.dll",
deps = [
":mscorlib.dll",
":system.dll",
":microsoft.build.utilities.v4.0.dll",
":microsoft.build.framework.dll",
":system.core.dll",
":system.security.dll",
":system.xaml.dll",
]
)
net_stdlib(
name = "microsoft.build.utilities.v4.0.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Utilities.v4.0.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.Build.Utilities.v4.0.dll",
deps = [
":mscorlib.dll",
":microsoft.build.framework.dll",
":system.dll",
":system.core.dll",
]
)
net_stdlib(
name = "microsoft.csharp.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.CSharp.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.CSharp.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.core.dll",
":system.dynamic.dll",
]
)
net_stdlib(
name = "microsoft.jscript.dll",
version = "10.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.JScript.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.JScript.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "microsoft.visualbasic.compatibility.data.dll",
version = "10.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualBasic.Compatibility.Data.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualBasic.Compatibility.Data.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.drawing.dll",
":microsoft.visualbasic.dll",
":microsoft.visualbasic.compatibility.dll",
":system.security.dll",
]
)
net_stdlib(
name = "microsoft.visualbasic.compatibility.dll",
version = "10.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualBasic.Compatibility.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualBasic.Compatibility.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.dll",
":microsoft.visualbasic.dll",
]
)
net_stdlib(
name = "microsoft.visualbasic.dll",
version = "10.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualBasic.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualBasic.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.deployment.dll",
":system.management.dll",
":system.core.dll",
":system.xml.linq.dll",
":system.drawing.dll",
]
)
net_stdlib(
name = "microsoft.visualc.dll",
version = "10.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualC.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualC.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "microsoft.visualc.stlclr.dll",
version = "2.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualC.STLCLR.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Microsoft.VisualC.STLCLR.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "mscorlib.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/mscorlib.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/mscorlib.dll",
deps = [
]
)
net_stdlib(
name = "presentationbuildtasks.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationBuildTasks.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationBuildTasks.dll",
deps = [
":mscorlib.dll",
":system.dll",
":microsoft.build.utilities.v4.0.dll",
":microsoft.build.framework.dll",
]
)
net_stdlib(
name = "presentationcore.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationCore.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationCore.dll",
deps = [
":mscorlib.dll",
":system.dll",
":windowsbase.dll",
":system.xaml.dll",
":uiautomationtypes.dll",
":system.windows.input.manipulations.dll",
":uiautomationprovider.dll",
":system.deployment.dll",
]
)
net_stdlib(
name = "presentationframework.aero.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Aero.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Aero.dll",
deps = [
":mscorlib.dll",
":windowsbase.dll",
":system.dll",
":presentationcore.dll",
":system.xaml.dll",
]
)
net_stdlib(
name = "presentationframework.aero2.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Aero2.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Aero2.dll",
deps = [
":mscorlib.dll",
":windowsbase.dll",
":system.dll",
":presentationcore.dll",
":system.xaml.dll",
]
)
net_stdlib(
name = "presentationframework.aerolite.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.AeroLite.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.AeroLite.dll",
deps = [
":mscorlib.dll",
":windowsbase.dll",
":system.dll",
":presentationcore.dll",
":system.xaml.dll",
]
)
net_stdlib(
name = "presentationframework.classic.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Classic.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Classic.dll",
deps = [
":mscorlib.dll",
":windowsbase.dll",
":system.dll",
":presentationcore.dll",
":system.xaml.dll",
]
)
net_stdlib(
name = "presentationframework.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":windowsbase.dll",
":system.dll",
":presentationcore.dll",
":system.core.dll",
":uiautomationprovider.dll",
":uiautomationtypes.dll",
":reachframework.dll",
":accessibility.dll",
":system.deployment.dll",
]
)
net_stdlib(
name = "presentationframework.luna.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Luna.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Luna.dll",
deps = [
":mscorlib.dll",
":windowsbase.dll",
":system.dll",
":presentationcore.dll",
":system.xaml.dll",
]
)
net_stdlib(
name = "presentationframework.royale.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Royale.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/PresentationFramework.Royale.dll",
deps = [
":mscorlib.dll",
":windowsbase.dll",
":system.dll",
":presentationcore.dll",
":system.xaml.dll",
]
)
net_stdlib(
name = "reachframework.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/ReachFramework.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/ReachFramework.dll",
deps = [
":mscorlib.dll",
":presentationcore.dll",
":windowsbase.dll",
":system.dll",
":system.drawing.dll",
":system.security.dll",
]
)
net_stdlib(
name = "sysglobl.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/sysglobl.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/sysglobl.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.activities.core.presentation.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Activities.Core.Presentation.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Activities.Core.Presentation.dll",
deps = [
":mscorlib.dll",
":system.dll",
":windowsbase.dll",
":system.activities.presentation.dll",
":system.xaml.dll",
":presentationcore.dll",
":system.activities.dll",
":system.servicemodel.activities.dll",
":system.xml.linq.dll",
":system.core.dll",
":system.runtime.serialization.dll",
":system.windows.presentation.dll",
]
)
net_stdlib(
name = "system.activities.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Activities.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Activities.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":system.core.dll",
":system.dll",
":system.xml.linq.dll",
":system.runtime.serialization.dll",
":system.runtime.durableinstancing.dll",
":microsoft.visualbasic.dll",
]
)
net_stdlib(
name = "system.activities.durableinstancing.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Activities.DurableInstancing.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Activities.DurableInstancing.dll",
deps = [
":mscorlib.dll",
":system.runtime.durableinstancing.dll",
":system.xml.linq.dll",
":system.activities.dll",
":system.core.dll",
":system.runtime.serialization.dll",
":system.dll",
]
)
net_stdlib(
name = "system.activities.presentation.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Activities.Presentation.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Activities.Presentation.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":system.dll",
":windowsbase.dll",
":presentationcore.dll",
":system.activities.dll",
":system.core.dll",
":system.xml.linq.dll",
":system.drawing.dll",
":windowsformsintegration.dll",
":uiautomationprovider.dll",
":uiautomationtypes.dll",
":reachframework.dll",
":system.servicemodel.activities.dll",
":system.componentmodel.composition.dll",
]
)
net_stdlib(
name = "system.addin.contract.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.AddIn.Contract.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.AddIn.Contract.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.addin.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.AddIn.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.AddIn.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.addin.contract.dll",
]
)
net_stdlib(
name = "system.componentmodel.composition.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ComponentModel.Composition.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ComponentModel.Composition.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
":system.dll",
]
)
net_stdlib(
name = "system.componentmodel.composition.registration.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ComponentModel.Composition.Registration.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ComponentModel.Composition.Registration.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.componentmodel.composition.dll",
":system.core.dll",
":system.reflection.context.dll",
]
)
net_stdlib(
name = "system.componentmodel.dataannotations.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ComponentModel.DataAnnotations.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ComponentModel.DataAnnotations.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.configuration.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Configuration.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Configuration.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.security.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.configuration.install.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Configuration.Install.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Configuration.Install.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.runtime.serialization.dll",
]
)
net_stdlib(
name = "system.core.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Core.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Core.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.numerics.dll",
":system.security.dll",
]
)
net_stdlib(
name = "system.data.datasetextensions.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.DataSetExtensions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.DataSetExtensions.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.data.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.numerics.dll",
":system.runtime.caching.dll",
":system.core.dll",
":system.enterpriseservices.dll",
]
)
net_stdlib(
name = "system.data.entity.design.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Entity.Design.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Entity.Design.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.data.entity.dll",
":system.core.dll",
":system.xml.linq.dll",
":system.data.datasetextensions.dll",
]
)
net_stdlib(
name = "system.data.entity.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Entity.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Entity.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
":system.dll",
":system.runtime.serialization.dll",
":system.componentmodel.dataannotations.dll",
":system.xml.linq.dll",
]
)
net_stdlib(
name = "system.data.linq.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Linq.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Linq.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
":system.dll",
":system.runtime.serialization.dll",
":system.xml.linq.dll",
]
)
net_stdlib(
name = "system.data.oracleclient.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.OracleClient.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.OracleClient.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.enterpriseservices.dll",
]
)
net_stdlib(
name = "system.data.services.client.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Services.Client.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Services.Client.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.core.dll",
":system.xml.linq.dll",
]
)
net_stdlib(
name = "system.data.services.design.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Services.Design.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Services.Design.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.core.dll",
":system.data.entity.dll",
":system.data.services.client.dll",
":system.xml.linq.dll",
":system.web.extensions.dll",
]
)
net_stdlib(
name = "system.data.services.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Services.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.Services.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
":system.dll",
":system.data.services.client.dll",
":system.servicemodel.web.dll",
":system.servicemodel.activation.dll",
":system.runtime.serialization.dll",
":system.data.entity.dll",
":system.xml.linq.dll",
":system.data.linq.dll",
]
)
net_stdlib(
name = "system.data.sqlxml.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.SqlXml.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Data.SqlXml.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.deployment.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Deployment.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Deployment.dll",
deps = [
":mscorlib.dll",
":system.security.dll",
":system.dll",
":system.drawing.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.design.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Design.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Design.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.dll",
":system.data.oracleclient.dll",
":accessibility.dll",
":system.drawing.design.dll",
":system.web.regularexpressions.dll",
":system.runtime.serialization.formatters.soap.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.device.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Device.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Device.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.directoryservices.accountmanagement.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.DirectoryServices.AccountManagement.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.DirectoryServices.AccountManagement.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.directoryservices.dll",
":system.directoryservices.protocols.dll",
]
)
net_stdlib(
name = "system.directoryservices.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.DirectoryServices.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.DirectoryServices.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.directoryservices.protocols.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.DirectoryServices.Protocols.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.DirectoryServices.Protocols.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.directoryservices.dll",
]
)
net_stdlib(
name = "system.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.drawing.design.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Drawing.Design.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Drawing.Design.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.dll",
]
)
net_stdlib(
name = "system.drawing.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Drawing.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Drawing.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.dynamic.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Dynamic.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Dynamic.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
":system.dll",
]
)
net_stdlib(
name = "system.enterpriseservices.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.EnterpriseServices.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.EnterpriseServices.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.directoryservices.dll",
]
)
net_stdlib(
name = "system.identitymodel.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IdentityModel.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IdentityModel.dll",
deps = [
":mscorlib.dll",
":system.runtime.serialization.dll",
":system.dll",
":system.core.dll",
":system.web.applicationservices.dll",
":system.security.dll",
]
)
net_stdlib(
name = "system.identitymodel.selectors.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IdentityModel.Selectors.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IdentityModel.Selectors.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.identitymodel.dll",
":system.runtime.serialization.dll",
]
)
net_stdlib(
name = "system.identitymodel.services.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IdentityModel.Services.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IdentityModel.Services.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.identitymodel.dll",
":system.runtime.serialization.dll",
":system.web.applicationservices.dll",
]
)
net_stdlib(
name = "system.io.compression.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IO.Compression.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IO.Compression.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.io.compression.filesystem.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IO.Compression.FileSystem.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IO.Compression.FileSystem.dll",
deps = [
":mscorlib.dll",
":system.io.compression.dll",
":system.dll",
]
)
net_stdlib(
name = "system.io.log.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IO.Log.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.IO.Log.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.management.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Management.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Management.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.configuration.install.dll",
":microsoft.jscript.dll",
]
)
net_stdlib(
name = "system.management.instrumentation.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Management.Instrumentation.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Management.Instrumentation.dll",
deps = [
":mscorlib.dll",
":system.management.dll",
":system.dll",
":system.core.dll",
":system.configuration.install.dll",
]
)
net_stdlib(
name = "system.messaging.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Messaging.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Messaging.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.directoryservices.dll",
":system.configuration.install.dll",
":system.drawing.dll",
]
)
net_stdlib(
name = "system.net.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Net.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Net.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.net.http.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Net.Http.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Net.Http.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.net.http.webrequest.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Net.Http.WebRequest.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Net.Http.WebRequest.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.net.http.dll",
]
)
net_stdlib(
name = "system.numerics.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Numerics.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Numerics.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.printing.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Printing.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Printing.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.drawing.dll",
":system.xaml.dll",
":windowsbase.dll",
":reachframework.dll",
":presentationcore.dll",
]
)
net_stdlib(
name = "system.reflection.context.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Reflection.Context.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Reflection.Context.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.runtime.caching.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.Caching.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.Caching.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.runtime.durableinstancing.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.DurableInstancing.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.DurableInstancing.dll",
deps = [
":mscorlib.dll",
":system.xml.linq.dll",
":system.core.dll",
":system.runtime.serialization.dll",
":system.dll",
]
)
net_stdlib(
name = "system.runtime.remoting.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.Remoting.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.Remoting.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.runtime.serialization.formatters.soap.dll",
":system.directoryservices.dll",
]
)
net_stdlib(
name = "system.runtime.serialization.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.Serialization.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.Serialization.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.runtime.serialization.formatters.soap.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.Serialization.Formatters.Soap.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Runtime.Serialization.Formatters.Soap.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.security.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Security.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Security.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.servicemodel.activation.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Activation.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Activation.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.servicemodel.activities.dll",
":system.activities.dll",
":system.xaml.dll",
":system.xml.linq.dll",
":system.core.dll",
":system.net.http.dll",
":system.web.regularexpressions.dll",
":system.runtime.durableinstancing.dll",
]
)
net_stdlib(
name = "system.servicemodel.activities.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Activities.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Activities.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":system.xml.linq.dll",
":system.dll",
":system.identitymodel.dll",
":system.activities.dll",
":system.core.dll",
":system.runtime.durableinstancing.dll",
":system.runtime.serialization.dll",
":system.activities.durableinstancing.dll",
]
)
net_stdlib(
name = "system.servicemodel.channels.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Channels.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Channels.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":system.runtime.serialization.dll",
":system.dll",
":system.net.http.dll",
":system.web.services.dll",
]
)
net_stdlib(
name = "system.servicemodel.discovery.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Discovery.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Discovery.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.runtime.serialization.dll",
":system.servicemodel.channels.dll",
":system.xml.linq.dll",
]
)
net_stdlib(
name = "system.servicemodel.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":system.dll",
":system.core.dll",
":system.runtime.serialization.dll",
":system.identitymodel.dll",
":system.directoryservices.dll",
":system.web.services.dll",
":system.enterpriseservices.dll",
":system.identitymodel.selectors.dll",
":system.web.applicationservices.dll",
":system.messaging.dll",
":system.xml.linq.dll",
":system.runtime.durableinstancing.dll",
":system.serviceprocess.dll",
":system.net.http.dll",
":system.servicemodel.activation.dll",
":system.security.dll",
]
)
net_stdlib(
name = "system.servicemodel.routing.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Routing.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Routing.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
":system.dll",
":system.runtime.durableinstancing.dll",
":system.runtime.serialization.dll",
]
)
net_stdlib(
name = "system.servicemodel.web.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Web.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceModel.Web.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.runtime.serialization.dll",
":system.xml.linq.dll",
":system.web.extensions.dll",
":system.servicemodel.activation.dll",
":system.core.dll",
":system.servicemodel.channels.dll",
]
)
net_stdlib(
name = "system.serviceprocess.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceProcess.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.ServiceProcess.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.configuration.install.dll",
":system.drawing.dll",
]
)
net_stdlib(
name = "system.speech.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Speech.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Speech.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.transactions.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Transactions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Transactions.dll",
deps = [
":mscorlib.dll",
":system.enterpriseservices.dll",
":system.dll",
]
)
net_stdlib(
name = "system.web.abstractions.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Abstractions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Abstractions.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.web.applicationservices.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.ApplicationServices.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.ApplicationServices.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.web.datavisualization.design.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.DataVisualization.Design.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.DataVisualization.Design.dll",
deps = [
":mscorlib.dll",
":system.web.datavisualization.dll",
":system.drawing.dll",
":system.dll",
":system.drawing.design.dll",
]
)
net_stdlib(
name = "system.web.datavisualization.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.DataVisualization.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.DataVisualization.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.drawing.dll",
]
)
net_stdlib(
name = "system.web.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.dll",
":system.core.dll",
":system.directoryservices.dll",
":system.enterpriseservices.dll",
":system.web.regularexpressions.dll",
":system.web.applicationservices.dll",
":system.componentmodel.dataannotations.dll",
":system.directoryservices.protocols.dll",
":system.security.dll",
":system.serviceprocess.dll",
":system.web.services.dll",
":microsoft.build.utilities.v4.0.dll",
":microsoft.build.framework.dll",
":microsoft.build.tasks.v4.0.dll",
":system.runtime.caching.dll",
]
)
net_stdlib(
name = "system.web.dynamicdata.design.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.DynamicData.Design.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.DynamicData.Design.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.web.dynamicdata.dll",
":system.core.dll",
":system.drawing.dll",
]
)
net_stdlib(
name = "system.web.dynamicdata.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.DynamicData.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.DynamicData.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.dll",
":system.web.extensions.dll",
":system.core.dll",
":system.data.linq.dll",
":system.componentmodel.dataannotations.dll",
":system.web.entity.dll",
":system.data.entity.dll",
":system.xml.linq.dll",
]
)
net_stdlib(
name = "system.web.entity.design.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Entity.Design.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Entity.Design.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.dll",
":system.web.entity.dll",
":system.data.entity.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.web.entity.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Entity.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Entity.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.dll",
":system.web.extensions.dll",
":system.data.entity.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.web.extensions.design.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Extensions.Design.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Extensions.Design.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.dll",
":system.web.extensions.dll",
":system.data.linq.dll",
]
)
net_stdlib(
name = "system.web.extensions.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Extensions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Extensions.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.dll",
":system.web.services.dll",
":system.core.dll",
":system.runtime.serialization.dll",
":system.data.linq.dll",
":system.web.applicationservices.dll",
":system.servicemodel.activation.dll",
":system.data.services.client.dll",
":system.data.entity.dll",
]
)
net_stdlib(
name = "system.web.mobile.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Mobile.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Mobile.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.drawing.dll",
":system.drawing.design.dll",
":system.web.regularexpressions.dll",
]
)
net_stdlib(
name = "system.web.regularexpressions.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.RegularExpressions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.RegularExpressions.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.web.routing.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Routing.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Routing.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.web.services.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Services.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Web.Services.dll",
deps = [
":mscorlib.dll",
":system.enterpriseservices.dll",
":system.dll",
":system.directoryservices.dll",
]
)
net_stdlib(
name = "system.windows.controls.ribbon.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Controls.Ribbon.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Controls.Ribbon.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":windowsbase.dll",
":presentationcore.dll",
":system.dll",
":uiautomationprovider.dll",
":uiautomationtypes.dll",
]
)
net_stdlib(
name = "system.windows.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.windows.forms.datavisualization.design.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Forms.DataVisualization.Design.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Forms.DataVisualization.Design.dll",
deps = [
":mscorlib.dll",
":system.windows.forms.datavisualization.dll",
":system.drawing.dll",
":system.dll",
":system.drawing.design.dll",
]
)
net_stdlib(
name = "system.windows.forms.datavisualization.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Forms.DataVisualization.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Forms.DataVisualization.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.drawing.dll",
]
)
net_stdlib(
name = "system.windows.forms.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Forms.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Forms.dll",
deps = [
":mscorlib.dll",
":system.drawing.dll",
":system.security.dll",
":system.dll",
":system.core.dll",
":accessibility.dll",
":system.deployment.dll",
":system.runtime.serialization.formatters.soap.dll",
]
)
net_stdlib(
name = "system.windows.input.manipulations.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Input.Manipulations.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Input.Manipulations.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.windows.presentation.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Presentation.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Windows.Presentation.dll",
deps = [
":mscorlib.dll",
":system.dll",
":windowsbase.dll",
":system.addin.contract.dll",
":presentationcore.dll",
":system.addin.dll",
]
)
net_stdlib(
name = "system.workflow.activities.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Workflow.Activities.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Workflow.Activities.dll",
deps = [
":mscorlib.dll",
":system.workflow.componentmodel.dll",
":system.dll",
":system.drawing.dll",
":system.workflow.runtime.dll",
":system.web.services.dll",
":system.directoryservices.dll",
":system.web.applicationservices.dll",
]
)
net_stdlib(
name = "system.workflow.componentmodel.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Workflow.ComponentModel.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Workflow.ComponentModel.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.drawing.dll",
":microsoft.build.utilities.v4.0.dll",
":microsoft.build.framework.dll",
":system.core.dll",
":microsoft.build.tasks.v4.0.dll",
]
)
net_stdlib(
name = "system.workflow.runtime.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Workflow.Runtime.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Workflow.Runtime.dll",
deps = [
":mscorlib.dll",
":system.workflow.componentmodel.dll",
":system.activities.dll",
":system.dll",
":system.core.dll",
":system.xml.linq.dll",
":system.runtime.serialization.dll",
":system.messaging.dll",
]
)
net_stdlib(
name = "system.workflowservices.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.WorkflowServices.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.WorkflowServices.dll",
deps = [
":mscorlib.dll",
":system.workflow.componentmodel.dll",
":system.workflow.runtime.dll",
":system.dll",
":system.identitymodel.dll",
":system.drawing.dll",
":system.runtime.serialization.dll",
":system.servicemodel.activities.dll",
":system.activities.dll",
":system.servicemodel.activation.dll",
":system.messaging.dll",
]
)
net_stdlib(
name = "system.xaml.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Xaml.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Xaml.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.xml.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Xml.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Xml.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.data.sqlxml.dll",
]
)
net_stdlib(
name = "system.xml.linq.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Xml.Linq.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Xml.Linq.dll",
deps = [
":mscorlib.dll",
":system.runtime.serialization.dll",
":system.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.xml.serialization.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Xml.Serialization.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/System.Xml.Serialization.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "uiautomationclient.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/UIAutomationClient.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/UIAutomationClient.dll",
deps = [
":mscorlib.dll",
":windowsbase.dll",
":uiautomationtypes.dll",
":uiautomationprovider.dll",
":system.dll",
":accessibility.dll",
]
)
net_stdlib(
name = "uiautomationclientsideproviders.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/UIAutomationClientsideProviders.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/UIAutomationClientsideProviders.dll",
deps = [
":mscorlib.dll",
":uiautomationclient.dll",
":windowsbase.dll",
":accessibility.dll",
":system.dll",
":uiautomationprovider.dll",
":uiautomationtypes.dll",
]
)
net_stdlib(
name = "uiautomationprovider.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/UIAutomationProvider.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/UIAutomationProvider.dll",
deps = [
":mscorlib.dll",
":uiautomationtypes.dll",
":windowsbase.dll",
]
)
net_stdlib(
name = "uiautomationtypes.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/UIAutomationTypes.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/UIAutomationTypes.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "windowsbase.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/WindowsBase.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/WindowsBase.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":system.dll",
":accessibility.dll",
":system.core.dll",
":system.security.dll",
]
)
net_stdlib(
name = "windowsformsintegration.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/WindowsFormsIntegration.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/WindowsFormsIntegration.dll",
deps = [
":mscorlib.dll",
":system.xaml.dll",
":windowsbase.dll",
":presentationcore.dll",
":system.drawing.dll",
":system.dll",
":uiautomationprovider.dll",
]
)
net_stdlib(
name = "xamlbuildtask.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/XamlBuildTask.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/XamlBuildTask.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.core.dll",
":system.xaml.dll",
":microsoft.build.utilities.v4.0.dll",
":microsoft.build.framework.dll",
":system.xml.linq.dll",
]
)
net_stdlib(
name = "system.collections.concurrent.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Collections.Concurrent.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Collections.Concurrent.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.collections.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Collections.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Collections.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
":system.dll",
]
)
net_stdlib(
name = "system.componentmodel.annotations.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ComponentModel.Annotations.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ComponentModel.Annotations.dll",
deps = [
":mscorlib.dll",
":system.componentmodel.dataannotations.dll",
]
)
net_stdlib(
name = "system.componentmodel.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ComponentModel.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ComponentModel.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.componentmodel.eventbasedasync.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ComponentModel.EventBasedAsync.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ComponentModel.EventBasedAsync.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.diagnostics.contracts.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Diagnostics.Contracts.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Diagnostics.Contracts.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.diagnostics.debug.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Diagnostics.Debug.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Diagnostics.Debug.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.diagnostics.tools.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Diagnostics.Tools.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Diagnostics.Tools.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.diagnostics.tracing.dll",
version = "4.0.20.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Diagnostics.Tracing.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Diagnostics.Tracing.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.dynamic.runtime.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Dynamic.Runtime.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Dynamic.Runtime.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.globalization.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Globalization.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Globalization.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.io.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.IO.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.IO.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.linq.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Linq.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Linq.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.linq.expressions.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Linq.Expressions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Linq.Expressions.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.linq.parallel.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Linq.Parallel.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Linq.Parallel.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.linq.queryable.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Linq.Queryable.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Linq.Queryable.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.net.networkinformation.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Net.NetworkInformation.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Net.NetworkInformation.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.net.primitives.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Net.Primitives.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Net.Primitives.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.net.requests.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Net.Requests.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Net.Requests.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.net.webheadercollection.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Net.WebHeaderCollection.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Net.WebHeaderCollection.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.objectmodel.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ObjectModel.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ObjectModel.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.reflection.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.reflection.emit.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Emit.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Emit.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.reflection.emit.ilgeneration.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Emit.ILGeneration.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Emit.ILGeneration.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.reflection.emit.lightweight.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Emit.Lightweight.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Emit.Lightweight.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.reflection.extensions.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Extensions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Extensions.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.reflection.primitives.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Primitives.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Reflection.Primitives.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.resources.resourcemanager.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Resources.ResourceManager.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Resources.ResourceManager.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.runtime.dll",
version = "4.0.20.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
":system.dll",
":system.componentmodel.composition.dll",
]
)
net_stdlib(
name = "system.runtime.extensions.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Extensions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Extensions.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.runtime.handles.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Handles.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Handles.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.runtime.interopservices.dll",
version = "4.0.20.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.InteropServices.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.InteropServices.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
":system.dll",
]
)
net_stdlib(
name = "system.runtime.interopservices.windowsruntime.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.InteropServices.WindowsRuntime.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.InteropServices.WindowsRuntime.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.runtime.numerics.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Numerics.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Numerics.dll",
deps = [
":mscorlib.dll",
":system.numerics.dll",
]
)
net_stdlib(
name = "system.runtime.serialization.json.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Serialization.Json.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Serialization.Json.dll",
deps = [
":mscorlib.dll",
":system.runtime.serialization.dll",
]
)
net_stdlib(
name = "system.runtime.serialization.primitives.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Serialization.Primitives.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Serialization.Primitives.dll",
deps = [
":mscorlib.dll",
":system.runtime.serialization.dll",
]
)
net_stdlib(
name = "system.runtime.serialization.xml.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Serialization.Xml.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Runtime.Serialization.Xml.dll",
deps = [
":mscorlib.dll",
":system.runtime.serialization.dll",
]
)
net_stdlib(
name = "system.security.principal.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Security.Principal.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Security.Principal.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.servicemodel.duplex.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.Duplex.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.Duplex.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.servicemodel.http.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.Http.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.Http.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.servicemodel.nettcp.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.NetTcp.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.NetTcp.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.servicemodel.primitives.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.Primitives.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.Primitives.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.servicemodel.security.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.Security.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.ServiceModel.Security.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.text.encoding.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Text.Encoding.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Text.Encoding.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.text.encoding.extensions.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Text.Encoding.Extensions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Text.Encoding.Extensions.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.text.regularexpressions.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Text.RegularExpressions.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Text.RegularExpressions.dll",
deps = [
":mscorlib.dll",
":system.dll",
]
)
net_stdlib(
name = "system.threading.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Threading.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Threading.dll",
deps = [
":mscorlib.dll",
":system.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.threading.tasks.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Threading.Tasks.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Threading.Tasks.dll",
deps = [
":mscorlib.dll",
":system.core.dll",
]
)
net_stdlib(
name = "system.threading.tasks.parallel.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Threading.Tasks.Parallel.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Threading.Tasks.Parallel.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.threading.timer.dll",
version = "4.0.0.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Threading.Timer.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Threading.Timer.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.xml.readerwriter.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Xml.ReaderWriter.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Xml.ReaderWriter.dll",
deps = [
":mscorlib.dll",
]
)
net_stdlib(
name = "system.xml.xdocument.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Xml.XDocument.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Xml.XDocument.dll",
deps = [
":mscorlib.dll",
":system.xml.linq.dll",
]
)
net_stdlib(
name = "system.xml.xmlserializer.dll",
version = "4.0.10.0",
dotnet_context_data = context_data,
ref = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Xml.XmlSerializer.dll",
stdlib_path = "@Microsoft.NETFramework.ReferenceAssemblies.net47.1.0.0//:build/.NETFramework/v4.7/Facades/System.Xml.XmlSerializer.dll",
deps = [
":mscorlib.dll",
]
)
| 44.480288 | 165 | 0.612081 |
83afb9339aa841d601430ada8501d4a8985be294 | 8,877 | py | Python | pyattck/configuration.py | aacienfuegos/pyattck | b3013b846aa4de414bb7654abdbf8bd19c491eda | [
"MIT"
] | null | null | null | pyattck/configuration.py | aacienfuegos/pyattck | b3013b846aa4de414bb7654abdbf8bd19c491eda | [
"MIT"
] | null | null | null | pyattck/configuration.py | aacienfuegos/pyattck | b3013b846aa4de414bb7654abdbf8bd19c491eda | [
"MIT"
] | null | null | null | import os
import json
import warnings
from urllib.parse import urlparse
from pathlib import Path
import yaml
from requests.api import request
from .utils.exceptions import UnknownFileError
class ConfigurationProperties(type):
    """Metaclass backing :class:`Configuration`.

    ``Configuration`` is never instantiated: every setting is a class-level
    attribute, so the getters/setters below are defined on the *metaclass*
    and are used as ``Configuration.data_path`` etc.  The metaclass keeps a
    cached dict of the current settings in ``__config_data`` and can persist
    it to / reload it from disk (JSON or YAML), and can download and cache
    the remote ATT&CK / NIST JSON documents under ``data_path``.
    """

    # Cached dict of the current settings; rebuilt by __update_config().
    __config_data = None

    def __download_url_data(cls, url):
        """GET *url* (with cls.requests_kwargs) and return the JSON body.

        Returns an empty dict on any non-200 response.
        """
        response = request('GET', url, **cls.requests_kwargs)
        if response.status_code == 200:
            return response.json()
        return {}

    def _check_if_path(cls, value):
        # NOTE(review): Path(value) is truthy for essentially any string
        # (Path('') is PosixPath('.')), so this accepts nearly every value
        # that reaches it — confirm whether a stricter check was intended.
        if Path(value):
            return True
        return False

    def _check_if_url(cls, value):
        """Return True if *value* parses as an http(s) URL."""
        try:
            if urlparse(value).scheme in ['http', 'https']:
                return True
            return False
        except:  # urlparse raises on non-string input; treat as "not a URL"
            return False

    def __get_absolute_path(cls, path_string):
        """Expand env vars and '~' in *path_string* and absolutize it."""
        return os.path.abspath(
            os.path.expanduser(
                os.path.expandvars(path_string)
            )
        )

    def __validate_value_string(cls, value):
        """Return *value* if it looks like a URL or a path; raise otherwise.

        Because _check_if_path is effectively always true (see above), the
        failure branch is rarely, if ever, reachable in practice.
        """
        if cls._check_if_url(value):
            return value
        elif cls._check_if_path(value):
            return value
        else:
            raise Exception('The provided value is neither a URL or file path')

    def __write_to_disk(cls, path, data):
        """Serialize *data* to *path* as JSON or YAML, based on extension.

        Creates missing parent directories.  Raises UnknownFileError for any
        extension other than .json/.yml/.yaml.
        """
        if not os.path.exists(os.path.dirname(path)):
            try:
                os.makedirs(os.path.dirname(path))
            except:  # surface a clearer message than the raw OSError
                raise Exception('pyattck attempted to create the provided directories but was unable to: {}'.format(path))
        with open(path, 'w+') as f:
            if path.endswith('.json'):
                json.dump(data, f)
            elif path.endswith('.yml') or path.endswith('.yaml'):
                yaml.dump(data, f)
            else:
                raise UnknownFileError(provided_value=path, known_values=['.json', '.yml', '.yaml'])

    def __read_from_disk(cls, path):
        """Load and return JSON/YAML from *path*; None if missing/unreadable.

        A malformed file only triggers a warning (defaults are used instead).
        """
        if os.path.exists(path) and os.path.isfile(path):
            try:
                with open(path) as f:
                    if path.endswith('.json'):
                        return json.load(f)
                    elif path.endswith('.yml') or path.endswith('.yaml'):
                        return yaml.load(f, Loader=yaml.FullLoader)
                    else:
                        raise UnknownFileError(provided_value=path, known_values=['.json', '.yml', '.yaml'])
            except:  # swallow parse errors; caller falls back to defaults
                warnings.warn(message=f"The provided config file {path} is not in the correct format. Using default values instead.")
                pass
        return None

    def _save_json_data(cls, force: bool=False) -> None:
        """Download each dataset whose source is a URL and cache it on disk.

        After caching, the corresponding ``_<name>`` attribute is pointed at
        the local file so subsequent reads avoid the network.  With
        ``force=True`` existing cached files are re-downloaded.
        """
        if not os.path.exists(cls.data_path):
            try:
                os.makedirs(cls.data_path)
            except:  # normalize any filesystem error into one message
                raise Exception(
                    'Unable to save data to the provided location: {}'.format(cls.data_path)
                )
        for json_data in ['enterprise_attck_json', 'pre_attck_json',
                          'mobile_attck_json', 'ics_attck_json', 'nist_controls_json',
                          'generated_attck_json', 'generated_nist_json']:
            if cls._check_if_url(getattr(cls, json_data)):
                try:
                    path = os.path.join(cls.data_path, "{json_data}.json".format(json_data=json_data))
                    if not os.path.exists(path) or force:
                        data = cls.__download_url_data(getattr(cls, json_data))
                        cls.__write_to_disk(path, data)
                    # Point the setting at the cached copy (assumed to run
                    # whether or not a download occurred — TODO confirm
                    # against upstream pyattck; indentation was ambiguous).
                    setattr(cls, '_' + json_data, path)
                except:
                    # NOTE(review): raising the Warning *class* as an
                    # exception aborts the whole loop; warnings.warn may
                    # have been intended here.
                    raise Warning(f"Unable to download data from {json_data}")
        cls.__update_config()

    def __update_config(cls):
        """Rebuild the cached settings dict from the current attributes."""
        cls.__config_data = {
            'data_path': cls.data_path,
            'enterprise_attck_json': cls._enterprise_attck_json,
            'pre_attck_json': cls._pre_attck_json,
            'mobile_attck_json': cls._mobile_attck_json,
            'ics_attck_json': cls._ics_attck_json,
            'nist_controls_json': cls._nist_controls_json,
            'generated_attck_json': cls._generated_attck_json,
            'generated_nist_json': cls._generated_nist_json,
            'config_file_path': cls._config_file_path
        }

    def get_data(cls, value: str) -> dict:
        """Return the dataset named *value*, from its URL or local file."""
        if cls._check_if_url(cls.config_data.get(value)):
            return cls.__download_url_data(cls.config_data.get(value))
        else:
            return cls.__read_from_disk(cls.config_data.get(value))

    @property
    def requests_kwargs(cls):
        # Extra kwargs (proxies, verify, ...) forwarded to requests.request.
        return cls._requests_kwargs

    @requests_kwargs.setter
    def requests_kwargs(cls, value):
        cls._requests_kwargs = value

    @property
    def use_config(cls):
        # When true, settings are (re)loaded from the on-disk config file.
        return cls._use_config

    @use_config.setter
    def use_config(cls, value):
        cls._use_config = bool(value)

    @property
    def save_config(cls):
        # When true, accessing config_data persists settings and datasets.
        return cls._save_config

    @save_config.setter
    def save_config(cls, value):
        cls._save_config = bool(value)

    @property
    def config_file_path(cls):
        return cls.__get_absolute_path(cls._config_file_path)

    @config_file_path.setter
    def config_file_path(cls, value):
        cls._config_file_path = cls.__get_absolute_path(value)
        cls.__update_config()

    @property
    def data_path(cls):
        return cls.__get_absolute_path(cls._data_path)

    @data_path.setter
    def data_path(cls, value):
        cls._data_path = cls.__get_absolute_path(value)
        cls.__update_config()

    @property
    def config_data(cls):
        """Current settings dict; may read from and/or write to disk.

        Side effects: with use_config, reloads from config_file_path; with
        save_config, rewrites the config file and caches the datasets.
        """
        if cls.use_config:
            cls.__config_data = cls.__read_from_disk(cls.config_file_path)
            if not cls.__config_data:
                cls.__update_config()
        else:
            cls.__update_config()
        if cls.save_config:
            cls.__update_config()
            cls.__write_to_disk(cls.config_file_path, cls.__config_data)
            cls._save_json_data()
        return cls.__config_data

    @property
    def enterprise_attck_json(cls):
        return cls._enterprise_attck_json

    @enterprise_attck_json.setter
    def enterprise_attck_json(cls, value):
        cls._enterprise_attck_json = cls.__validate_value_string(value)
        cls.__update_config()

    @property
    def pre_attck_json(cls):
        return cls._pre_attck_json

    @pre_attck_json.setter
    def pre_attck_json(cls, value):
        cls._pre_attck_json = cls.__validate_value_string(value)
        cls.__update_config()

    @property
    def mobile_attck_json(cls):
        return cls._mobile_attck_json

    @mobile_attck_json.setter
    def mobile_attck_json(cls, value):
        cls._mobile_attck_json = cls.__validate_value_string(value)
        cls.__update_config()

    @property
    def ics_attck_json(cls):
        return cls._ics_attck_json

    @ics_attck_json.setter
    def ics_attck_json(cls, value):
        cls._ics_attck_json = cls.__validate_value_string(value)
        cls.__update_config()

    @property
    def nist_controls_json(cls):
        return cls._nist_controls_json

    @nist_controls_json.setter
    def nist_controls_json(cls, value):
        cls._nist_controls_json = cls.__validate_value_string(value)
        cls.__update_config()

    @property
    def generated_attck_json(cls):
        return cls._generated_attck_json

    @generated_attck_json.setter
    def generated_attck_json(cls, value):
        cls._generated_attck_json = cls.__validate_value_string(value)
        cls.__update_config()

    @property
    def generated_nist_json(cls):
        return cls._generated_nist_json

    @generated_nist_json.setter
    def generated_nist_json(cls, value):
        cls._generated_nist_json = cls.__validate_value_string(value)
        cls.__update_config()
class Configuration(object, metaclass=ConfigurationProperties):
    """Global pyattck configuration.

    Never instantiated: every attribute below is a class-level default that
    the ConfigurationProperties metaclass exposes (and validates) through
    same-named properties, e.g. ``Configuration.data_path``.
    """

    # Whether settings are loaded from the on-disk config file.
    _use_config = False
    # Whether settings (and downloaded datasets) are persisted to disk.
    _save_config = False
    # Location of the YAML/JSON configuration file.
    _config_file_path = '~/pyattck/config.yml'
    # Directory used to cache downloaded ATT&CK / NIST JSON documents.
    _data_path = '~/pyattck/data'
    # Default source (URL or local path) for each dataset.
    _enterprise_attck_json = "https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json"
    _pre_attck_json = "https://raw.githubusercontent.com/mitre/cti/master/pre-attack/pre-attack.json"
    _mobile_attck_json = "https://raw.githubusercontent.com/mitre/cti/master/mobile-attack/mobile-attack.json"
    _ics_attck_json = "https://raw.githubusercontent.com/mitre/cti/master/ics-attack/ics-attack.json"
    _nist_controls_json = "https://raw.githubusercontent.com/center-for-threat-informed-defense/attack-control-framework-mappings/main/frameworks/attack_10_1/nist800_53_r4/stix/nist800-53-r4-controls.json"
    _generated_attck_json = "https://github.com/swimlane/pyattck/blob/master/generated_attck_data.json?raw=True"
    _generated_nist_json = "https://github.com/swimlane/pyattck/blob/master/attck_to_nist_controls.json?raw=True"
    # Extra keyword arguments passed to requests.request() for downloads.
    _requests_kwargs = {}
e89c36f1068e9c1410f9f47f853b49b298ccd7e6 | 3,269 | py | Python | scripts/mlflow_migration/db_util/dblib.py | tmankita/trains | 068a5296e12dcfa6f46b241dfadfb50b7dffaf8f | [
"Apache-2.0"
] | null | null | null | scripts/mlflow_migration/db_util/dblib.py | tmankita/trains | 068a5296e12dcfa6f46b241dfadfb50b7dffaf8f | [
"Apache-2.0"
] | null | null | null | scripts/mlflow_migration/db_util/dblib.py | tmankita/trains | 068a5296e12dcfa6f46b241dfadfb50b7dffaf8f | [
"Apache-2.0"
] | null | null | null | from typing import Type, Optional
from urllib.parse import urlparse
from sqlalchemy import distinct
from .schemas.metric import Metric
from .schemas.param import Param
from .schemas.run import Run
from .schemas.tag import Tag
# Database engines this migration tool knows how to talk to.
DATABASE_ENGINES = ["postgresql", "mysql", "sqlite", "mssql"]

# Module-wide SQLAlchemy session factory, installed by ``init_session``.
Session: Optional[Type] = None


def validate_db_uri(db_uri):
    """Return True if *db_uri* uses a supported database scheme.

    Accepts both plain schemes ("postgresql://...") and SQLAlchemy
    dialect+driver schemes ("mysql+pymysql://..."); a scheme containing
    more than one "+" is rejected as malformed.
    """
    scheme = urlparse(db_uri).scheme
    # A SQLAlchemy URL scheme is "<dialect>" or "<dialect>+<driver>".
    dialect, _, driver = scheme.partition("+")
    if "+" in driver:
        return False
    # `dialect not in ...` replaces the original non-idiomatic `not x in`.
    return dialect in DATABASE_ENGINES
def one_element_tuple_list_to_untuple_list(tuple_list):
    """Flatten a list of 1-tuples into a list of their stringified values."""
    return [str(value) for (value,) in tuple_list]
def create_list_with_dummy_path(rows):
    """Return *rows* with an empty "path" element appended to each tuple.

    The parameter was previously named ``list``, which shadowed the builtin;
    all callers in this module pass it positionally, so the rename is safe.
    """
    return [row + ("",) for row in rows]
def init_session(session):
    """Install *session* as the module-wide session factory (first call wins)."""
    global Session
    if Session is not None:
        return
    Session = session
def close():
    """Expunge all objects from the active session factory and forget it."""
    global Session
    if Session is None:
        return
    Session.expunge_all()
    Session = None
def get_run_uuids():
    """Return every run UUID, each padded with a dummy path element."""
    global Session
    db = Session()
    uuid_rows = db.query(Run.run_uuid).all()
    db.close()
    return create_list_with_dummy_path(uuid_rows)
def get_metric_values_by_run_uuid(run_uuid, metric_name):
    """Return all recorded values (as strings) of one metric for one run."""
    global Session
    db = Session()
    # Multiple criteria in a single filter() are AND-ed, same as chaining.
    rows = db.query(Metric.value).filter(
        Metric.run_uuid == run_uuid,
        Metric.key == metric_name,
    ).all()
    db.close()
    return one_element_tuple_list_to_untuple_list(rows)
def get_metric_names_by_run_uuid(run_uuid):
    """Return the distinct metric names logged for one run."""
    global Session
    db = Session()
    name_rows = db.query(distinct(Metric.key)).filter(Metric.run_uuid == run_uuid).all()
    db.close()
    return one_element_tuple_list_to_untuple_list(name_rows)
def get_param_value_by_run_uuid(run_uuid, param_name):
    """Return the single value stored for one parameter of one run.

    Raises IndexError if the parameter was never logged for the run.
    """
    global Session
    db = Session()
    rows = db.query(Param.value).filter(
        Param.run_uuid == run_uuid,
        Param.key == param_name,
    ).all()
    db.close()
    (value,) = rows[0]
    return value
def get_param_names_by_run_uuid(run_uuid):
    """Return the distinct parameter names logged for one run."""
    global Session
    db = Session()
    name_rows = db.query(distinct(Param.key)).filter(Param.run_uuid == run_uuid).all()
    db.close()
    return one_element_tuple_list_to_untuple_list(name_rows)
def get_tags_by_run_uuid(run_uuid):
    """Return (key, value) pairs for every tag attached to one run."""
    global Session
    db = Session()
    tag_rows = db.query(Tag.key, Tag.value).filter(Tag.run_uuid == run_uuid).all()
    db.close()
    return tag_rows
def get_run_by_run_uuid(run_uuid):
global Session
session = Session()
values = (
session.query(Run.start_time, Run.end_time, Run.artifact_uri, Run.name)
.filter(Run.run_uuid == run_uuid)
.all()
)
session.close()
return values[0]
__all__ = [
"get_run_uuids",
"get_metric_values_by_run_uuid",
"get_metric_names_by_run_uuid",
"get_param_value_by_run_uuid",
"get_param_names_by_run_uuid",
"get_tags_by_run_uuid",
"get_run_by_run_uuid",
"validate_db_uri",
"close",
"init_session",
]
| 23.021127 | 88 | 0.672989 |
82016f6fa33c512f4da35a81fc2933d876b7db90 | 4,583 | py | Python | scripts/build/builders/esp32.py | rachel-li-jci/connectedhomeip | 7200076e7c690d9fbad4c1d3d13db1d11d87d259 | [
"Apache-2.0"
] | null | null | null | scripts/build/builders/esp32.py | rachel-li-jci/connectedhomeip | 7200076e7c690d9fbad4c1d3d13db1d11d87d259 | [
"Apache-2.0"
] | 1 | 2021-06-16T08:38:45.000Z | 2021-06-16T08:38:45.000Z | scripts/build/builders/esp32.py | rachel-li-jci/connectedhomeip | 7200076e7c690d9fbad4c1d3d13db1d11d87d259 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import shlex
from enum import Enum, auto
from .builder import Builder
class Esp32Board(Enum):
DevKitC = auto()
M5Stack = auto()
C3DevKit = auto()
class Esp32App(Enum):
ALL_CLUSTERS = auto()
LOCK = auto()
SHELL = auto()
BRIDGE = auto()
TEMPERATURE_MEASUREMENT = auto()
@property
def ExampleName(self):
if self == Esp32App.ALL_CLUSTERS:
return 'all-clusters-app'
elif self == Esp32App.LOCK:
return 'lock-app'
elif self == Esp32App.SHELL:
return 'shell'
elif self == Esp32App.BRIDGE:
return 'bridge-app'
elif self == Esp32App.TEMPERATURE_MEASUREMENT:
return 'temperature-measurement-app'
else:
raise Exception('Unknown app type: %r' % self)
@property
def AppNamePrefix(self):
if self == Esp32App.ALL_CLUSTERS:
return 'chip-all-clusters-app'
elif self == Esp32App.LOCK:
return 'chip-lock-app'
elif self == Esp32App.SHELL:
return 'chip-shell'
elif self == Esp32App.BRIDGE:
return 'chip-bridge-app'
elif self == Esp32App.TEMPERATURE_MEASUREMENT:
return 'chip-temperature-measurement-app'
else:
raise Exception('Unknown app type: %r' % self)
def FlashBundleName(self):
return self.AppNamePrefix + '.flashbundle.txt'
def DefaultsFileName(board: Esp32Board, app: Esp32App, enable_rpcs: bool):
if app != Esp32App.ALL_CLUSTERS:
# only all-clusters has a specific defaults name
return None
rpc = "_rpc" if enable_rpcs else ""
if board == Esp32Board.DevKitC:
return 'sdkconfig{}.defaults'.format(rpc)
elif board == Esp32Board.M5Stack:
return 'sdkconfig_m5stack{}.defaults'.format(rpc)
elif board == Esp32Board.C3DevKit:
return 'sdkconfig_c3devkit{}.defaults'.format(rpc)
else:
raise Exception('Unknown board type')
class Esp32Builder(Builder):
def __init__(self,
root,
runner,
board: Esp32Board = Esp32Board.M5Stack,
app: Esp32App = Esp32App.ALL_CLUSTERS,
enable_rpcs: bool = False):
super(Esp32Builder, self).__init__(root, runner)
self.board = board
self.app = app
self.enable_rpcs = enable_rpcs
def _IdfEnvExecute(self, cmd, cwd=None, title=None):
self._Execute(
['bash', '-c', 'source $IDF_PATH/export.sh; %s' % cmd],
cwd=cwd,
title=title)
def generate(self):
if os.path.exists(os.path.join(self.output_dir, 'build.ninja')):
return
defaults = DefaultsFileName(self.board, self.app, self.enable_rpcs)
cmd = 'idf.py'
if defaults:
cmd += " -D SDKCONFIG_DEFAULTS='%s'" % defaults
cmd += ' -C examples/%s/esp32 -B %s reconfigure' % (
self.app.ExampleName, shlex.quote(self.output_dir))
# This will do a 'cmake reconfigure' which will create ninja files without rebuilding
self._IdfEnvExecute(
cmd, cwd=self.root, title='Generating ' + self.identifier)
def _build(self):
logging.info('Compiling Esp32 at %s', self.output_dir)
self._IdfEnvExecute(
"ninja -C '%s'" % self.output_dir, title='Building ' + self.identifier)
def build_outputs(self):
return {
self.app.AppNamePrefix + '.elf':
os.path.join(self.output_dir, self.app.AppNamePrefix + '.elf'),
self.app.AppNamePrefix + '.map':
os.path.join(self.output_dir, self.app.AppNamePrefix + '.map'),
}
def flashbundle(self):
with open(os.path.join(self.output_dir, self.app.FlashBundleName()), 'r') as fp:
return {
l.strip(): os.path.join(self.output_dir, l.strip()) for l in fp.readlines() if l.strip()
}
| 32.048951 | 104 | 0.614881 |
d6ab1ceac71f71b48c2f515ef9f292066cacb522 | 2,503 | py | Python | dm/dhcp_test.py | DentonGentry/gfiber-catawampus | b01e4444f3c7f12b1af7837203b37060fd443bb7 | [
"Apache-2.0"
] | 2 | 2017-10-03T16:06:29.000Z | 2020-09-08T13:03:13.000Z | dm/dhcp_test.py | DentonGentry/gfiber-catawampus | b01e4444f3c7f12b1af7837203b37060fd443bb7 | [
"Apache-2.0"
] | null | null | null | dm/dhcp_test.py | DentonGentry/gfiber-catawampus | b01e4444f3c7f12b1af7837203b37060fd443bb7 | [
"Apache-2.0"
] | 1 | 2017-05-07T17:39:02.000Z | 2017-05-07T17:39:02.000Z | #!/usr/bin/python
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
# pylint:disable=invalid-name
"""Unit tests for dhcp.py implementation."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import google3
from tr.wvtest import unittest
import tr.cwmpdate
import tr.handle
import dhcp
class DhcpTest(unittest.TestCase):
"""Tests for dhcp.py."""
def testClient(self):
client = dhcp.Client(ipaddr='1.2.3.4', chaddr='00:01:02:03:04:05',
expiry=1389022961, clientid='clientid_1',
hostname='hostname_1', userclassid='userclassid_1',
vendorclassid='vendorclassid_1')
self.assertEqual(client.Chaddr, '00:01:02:03:04:05')
self.assertEqual(client.IPv4AddressNumberOfEntries, 1)
client.AddIP(ipaddr='1.2.3.5', expiry=1389022962)
self.assertEqual(client.IPv4AddressNumberOfEntries, 2)
self.assertEqual(client.IPv4AddressList['1'].IPAddress, '1.2.3.4')
d = tr.cwmpdate.format(client.IPv4AddressList['1'].LeaseTimeRemaining)
self.assertEqual(d, '2014-01-06T15:42:41Z')
self.assertEqual(client.IPv4AddressList['2'].IPAddress, '1.2.3.5')
d = tr.cwmpdate.format(client.IPv4AddressList['2'].LeaseTimeRemaining)
self.assertEqual(d, '2014-01-06T15:42:42Z')
self.assertEqual(client.OptionNumberOfEntries, 4)
self.assertEqual(client.OptionList['1'].Tag, dhcp.CL)
self.assertEqual(client.OptionList['1'].Value, 'clientid_1')
self.assertEqual(client.OptionList['2'].Tag, dhcp.HN)
self.assertEqual(client.OptionList['2'].Value, 'hostname_1')
self.assertEqual(client.OptionList['3'].Tag, dhcp.UC)
self.assertEqual(client.OptionList['3'].Value, 'userclassid_1')
self.assertEqual(client.OptionList['4'].Tag, dhcp.VC)
self.assertEqual(client.OptionList['4'].Value, 'vendorclassid_1')
tr.handle.ValidateExports(client)
if __name__ == '__main__':
unittest.main()
| 40.370968 | 76 | 0.719537 |
379a3de6db3c4a30af0922fcaeb261d40924b106 | 2,921 | py | Python | plots/coldstarts.py | Be-FaaS/BeFaaS-analysis | a63025d9ee9f0835f2f98346df59c42f058e0d6b | [
"Apache-2.0"
] | null | null | null | plots/coldstarts.py | Be-FaaS/BeFaaS-analysis | a63025d9ee9f0835f2f98346df59c42f058e0d6b | [
"Apache-2.0"
] | null | null | null | plots/coldstarts.py | Be-FaaS/BeFaaS-analysis | a63025d9ee9f0835f2f98346df59c42f058e0d6b | [
"Apache-2.0"
] | null | null | null | import pathlib
from collections import defaultdict
import pandas as pd
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import seaborn as sns
from argmagic import argmagic
import befaas as bf
from befaas.helper import group_by
from befaas.logentry import ColdstartLog
from befaas.calls import create_requestgroups
sns.set_style("whitegrid")
def timedelta_to_ms(timedelta):
return timedelta.total_seconds() * 1000
def plot_kdeplot(data, ylabel, title, log_scaling=True):
"""
Args:
data: Dict of list of numbers.
"""
fig, ax = plt.subplots(1, 1, figsize=(8, 6), dpi=300)
plt.xticks(rotation=90)
d = []
for k, v in data.items():
for i in v:
d.append((k, i))
df = pd.DataFrame(d, columns=["name", "inv"])
for func_name in data.keys():
if func_name in ["email", "payment"]:
continue
# Subset to the airline
subset = df[df['name'] == func_name]
# Draw the density plot
plt.hist(subset['inv'], label=func_name, ax=ax)
ax.set_ylabel(ylabel)
ax.set_title(title)
if log_scaling:
ax.set_yscale("log")
fig.tight_layout()
return fig
def plot_function_coldstarts(data, plot_dir):
cld_groups = [d for d in data if isinstance(d, ColdstartLog)]
fn_calls = group_by(cld_groups, lambda c: c.function)
function_coldstarts = {}
for function, calls in fn_calls.items():
function_coldstarts[function] = [
c.timestamp for c in calls if c.timestamp is not None
]
fig = plot_kdeplot(
function_coldstarts,
ylabel="Coldstart Count",
title="KDE of function coldstarts",
log_scaling=True
)
plot_path = str(plot_dir / "function_coldstarts.png")
print(f"Plotting to {plot_path}")
fig.savefig(plot_path)
def plot_function_execution_time(data, plot_dir):
cgroups = create_requestgroups(data)
fn_calls = group_by(cgroups, lambda c: c.function)
function_durations = {}
for function, calls in fn_calls.items():
if function not in ("artillery", None):
function_durations[function] = [
c.start_time for c in calls if c.start_time is not None
]
fig = plot_kdeplot(
function_durations,
ylabel="Invocation Count",
title="KDE of function invocations",
log_scaling=True
)
plot_path = str(plot_dir / "function_invocations.png")
print(f"Plotting to {plot_path}")
fig.savefig(plot_path)
def main(input_data: pathlib.Path, plot_dir: pathlib.Path):
plot_dir = plot_dir / input_data.stem
plot_dir.mkdir(exist_ok=True, parents=True)
data = bf.load_logs(input_data)
plot_function_coldstarts(data, plot_dir)
# plot_function_execution_time(data, plot_dir)
if __name__ == "__main__":
argmagic(main, positional=("input_data", "plot_dir"))
| 25.4 | 71 | 0.661417 |
ac753cce1d322125e26ea524a5e930975829dde8 | 3,105 | py | Python | drl_grasping/envs/tasks/reach/reach_depth_image.py | Tiamat-Tech/drl_grasping | e67efee1cdbeeb3cb1e4d028890bbfc601e7840c | [
"BSD-3-Clause"
] | 126 | 2020-11-02T11:08:07.000Z | 2022-03-31T16:25:06.000Z | drl_grasping/envs/tasks/reach/reach_depth_image.py | Tiamat-Tech/drl_grasping | e67efee1cdbeeb3cb1e4d028890bbfc601e7840c | [
"BSD-3-Clause"
] | 68 | 2020-11-02T13:18:29.000Z | 2022-02-27T17:38:50.000Z | drl_grasping/envs/tasks/reach/reach_depth_image.py | Tiamat-Tech/drl_grasping | e67efee1cdbeeb3cb1e4d028890bbfc601e7840c | [
"BSD-3-Clause"
] | 27 | 2021-01-20T16:15:41.000Z | 2022-03-15T10:44:43.000Z | from drl_grasping.envs.tasks.reach import Reach
from drl_grasping.perception import CameraSubscriber
from gym_ignition.utils.typing import Observation
from gym_ignition.utils.typing import ObservationSpace
from typing import Tuple
import abc
import gym
import numpy as np
# TODO: ReachDepthImage environment currently does not currently have a working CnnPolicy
class ReachDepthImage(Reach, abc.ABC):
# Overwrite parameters for ManipulationGazeboEnvRandomizer
_camera_enable: bool = True
_camera_type: str = 'depth_camera'
_camera_width: int = 128
_camera_height: int = 128
_camera_update_rate: int = 10
_camera_horizontal_fov: float = 1.0
_camera_vertical_fov: float = 1.0
_camera_position: Tuple[float, float, float] = (1.1, -0.75, 0.45)
_camera_quat_xyzw: Tuple[float, float,
float, float] = (-0.0402991, -0.0166924, 0.9230002, 0.3823192)
_camera_ros2_bridge_depth: bool = True
def __init__(self,
agent_rate: float,
robot_model: str,
restrict_position_goal_to_workspace: bool,
sparse_reward: bool,
act_quick_reward: float,
required_accuracy: float,
verbose: bool,
**kwargs):
# Initialize the Task base class
Reach.__init__(self,
agent_rate=agent_rate,
robot_model=robot_model,
restrict_position_goal_to_workspace=restrict_position_goal_to_workspace,
sparse_reward=sparse_reward,
act_quick_reward=act_quick_reward,
required_accuracy=required_accuracy,
verbose=verbose,
**kwargs)
# Perception (RGB camera)
self.camera_sub = CameraSubscriber(topic=f'/{self._camera_type}',
is_point_cloud=False,
node_name=f'drl_grasping_depth_camera_sub_{self.id}')
def create_observation_space(self) -> ObservationSpace:
# 0:height*width - depth image
return gym.spaces.Box(low=0,
high=np.inf,
shape=(self._camera_height,
self._camera_width, 1),
dtype=np.float32)
def get_observation(self) -> Observation:
# Get the latest image
image = self.camera_sub.get_observation()
# Construct from buffer and reshape
depth_image = np.frombuffer(image.data, dtype=np.float32).reshape(self._camera_height,
self._camera_width, 1)
# Replace all instances of infinity with 0
depth_image[depth_image == np.inf] = 0.0
# Create the observation
observation = Observation(depth_image)
if self._verbose:
print(f"\nobservation: {observation}")
# Return the observation
return observation
| 37.865854 | 96 | 0.589694 |
77f2e1e6b21d5ce45df4385d4b7e8c193fcad8ea | 1,862 | py | Python | cutRod.py | Jhan960102/Algorithm-Note | 888481b191b65edd9f5f29ef627cf233a5d3e30a | [
"Apache-2.0"
] | null | null | null | cutRod.py | Jhan960102/Algorithm-Note | 888481b191b65edd9f5f29ef627cf233a5d3e30a | [
"Apache-2.0"
] | null | null | null | cutRod.py | Jhan960102/Algorithm-Note | 888481b191b65edd9f5f29ef627cf233a5d3e30a | [
"Apache-2.0"
] | null | null | null | # -*- encoding:utf-8 -*-
# @Author: jhan
# @FileName: cutRod.py
# @Date: 2022/1/12 10:51
p = [0, 1, 5, 8, 9, 10, 17, 17, 20, 24, 30]
def cut_rod_recurision_1(p, n):
if n == 0:
return 0
else:
res = p[n]
for i in range(1, n):
res = max(res, (cut_rod_recurision_1(p, i) + cut_rod_recurision_1(p, n-i)))
return res
def cut_rod_recurision_2(p, n):
if n == 0:
return 0
else:
res = 0
for i in range(1, n+1):
res = max(res, p[i] + cut_rod_recurision_2(p, n-i))
return res
def cut_rod_dp(p, n):
r = [0]
for i in range(1, n+1):
res = 0
for j in range(1, i+1):
res = max(res, p[j]+r[i-j])
r.append(res)
return r[n]
def cut_rod_extend(p, n):
r, s = [0], [0]
for i in range(1, n+1):
res_r, res_s = 0, 0
for j in range(1, i+1):
if p[j] + r[i-j] > res_r:
res_r = p[j] + r[i-j]
res_s = j
r.append(res_r)
s.append(res_s)
return r[n], s
def cut_rod_solution(p, n):
r, s = cut_rod_extend(p, n)
ans = []
while n > 0:
ans.append(s[n])
n -= s[n]
return ans
def lcs_length(x, y):
m, n = len(x), len(y)
dp = [[0] * (n+1) for _ in range(m+1)]
for i in range(1, m+1):
for j in range(1, n+1):
if x[i-1] == y[j-1]:
dp[i][j] = dp[i-1][j-1] + 1
else:
dp[i][j] = max(dp[i-1][j], dp[i][j-1])
return dp[m][n]
def gcd1(a, b):
if b == 0:
return a
else:
return gcd1(b, a % b)
def gcd2(a, b):
while b > 0:
r = a % b
a = b
b = r
return a
a = 'ABCBDAB'
b = 'BDCABA'
print(gcd2(12, 16)) | 20.688889 | 88 | 0.422127 |
720ae1273099ff79ecb69f2ceb5328169b9e4a88 | 21,027 | py | Python | back of PSSM/PSSM/pssm/models/rstereo # back of matrix aggregation.py | lidongyv/PSSM | 61ef78bc465fd53fb128d0aa1b913f787c8c7f74 | [
"Apache-2.0"
] | null | null | null | back of PSSM/PSSM/pssm/models/rstereo # back of matrix aggregation.py | lidongyv/PSSM | 61ef78bc465fd53fb128d0aa1b913f787c8c7f74 | [
"Apache-2.0"
] | null | null | null | back of PSSM/PSSM/pssm/models/rstereo # back of matrix aggregation.py | lidongyv/PSSM | 61ef78bc465fd53fb128d0aa1b913f787c8c7f74 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# @Author: yulidong
# @Date: 2018-07-17 10:44:43
# @Last Modified by: yulidong
# @Last Modified time: 2018-08-30 20:14:54
# -*- coding: utf-8 -*-
# @Author: lidong
# @Date: 2018-03-20 18:01:52
# @Last Modified by: yulidong
# @Last Modified time: 2018-07-16 22:16:14
import time
import torch
import numpy as np
import torch.nn as nn
import math
from math import ceil
from torch.autograd import Variable
from torch.nn.functional import cosine_similarity as cosine_s
from pssm import caffe_pb2
from pssm.models.utils import *
rsn_specs = {
'scene':
{
'n_classes': 9,
'input_size': (540, 960),
'block_config': [3, 4, 23, 3],
},
}
group_dim=8
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
if stride==1:
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
if stride==2:
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=2, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.gn1 = nn.GroupNorm(group_dim,planes)
self.relu = nn.LeakyReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.gn2 = nn.GroupNorm(group_dim,planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.gn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.gn2(out)
if self.downsample is not None:
residual = self.downsample(x)
# print(residual.shape)
# print(out.shape)
out += residual
out = self.relu(out)
return out
class feature_extraction(nn.Module):
def __init__(self):
super(feature_extraction, self).__init__()
self.inplanes = 32
self.layer1 = self._make_layer(BasicBlock, 32, 3, 1,1,1)
self.branch1 = nn.Sequential(nn.AvgPool2d((54, 96), stride=(54,96)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.LeakyReLU(inplace=True))
self.branch2 = nn.Sequential(nn.AvgPool2d((27, 48), stride=(27,48)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.LeakyReLU(inplace=True))
self.branch3 = nn.Sequential(nn.AvgPool2d((36, 64), stride=(36,64)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.LeakyReLU(inplace=True))
self.branch4 = nn.Sequential(nn.AvgPool2d((18, 32), stride=(18,32)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.LeakyReLU(inplace=True))
self.branch5 = nn.Sequential(nn.AvgPool2d((9, 16), stride=(9,16)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.LeakyReLU(inplace=True))
self.branch6 = nn.Sequential(nn.AvgPool2d((3, 8), stride=(3,8)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.LeakyReLU(inplace=True))
self.lastconv = nn.Sequential(nn.Conv2d(80, 64, 3, 1, 1, 1),
nn.GroupNorm(group_dim,64),
nn.LeakyReLU(inplace=True),
nn.Conv2d(64, 32, 3, 1, 1, 1),
nn.GroupNorm(group_dim,32),
nn.LeakyReLU(inplace=True),
)
def _make_layer(self, block, planes, blocks, stride, pad, dilation):
downsample = None
layers = []
layers.append(block(self.inplanes, planes, stride))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes,1))
return nn.Sequential(*layers)
def forward(self, x):
# output = self.conv1(x)
# output = self.gn1(output)
# output = self.relu1(output)
# output = self.conv2(output)
# output = self.gn2(output)
# output = self.relu2(output)
# output = self.conv3(output)
# output = self.gn3(output)
# output = self.relu3(output)
output_skip = self.layer1(x)
# output_skip=x
output_branch1 = self.branch1(output_skip)
output_branch1 = F.interpolate(output_branch1, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch2 = self.branch2(output_skip)
output_branch2 = F.interpolate(output_branch2, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch3 = self.branch3(output_skip)
output_branch3 = F.interpolate(output_branch3, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch4 = self.branch4(output_skip)
output_branch4 = F.interpolate(output_branch4, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch5 = self.branch5(output_skip)
output_branch5 = F.interpolate(output_branch5, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch6 = self.branch6(output_skip)
output_branch6 = F.interpolate(output_branch6, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_feature = torch.cat((output_skip, output_branch6, output_branch5, output_branch4, output_branch3, output_branch2, output_branch1), 1)
output_feature = self.lastconv(output_feature)
#print(output_feature.shape)
return output_feature
class feature_extraction2(nn.Module):
def __init__(self):
super(feature_extraction2, self).__init__()
self.inplanes = 32
self.conv1 = nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1,
bias=False,dilation=1)
self.gn1 = nn.GroupNorm(group_dim,32)
self.relu1 = nn.LeakyReLU(inplace=True)
self.conv2 = nn.Conv2d(32, 32, kernel_size=3, stride=1, padding=1,
bias=False,dilation=1)
self.gn2 = nn.GroupNorm(group_dim,32)
self.relu2 = nn.LeakyReLU(inplace=True)
self.conv3 = nn.Conv2d(32, 32, kernel_size=7, stride=1, padding=6,
bias=False,dilation=2)
self.gn3 = nn.GroupNorm(group_dim,32)
self.relu3 = nn.LeakyReLU(inplace=True)
self.layer1 = self._make_layer(BasicBlock, 32, 1, 1,1,1)
def _make_layer(self, block, planes, blocks, stride, pad, dilation):
downsample = None
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes,1,None,))
return nn.Sequential(*layers)
def forward(self, x):
output = self.conv1(x)
output = self.gn1(output)
output = self.relu1(output)
output = self.conv2(output)
output = self.gn2(output)
output = self.relu2(output)
output = self.conv3(output)
output = self.gn3(output)
output = self.relu3(output)
#print(output.shape)
output = self.layer1(output)
return output
class ss_argmin(nn.Module):
def __init__(self):
super(ss_argmin, self).__init__()
self.softmax = nn.Softmax(dim=-1)
def forward(self,x,min,max):
one=torch.ones(1)
zero=torch.zeros(1)
x=self.softmax(x)
index=torch.ones_like(x)*torch.range(min,max)
disparity= torch.sum(x*index,dim=-1)
v,i=torch.topk(x,k=1,dim=-1)
mask_1=torch.squeeze(torch.where(v>0.7,one,zero))
v,i=torch.topk(x,k=5,dim=-1)
v_sum=torch.sum(v,-1)
mask_2=torch.squeeze(torch.where(v_s>0.7,one,zero))
i_dis=torch.max(i,-1)[0]-torch.min(i,-1)[0]
mask_3=torch.squeeze(torch.where(i_dis<6,one,zero))
mask=mask_1+mask_2*mask_3
mask=torch.where(mask>0,one,zero)
return disparity*mask
class rstereo(nn.Module):
def __init__(self,
n_classes=9,
block_config=[3, 4, 6, 3],
input_size= (480, 640),
version='scene'):
super(rstereo, self).__init__()
self.feature_extraction=feature_extraction().cuda(0)
self.feature_extraction2=feature_extraction2().cuda(0)
# self.aggregation_sparse=aggregation_sparse()
# self.aggregation_dense=aggregation_dense()
self.ss_argmin=ss_argmin()
# self.refinement_sparse=aggregation_sparse()
# self.refinement_dense=aggregation_dense()
def cluster(self,feature,mask):
one=torch.ones(1).cuda(2)
zero=torch.zeros(1).cuda(2)
count=torch.sum(mask)
mean=torch.sum(torch.sum(feature,dim=-1),dim=-1)/count
mean=mean.view(mean.shape[0],mean.shape[1],1,1)
# weights=torch.where(mask==one,torch.norm(feature-mean,dim=1),zeros)
# weights=torch.exp(weights/torch.max(weights)).reshape(weights.shape[0],weights.shape[1],1)
#print(mask.shape,feature.shape,mean.shape)
weights=torch.where(mask==one,torch.norm(feature-mean,dim=1),zero)
weights=torch.exp(-weights).reshape(weights.shape[0],weights.shape[1],weights.shape[2],1)
return weights
def forward(self, l,r,P,pre1,pre2):
#self.P=P[1,0]
#0 l to r,1 min,2 max
#[l_box,r_box,match],[min_d,max_d]
start_time=time.time()
with torch.no_grad():
self.pre=pre1.cuda(2)
self.pre2=pre2.cuda(2)
P1=P[...,0].cuda(2)
P2=P[...,3].cuda(2)
P3=P[...,1].cuda(2)
P4=P[...,2].cuda(2)
#feature extraction
l_mask=P2-P1
s_mask=P1
#l_mask=l_mask.byte()
#s_mask=s_mask.byte()
#basic cuda 524
#print(l.type)
#1923
#print(torch.cuda.memory_allocated(1))
#2727
l_sf=self.feature_extraction2(l)
l_lf=self.feature_extraction(l_sf)
#print(torch.cuda.memory_allocated(2))
#the cuda won't copy the volume to the new gpu
# a=l_lf.cuda(1)
# b=l_lf.cuda(2)
# c=l_sf.cuda(3)
r_sf=self.feature_extraction2(r)
r_lf=self.feature_extraction(r_sf)
#print(torch.cuda.memory_allocated(1))
#3267
#print(torch.cuda.memory_allocated(2))
#reshape the mask to batch and channel
disparity=torch.zeros([540,960]).cuda(2)
one=torch.ones(1).cuda(2)
zero=torch.zeros(1).cuda(2)
cost_volume=[]
#5710
#print(value)
l_lf=l_lf.cuda(2)
r_lf=r_lf.cuda(2)
r_sf=r_sf.cuda(2)
l_sf=l_sf.cuda(2)
#985
#feature=torch.masked_select(l_sf,s_mask)
#feature=torch.masked_select(l_lf,l_mask)+torch.masked_select(l_sf,s_mask)
feature=l_lf*l_mask+l_sf*s_mask
feature=torch.where((l_mask+s_mask)>0,feature,l_lf)
# cost_s=[]
# cost_l=[]
# for m in range(10):
count=0
start_time=time.time()
for z in range(10):
start_time=time.time()
for i in range(150):
#ground 0-270, sky 0-40
#intial 0.46, after 0.18
with torch.no_grad():
if i> torch.max(P3).type(torch.int32):
break
min_d=pre1[0,0,i].long()
max_d=pre1[0,1,i].long()
#object_mask=torch.where(P3==i,one,zero)
x1,y1,x2,y2,size=pre2[0,i].long()
cost_volume=torch.zeros(x2-x1,y2-y1,max_d-min_d).cuda(2)
object_mask=P3[0,x1:x2,y1:y2]
object_mask=torch.where(object_mask==i,one,zero)
s_mask_o=object_mask*s_mask[0,x1:x2,y1:y2]
l_mask_o=object_mask*l_mask[0,x1:x2,y1:y2]
s_match=s_mask_o.nonzero().cuda(2)
l_match=l_mask_o.nonzero().cuda(2)
if s_match.shape[0]==0:
s_match=object_mask.nonzero().cuda(2)
if l_match.shape[0]==0:
l_match=object_mask.nonzero().cuda(2)
s_l_o=feature[...,s_match[:,0],s_match[:,1]]
l_l_o=feature[...,l_match[:,0],l_match[:,1]]
num_s=s_match.shape[0]
num_l=l_match.shape[0]
#print(sy_match.shape)
with torch.no_grad():
sy_match=s_match[:,1]
sx_match=s_match[:,0]
ly_match=l_match[:,1]
lx_match=l_match[:,0]
d=max_d-min_d
#print(d)
sx_match=sx_match.repeat(1,d)
sy_match=sy_match.repeat(1,d)
#sy=sy_match
range_d_s=torch.arange(min_d,max_d).cuda(2).repeat(s_match.shape[0],1).transpose(1,0).contiguous().view_as(sy_match)
sy_match-=range_d_s
lx_match=lx_match.repeat(1,d)
ly_match=ly_match.repeat(1,d)
range_d_l=torch.arange(min_d,max_d).cuda(2).repeat(l_match.shape[0],1).transpose(1,0).contiguous().view_as(ly_match)
ly_match-=range_d_l
s_r_o_t=r_sf[...,sx_match,sy_match].reshape(1,32,s_l_o.shape[-1],d)
s_l_o=s_l_o.repeat(1,1,1,d).reshape(1,32,s_l_o.shape[-1],d)
l_r_o_t=r_lf[...,lx_match,ly_match].reshape(1,32,l_l_o.shape[-1],d)
l_l_o=l_l_o.repeat(1,1,1,d).reshape(1,32,l_l_o.shape[-1],d)
# cost_s.append(torch.where(sy_match.reshape(1,s_l_o.shape[-2],d)>=0,cosine_s(s_l_o,s_r_o_t),zero))
# cost_l.append(torch.where(ly_match.reshape(1,l_l_o.shape[-2],d)>=0,cosine_s(l_l_o,l_r_o_t),zero))
cost_s=torch.where(sy_match.reshape(1,s_l_o.shape[-2],d)>=0,cosine_s(s_l_o,s_r_o_t),zero)
cost_l=torch.where(ly_match.reshape(1,l_l_o.shape[-2],d)>=0,cosine_s(l_l_o,l_r_o_t),zero)
#cost_volume=cost_s+cost_l
#print(torch.cuda.memory_allocated(2)/1e+6)
#time.sleep(30)
#convert to volume
with torch.no_grad():
sy_match=sy_match+range_d_s
range_d_s=range_d_s-min_d
#sparse tensor
cost_volume[sx_match.squeeze(),sy_match.squeeze(),range_d_s.squeeze()]=cost_s.view_as(sy_match).squeeze()
with torch.no_grad():
ly_match=ly_match+range_d_l
range_d_l=range_d_l-min_d
cost_volume[lx_match.squeeze(),ly_match.squeeze(),range_d_l.squeeze()]=cost_l.view_as(ly_match).squeeze()
#print(cost_volume.nonzero().shape)
#cost_s
# print(time.time()-start_time)
# time.sleep(100)
#aggregation
a_volume=torch.zeros_like(cost_volume).cuda(2)
object_r=torch.where(P3[0,x1:x2,y1:y2]==i,P4[0,x1:x2,y1:y2],-one)
max_r=torch.max(object_r).long()
#start_time=time.time()
for j in range(1,max_r+1):
with torch.no_grad():
plane_mask=torch.where(object_r==j,one,zero)
index=plane_mask.nonzero().long().cuda()
if index.shape[0]<1:
continue
xp1,xp2,yp1,yp2=torch.min(index[:,0]),torch.max(index[:,0])+1,torch.min(index[:,1]),torch.max(index[:,1])+1
#xp1,xp2,yp1,yp2.r_size=self.pre[0,0][1]
plane_mask=plane_mask[...,xp1:xp2,yp1:yp2]
s_plane_mask=plane_mask*s_mask[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2]
l_plane_mask=plane_mask*l_mask[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2]
plane=cost_volume[...,xp1:xp2,yp1:yp2,:]
#rint(s_mask.shape)
#print(plane_mask.shape,s_plane_mask.shape)
s_weights=self.cluster(l_sf[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2],s_plane_mask)
s_cost=torch.sum(torch.sum(plane*s_weights,-2,keepdim=True),-3,keepdim=True)/torch.sum(s_weights)
l_weights=self.cluster(l_lf[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2],l_plane_mask)
l_cost=torch.sum(torch.sum(plane*l_weights,-2,keepdim=True),-3,keepdim=True)/torch.sum(l_weights)
with torch.no_grad():
# print(plane_mask.shape)
# plane_mask=plane_mask-torch.where(s_plane_mask+l_plane_mask>0,one,zero)
# print(plane_mask.shape)
plane_mask=plane_mask.view(1,plane_mask.shape[0],plane_mask.shape[1],1) \
.expand(1,plane_mask.shape[0],plane_mask.shape[1],plane.shape[-1])
#print(plane_mask.shape)
s_plane_mask=s_plane_mask.view(plane_mask.shape[0],plane_mask.shape[1],plane_mask.shape[2],1) \
.expand(plane_mask.shape[0],plane_mask.shape[1],plane_mask.shape[2],plane.shape[-1])
l_plane_mask=l_plane_mask.view(plane_mask.shape[0],plane_mask.shape[1],plane_mask.shape[2],1) \
.expand(plane_mask.shape[0],plane_mask.shape[1],plane_mask.shape[2],plane.shape[-1])
# plane=torch.where(s_plane_mask==1,s_cost*(1-s_weights)+s_weights*plane,plane)
# plane=torch.where(l_plane_mask==1,l_cost*(1-l_weights)+l_weights*plane,plane)
cost_volume[...,xp1:xp2,yp1:yp2,:]=torch.where(s_plane_mask==1,s_cost*s_weights+(1-s_weights)*plane,plane)
cost_volume[...,xp1:xp2,yp1:yp2,:]=torch.where(l_plane_mask==1,l_cost*l_weights+(1-l_weights)*plane,plane)
exist=torch.where(s_plane_mask-l_plane_mask>0,one,zero)
#print(plane_mask.shape,s_plane_mask.shape)
weights=self.cluster(torch.cat([l_lf[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2],l_sf[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2]],-3),plane_mask[...,0])
costs=torch.sum(torch.sum(plane*weights,-2,keepdim=True),-3,keepdim=True)/torch.sum(exist)
plane_mask=plane_mask-exist
cost_volume[...,xp1:xp2,yp1:yp2,:]=torch.where(plane_mask==1,costs*weights,plane)
#print(time.time()-start_time)
print(time.time()-start_time)
#print(time.time()-start_time)
time.sleep(100)
# #ss_argmin
# disparity[...,x1:x2,y1:y2]=ss_argmin(cost_volume,min_d,max_d)
# #refinement
# refine=torch.zeros_like(disparity)[...,x1:x2,y1:y2]
# for j in range(min_r,max_r+1):
# plane_mask=torch.where(object_r==j,one,zero)[x1:x2,y1:y2]
# xp1,xp2,yp1,yp2=crop(plane_mask)
# plane_mask=plane_mask[xp1:xp2,yp1:yp2]
# s_plane_mask=plane_mask*s_mask[x1:x2,y1:y2][xp1:xp2,yp1:yp2]
# l_plane_mask=plane_mask*l_mask[x1:x2,y1:y2][xp1:xp2,yp1:yp2]
# plane_mask=plane_mask-torch.where(s_plane_mask+l_plane_mask>0,one,zero)
# plane=disparity[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2]*plane_mask
# s_weights=self.cluster(l_sf[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2],s_plane_mask)
# s_cost=torch.sum(torch.sum(plane*s_weights,-2,keepdim=True),-3,keepdim=True)/torch.sum(s_weights)
# l_weights=self.cluster(l_lf[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2],l_plane_mask)
# l_cost=torch.sum(torch.sum(plane*l_weights,-2),-2)/torch.sum(l_weights)
# weights=self.cluster(l_lf[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2],plane_mask)
# costs=torch.sum(torch.sum(plane*weights,-2,keepdim=True),-3,keepdim=True)/torch.sum(weights)
# plane=torch.where(s_plane_mask==1,s_cost*s_weights,plane)
# plane=torch.where(l_plane_mask==1,l_cost*l_weights,plane)
# plane=torch.where(plane_mask==1,cost*weights,plane)
# disparity[...,x1:x2,y1:y2][...,xp1:xp2,yp1:yp2]=plane
return cost_volume
| 44.929487 | 159 | 0.563704 |
da6c45691c6bca95f1184fe4e0f2066502e76730 | 10,169 | py | Python | src/GUI/image_processing.py | Team-Ausdroid-Unimelb/rm_ai_challenge_2020s2_koala | 6d1e141e33a27ab3f8f35dd0f39649b4f9281bf3 | [
"MIT"
] | null | null | null | src/GUI/image_processing.py | Team-Ausdroid-Unimelb/rm_ai_challenge_2020s2_koala | 6d1e141e33a27ab3f8f35dd0f39649b4f9281bf3 | [
"MIT"
] | null | null | null | src/GUI/image_processing.py | Team-Ausdroid-Unimelb/rm_ai_challenge_2020s2_koala | 6d1e141e33a27ab3f8f35dd0f39649b4f9281bf3 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
import time
import os
from pathlib import Path
from PIL import ImageTk,Image
import GUI_support
import tkinter as tk
from predictions import LabelledImage, Armour
from filemanagement import *
from tkinter import messagebox
# Minimum class score for a detection to be kept; also passed to cv2.dnn.NMSBoxes
# as its score threshold in get_nms_bboxes().
CONFIDENCE_THRESHOLD = 0.05
## Load necessary files for model
def load_model(weights, cfg, names):
    """Load a YOLO-style network plus its class names and per-class colors.

    Returns ``(model, output_layers, classes, colors)`` on success, or
    ``None`` (after printing the error) when the network cannot be read.
    """
    try:
        model = cv2.dnn.readNet(weights, cfg)
    except Exception as err:
        print(err)
        print('Model loading failed.')
        return
    layer_names = model.getUnconnectedOutLayersNames()
    # One class label per line in the .names file.
    with open(names, "r") as names_file:
        labels = [entry.strip() for entry in names_file]
    # A random RGB color per class, used when drawing predictions.
    palette = np.random.uniform(0, 255, size=(len(labels), 3))
    return model, layer_names, labels, palette
def load_image(image_path):
    """Read an image from disk; return None (after printing the error) on failure."""
    try:
        image = cv2.imread(image_path)
        # cv2.imread returns None for unreadable files; touching .shape
        # raises in that case and routes us into the error branch.
        image.shape
    except Exception as err:
        print(err)
        print('Image loading failed.')
        return
    return image
## Get NMS bounding boxes (standard result)
def get_nms_bboxes(outputs, height, width, confidence_threshold, classes):
    """Convert raw network outputs into NMS-filtered entries.

    Each returned entry is ``[bbox, confidence, class_id, label]`` with
    ``bbox = [x, y, w, h]`` in pixel coordinates (x, y = top-left corner).
    """
    boxes, kept_scores, ids = list(), list(), list()
    # Collect every detection whose best class score clears the threshold.
    for layer_output in outputs:
        for detection in layer_output:
            class_scores = detection[5:]
            best_id = np.argmax(class_scores)
            best_score = class_scores[best_id]
            if best_score > confidence_threshold:
                # Model emits normalized center/size; scale to pixels.
                cx, cy = int(detection[0] * width), int(detection[1] * height)
                bw, bh = int(detection[2] * width), int(detection[3] * height)
                boxes.append([int(cx - bw / 2), int(cy - bh / 2), bw, bh])
                kept_scores.append(float(best_score))
                ids.append(best_id)
    # Non-Maximum Suppression removes heavily-overlapping duplicates.
    keep = cv2.dnn.NMSBoxes(boxes, kept_scores, confidence_threshold, 0.4)
    if len(keep) == 0:
        return list()
    return [[boxes[i], kept_scores[i], ids[i], str(classes[ids[i]])]
            for i in keep.flatten()]
## Get the largest armor bounding box in the picture
def get_largest_bbox(nms_bboxes):
    """Return the single largest armor bbox, with its pose appended when found.

    Entries with class id > 3 are pose boxes; class id <= 3 are armor boxes.
    Returns a one-element list (possibly pose-annotated via append_pose), or
    an empty list when there is no armor box at all.
    """
    pose_bboxes = [bbox for bbox in nms_bboxes if bbox[2] > 3]
    armor_bboxes = [bbox for bbox in nms_bboxes if bbox[2] <= 3]
    if not armor_bboxes:
        return list()
    # max() keeps the first maximal element, matching the previous strict-">"
    # scan for positive areas, and — unlike the old loop — cannot leave the
    # result unbound when every candidate has zero area (w*h == 0).
    largest_bbox = max(armor_bboxes, key=lambda entry: entry[0][2] * entry[0][3])
    if not pose_bboxes:
        return [largest_bbox]
    return append_pose([largest_bbox], pose_bboxes)
## Get the largest armor bounding boxes for 4 armor types in the picture
def get_largest_bboxes(nms_bboxes):
    """For each of the 4 armor classes, keep only its largest bbox.

    Pose boxes (class id > 3) are attached to the survivors via append_pose
    when present. Returns an empty list when no armor boxes exist.
    """
    pose_bboxes = [entry for entry in nms_bboxes if entry[2] > 3]
    armor_bboxes = [entry for entry in nms_bboxes if entry[2] <= 3]
    if not armor_bboxes:
        return list()
    # Track, per armor class id (0..3), the index of its biggest-area bbox.
    best_area = [0] * 4
    best_index = [-1] * 4
    for idx, (bbox, confidence, armor_id, text) in enumerate(armor_bboxes):
        area = bbox[2] * bbox[3]
        if area > best_area[armor_id]:
            best_area[armor_id] = area
            best_index[armor_id] = idx
    # Classes never seen (or seen only with zero area) keep index -1 and drop out.
    filtered_armor_bboxes = [armor_bboxes[idx] for idx in best_index if idx >= 0]
    if not pose_bboxes:
        return filtered_armor_bboxes
    return append_pose(filtered_armor_bboxes, pose_bboxes)
## Append the proper pose label to each armor labels
def append_pose(armor_bboxes, pose_bboxes):
## Check if the center of bbox_in is within bbox_out
def contain_bbox(bbox_out, bbox_in):
x_o, y_o, w_o, h_o = bbox_out
x_i, y_i, _, _ = bbox_in
return x_i > x_o and x_i < x_o + w_o and y_i > y_o and y_i < y_o + h_o
for i in range(len(armor_bboxes)):
bbox_in, _, _, text_in = armor_bboxes[i]
for pose_bbox in pose_bboxes:
bbox_out, _, _, text_out = pose_bbox
if contain_bbox(bbox_out, bbox_in):
break
armor_bboxes[i][3] = [text_in, text_out] #text_in + '_' + text_out
return armor_bboxes
# accepts image and outputs layers as parameters
def positioning_bboxes(image, model, output_layers, classes, mode = 3):
    """Run the network over `image` and return bounding boxes per `mode`.

    mode 1: every armor and pose box separately;
    mode 2: only the single largest armor box (pose attached);
    mode 3 (default): the largest armor box per armor class.
    """
    height, width, channels = image.shape
    # Normalize to [0, 1] and resize into the network's 320x320 input blob.
    blob = cv2.dnn.blobFromImage(image, scalefactor=1.0/255, size=(320, 320),
                                 mean=(0, 0, 0), swapRB=True, crop=False)
    model.setInput(blob)
    outputs = model.forward(output_layers)
    nms_bboxes = get_nms_bboxes(outputs, height, width, CONFIDENCE_THRESHOLD, classes)
    if mode == 2:
        return get_largest_bbox(nms_bboxes)
    if mode == 3:
        return get_largest_bboxes(nms_bboxes)
    return nms_bboxes
## Draw the predicted bounding boxes and labels on the image
def draw_labels(predictions_directory, labelled_image, filename, image, bboxes, classes, colors):
font = cv2.FONT_HERSHEY_PLAIN
for i in range(len(bboxes)):
bbox, confidence, class_id, text = bboxes[i]
x, y, w, h = bbox
if len(text) > 1:
robot = text[0]
pose = text[1]
elif len(text) == 1:
robot = text[0]
pose = ""
else:
robot = ""
pose = ""
confidence = round(confidence, 3)
armour = Armour(robot, pose, bbox, confidence)
labelled_image.armours.append(armour)
label = robot + "_" + pose + " " + str(confidence)
color = colors[class_id]
cv2.rectangle(image, (x,y), (x+w, y+h), color, 2)
cv2.putText(image, label, (x, y - 5), font, 1, color, 1)
## Output prediction result to file
path = predictions_directory / filename.name
cv2.imwrite(os.path.abspath(path), image)
def image_detect(image_file_state):
    """Run detection over every uploaded image, save annotated copies, and
    refresh the GUI with the labelled results.

    ``image_file_state`` carries the uploaded filenames, config file paths,
    thumbnail images, and labelled-image records shared with the GUI. Shows
    popup dialogs (and returns early) when images or config files are missing.
    """
    ### check if there's any image uploaded
    filenames = image_file_state.filenames
    if not filenames: # no image files are uploaded
        messagebox.showinfo('Information', 'Please upload images first')
        return
    ### check if all config files have been uploaded by the user ###
    print("Checking config files...")
    files_needed = ["names", "cfg", "weights"]
    # check if all required files are present. If not, show a popup
    for file in files_needed:
        if file not in image_file_state.config_files:
            messagebox.showinfo('Information', 'Please upload names, cfg and weights files first')
            print("No compatible config files were found.")
            return
    ## Load necessary files for model (must do this before detection)
    weights = image_file_state.config_files["weights"]
    cfg = image_file_state.config_files["cfg"]
    names = image_file_state.config_files["names"]
    model, output_layers, classes, colors = load_model(weights, cfg, names)
    # Mode 3: keep the largest armor bbox per armor class (see positioning_bboxes).
    mode = 3
    # Get the folder from where the images were uploaded.
    images_directory = image_file_state.images_folder
    # Create predictions directory
    predictions_directory = images_directory / "predictions"
    Path.mkdir(predictions_directory, exist_ok=True)
    print("Labelling...")
    labelled_image = None
    labelled_images = []
    image_file_state.clear_labelled_images() # clear labelled images output
    time_labelstart = time.perf_counter()
    # Select and draw bounding boxes on every image.
    for i, filename in enumerate(filenames):
        image_path = os.path.abspath(filename)
        # Load the image
        image = load_image(image_path)
        # Predict labels
        tstart = time.perf_counter()
        bboxes = positioning_bboxes(image, model, output_layers, classes, mode)
        # Frames-per-second figure for this single prediction.
        speed = 1 / (time.perf_counter() - tstart)
        # Labelled Image Data
        labelled_image = LabelledImage(i, filename.name, round(speed,2), armours=[])
        draw_labels(predictions_directory, labelled_image, filename, image, bboxes, classes, colors)
        labelled_images.append(labelled_image)
    print("Labelling done in ", round(time.perf_counter() - time_labelstart, 3), "s")
    # Save labelled images output
    image_file_state.set_labelled_images(labelled_images)
    print("Displaying predictions...")
    image_file_state.clear_all_images() # clear all images
    image_file_state.set_current_img_num(0) # reset image number
    image_file_state.clear_export_content() # clear export content
    top = GUI_support.w
    export_content = ""
    for i, filename in enumerate(filenames):
        # Load the annotated copy written by draw_labels and thumbnail it for display.
        im = Image.open(predictions_directory / filename.name)
        im.thumbnail(image_file_state.image_size, Image.ANTIALIAS)
        image_file_state.images.append(ImageTk.PhotoImage(im))
        # Save the labelled images output data
        write_output(top.Output, image_file_state.labelled_images[i])
        # NOTE(review): END is not defined by "import tkinter as tk"; presumably
        # it arrives via the star import from filemanagement — confirm.
        export_content = export_content + "\n" + str(top.Output.get(1.0, END))
    # Save export content
    image_file_state.set_export_content(export_content)
    # Display the first image
    top.image_label.configure(image = image_file_state.images[0])
    # Display first image output
    write_output(top.Output, image_file_state.labelled_images[0])
    print("Predictions displayed.")
    return
return | 32.697749 | 118 | 0.643328 |
79c08e12a3cc83a24975e77d1795da4b54415ab4 | 1,474 | py | Python | examples/plotting/file/bar_stacked_split.py | g-parki/bokeh | 664ead5306bba64609e734d4105c8aa8cfb76d81 | [
"BSD-3-Clause"
] | 15,193 | 2015-01-01T05:11:45.000Z | 2022-03-31T19:30:20.000Z | examples/plotting/file/bar_stacked_split.py | g-parki/bokeh | 664ead5306bba64609e734d4105c8aa8cfb76d81 | [
"BSD-3-Clause"
] | 9,554 | 2015-01-01T03:16:54.000Z | 2022-03-31T22:59:39.000Z | examples/plotting/file/bar_stacked_split.py | g-parki/bokeh | 664ead5306bba64609e734d4105c8aa8cfb76d81 | [
"BSD-3-Clause"
] | 4,829 | 2015-01-02T03:35:32.000Z | 2022-03-30T16:40:26.000Z | ''' A split, horizontal stacked bar chart using plain Python lists.
.. bokeh-example-metadata::
:apis: bokeh.plotting.Figure.hbar_stack
:refs: :ref:`userguide_categorical` > :ref:`userguide_categorical_bars` > :ref:`userguide_categorical_bars_stacked`
:keywords: bars, categorical, stacked
'''
from bokeh.models import ColumnDataSource
from bokeh.palettes import GnBu3, OrRd3
from bokeh.plotting import figure, show
fruits = ['Apples', 'Pears', 'Nectarines', 'Plums', 'Grapes', 'Strawberries']
years = ["2015", "2016", "2017"]
exports = {'fruits' : fruits,
'2015' : [2, 1, 4, 3, 2, 4],
'2016' : [5, 3, 4, 2, 4, 6],
'2017' : [3, 2, 4, 4, 5, 3]}
imports = {'fruits' : fruits,
'2015' : [-1, 0, -1, -3, -2, -1],
'2016' : [-2, -1, -3, -1, -2, -2],
'2017' : [-1, -2, -1, 0, -2, -2]}
p = figure(y_range=fruits, height=350, x_range=(-16, 16), title="Fruit import/export, by year",
toolbar_location=None)
p.hbar_stack(years, y='fruits', height=0.9, color=GnBu3, source=ColumnDataSource(exports),
legend_label=["%s exports" % x for x in years])
p.hbar_stack(years, y='fruits', height=0.9, color=OrRd3, source=ColumnDataSource(imports),
legend_label=["%s imports" % x for x in years])
p.y_range.range_padding = 0.1
p.ygrid.grid_line_color = None
p.legend.location = "top_left"
p.axis.minor_tick_line_color = None
p.outline_line_color = None
show(p)
| 35.95122 | 119 | 0.622795 |
af22aa254fa3ac30a13a5cd6ba066d9bae8680b2 | 698 | py | Python | test/test_device.py | kinow-io/kinow-python-sdk | 4c1699a3c78048b84287bd049a669651a5b4e2d5 | [
"Apache-2.0"
] | 1 | 2019-06-26T14:24:54.000Z | 2019-06-26T14:24:54.000Z | test/test_device.py | kinow-io/kinow-python-sdk | 4c1699a3c78048b84287bd049a669651a5b4e2d5 | [
"Apache-2.0"
] | null | null | null | test/test_device.py | kinow-io/kinow-python-sdk | 4c1699a3c78048b84287bd049a669651a5b4e2d5 | [
"Apache-2.0"
] | 1 | 2018-02-01T10:08:40.000Z | 2018-02-01T10:08:40.000Z | # coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 1.4.58
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kinow_client
from kinow_client.rest import ApiException
from kinow_client.models.device import Device
class TestDevice(unittest.TestCase):
""" Device unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testDevice(self):
"""
Test Device
"""
model = kinow_client.models.device.Device()
if __name__ == '__main__':
unittest.main()
| 16.232558 | 68 | 0.660458 |
c4ec47371601eafa5acd4c3980934259ce1df47f | 2,832 | py | Python | py3status/modules/pingdom.py | Lujeni/py3status | ed5cc6b7c15c45a04ae8fe9f659175d6f4effac3 | [
"BSD-2-Clause"
] | null | null | null | py3status/modules/pingdom.py | Lujeni/py3status | ed5cc6b7c15c45a04ae8fe9f659175d6f4effac3 | [
"BSD-2-Clause"
] | null | null | null | py3status/modules/pingdom.py | Lujeni/py3status | ed5cc6b7c15c45a04ae8fe9f659175d6f4effac3 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Dynamically display the latest response time of the configured checks using
the Pingdom API.
We also verify the status of the checks and colorize if needed.
Pingdom API doc : https://www.pingdom.com/services/api-documentation-rest/
#NOTE: This module needs the 'requests' python module from pypi
https://pypi.python.org/pypi/requests
"""
import requests
from time import time
class Py3status:
    """
    Display the latest response time of the configured Pingdom checks,
    colorizing the output when a check is down or slower than ``max_latency``.

    Configuration parameters:
        - app_key : create an APP KEY on pingdom first
        - cache_timeout : how often to refresh the check from pingdom
        - checks : comma separated pindgom check names to display
        - login : pingdom login
        - max_latency : maximal latency before coloring the output
        - password : pingdom password
        - request_timeout : pindgom API request timeout
    """
    # available configuration parameters
    app_key = ''
    cache_timeout = 600
    checks = ''
    login = ''
    max_latency = 500
    password = ''
    request_timeout = 15

    def pingdom_checks(self, i3s_output_list, i3s_config):
        # Accept either a list or a comma-separated string of check names.
        if not isinstance(self.checks, list):
            self.checks = self.checks.split(',')
        response = {'full_text': ''}
        result = requests.get(
            'https://api.pingdom.com/api/2.0/checks',
            auth=(self.login, self.password),
            headers={'App-Key': self.app_key},
            timeout=self.request_timeout,
        ).json()
        for check in result.get('checks', []):
            if check['name'] not in self.checks:
                continue
            if check['status'] == 'up':
                response['full_text'] += '{}: {}ms, '.format(
                    check['name'],
                    check['lastresponsetime']
                )
                # Up, but slow: flag it as degraded.
                if check['lastresponsetime'] > self.max_latency:
                    response['color'] = i3s_config['color_degraded']
            else:
                response['full_text'] += '{}: DOWN'.format(check['name'])
                response['color'] = i3s_config['color_bad']
        response['full_text'] = response['full_text'].strip(', ')
        response['cached_until'] = time() + self.cache_timeout
        return response
if __name__ == "__main__":
"""
Test this module by calling it directly.
"""
from time import sleep
x = Py3status()
config = {
'color_good': '#00FF00',
'color_bad': '#FF0000',
}
while True:
print(x.pingdom_checks([], config))
sleep(1)
| 32.181818 | 75 | 0.548729 |
24ae6ab491ffea7cda20d15e793b8c6dee69e414 | 1,990 | py | Python | tests/test_vardict.py | msk-access/postprocessing_variant_calls | fb14d35249d0eac9e263b3b5ef97fbd2ea908d9e | [
"MIT"
] | null | null | null | tests/test_vardict.py | msk-access/postprocessing_variant_calls | fb14d35249d0eac9e263b3b5ef97fbd2ea908d9e | [
"MIT"
] | null | null | null | tests/test_vardict.py | msk-access/postprocessing_variant_calls | fb14d35249d0eac9e263b3b5ef97fbd2ea908d9e | [
"MIT"
] | null | null | null | import pytest # type: ignore
import os
from typer.testing import CliRunner
from pdb import set_trace as bp
from postprocessing_variant_calls.main import app
runner = CliRunner()
# CLI argument vectors for `vardict single filter` run against the
# single-sample fixture VCF; each entry is one parametrized test case.
vardict_single_calls = [
    ['vardict', 'single', 'filter', '--inputVcf', 'tests/data/vardict/single_test.vcf' , '--tsampleName', 'Myeloid200-1', '-ad','1', '-o', 'tests/data/vardict/single'],
]
# CLI argument vectors for `vardict case-control filter` run against the
# matched tumor/normal fixture VCF.
vardict_matched = [
    ['vardict', 'case-control', 'filter', '--inputVcf', 'tests/data/vardict/case_control_test.vcf' , '--tsampleName', 'C-C1V52M-L001-d', '-ad','1' , '-o', 'tests/data/vardict/two']
]
@pytest.mark.parametrize("call", vardict_single_calls)
def test_single(call):
# vardict_tests['single']
result = runner.invoke(app, call)
result.stdout
assert result.exit_code == 0
assert '' in result.stdout
assert os.path.exists("tests/data/vardict/single/single_test_STDfilter_complex.vcf") == True
assert os.path.exists("tests/data/vardict/single/single_test_STDfilter.vcf") == True
assert os.path.exists("tests/data/vardict/single/single_test_STDfilter.txt") == True
os.remove("tests/data/vardict/single/single_test_STDfilter_complex.vcf")
os.remove("tests/data/vardict/single/single_test_STDfilter.vcf")
os.remove("tests/data/vardict/single/single_test_STDfilter.txt")
@pytest.mark.parametrize("call", vardict_matched)
def test_two(call):
result = runner.invoke(app, call)
assert result.exit_code == 0
assert '' in result.stdout
assert os.path.exists("tests/data/vardict/two/case_control_test_STDfilter_complex.vcf") == True
assert os.path.exists("tests/data/vardict/two/case_control_test_STDfilter.vcf") == True
assert os.path.exists("tests/data/vardict/two/case_control_test_STDfilter.txt") == True
os.remove("tests/data/vardict/two/case_control_test_STDfilter_complex.vcf")
os.remove("tests/data/vardict/two/case_control_test_STDfilter.vcf")
os.remove("tests/data/vardict/two/case_control_test_STDfilter.txt")
| 45.227273 | 180 | 0.736683 |
f99ca4fb316d61be1e719bb510d63c13a8c90eee | 1,013 | py | Python | tests/test_helper.py | patrick-gxg/mashumaro | a6bd88477d30deeb130b19856c0fe9e6e78a78d9 | [
"Apache-2.0"
] | 394 | 2018-11-09T11:55:11.000Z | 2022-03-27T07:39:48.000Z | tests/test_helper.py | patrick-gxg/mashumaro | a6bd88477d30deeb130b19856c0fe9e6e78a78d9 | [
"Apache-2.0"
] | 70 | 2018-12-10T19:43:01.000Z | 2022-03-17T07:37:45.000Z | tests/test_helper.py | patrick-gxg/mashumaro | a6bd88477d30deeb130b19856c0fe9e6e78a78d9 | [
"Apache-2.0"
] | 29 | 2018-12-10T19:44:19.000Z | 2022-03-11T00:12:26.000Z | from mashumaro import field_options
from mashumaro.types import SerializationStrategy
def test_field_options_helper():
    """field_options() should echo back exactly the options it is given."""
    # No arguments -> every option defaults to None.
    assert field_options() == {
        "serialize": None,
        "deserialize": None,
        "serialization_strategy": None,
        "alias": None,
    }

    def serialize(x):
        return x  # pragma no cover

    def deserialize(x):
        return x  # pragma no cover

    class TestSerializationStrategy(SerializationStrategy):  # pragma no cover
        def deserialize(self, value):
            return value

        def serialize(self, value):
            return value

    strategy = TestSerializationStrategy()
    alias = "alias"
    # Explicit arguments are passed through untouched.
    assert field_options(
        serialize=serialize,
        deserialize=deserialize,
        serialization_strategy=strategy,
        alias=alias,
    ) == {
        "serialize": serialize,
        "deserialize": deserialize,
        "serialization_strategy": strategy,
        "alias": alias,
    }
| 25.325 | 78 | 0.640671 |
921d3d23dee9d403b4241a49fe395fcbd66faae2 | 4,640 | py | Python | pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/logging/verify.py | miott/genielibs | 6464642cdd67aa2367bdbb12561af4bb060e5e62 | [
"Apache-2.0"
] | null | null | null | pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/logging/verify.py | miott/genielibs | 6464642cdd67aa2367bdbb12561af4bb060e5e62 | [
"Apache-2.0"
] | null | null | null | pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/logging/verify.py | miott/genielibs | 6464642cdd67aa2367bdbb12561af4bb060e5e62 | [
"Apache-2.0"
] | null | null | null | """ Common verification functions for logging """
# Python
import re
import logging
from datetime import datetime
# Genie
from genie.metaparser.util.exceptions import SchemaEmptyParserError
# Logging
from genie.libs.sdk.apis.iosxe.logging.get import get_logging_logs
log = logging.getLogger(__name__)
def is_logging_string_matching_regex_logged(device, oldest_timestamp, regex):
    """ Verifies string that matches regex is logged - ignoring logs from before passed timestamp

        Args:
            device ('obj'): device to use
            oldest_timestamp ('str'): oldest timestamp to match (format: Mon DD hh:mm:ss[.sss])
            regex ('str'): regex string to match; must capture a 'timestamp' group

        Returns:
            timestamp of command if found else False ('str')

        Raises:
            None
    """
    def _parse(timestamp):
        # Device log timestamps may or may not carry fractional seconds;
        # pick the matching strptime format (factored out of the old
        # duplicated if/else blocks).
        fmt = "%b %d %H:%M:%S.%f" if '.' in timestamp else "%b %d %H:%M:%S"
        return datetime.strptime(timestamp, fmt)

    logs = get_logging_logs(device=device)
    pattern = re.compile(regex)
    # Scan newest-first so the first match decides the outcome.
    for line in reversed(logs):
        match = pattern.match(line.strip())
        if match:
            timestamp = match.groupdict()["timestamp"]
            if (_parse(timestamp) - _parse(oldest_timestamp)).days < 0:
                # Newest matching entry predates the cutoff: nothing newer exists.
                return False
            return timestamp
    return False
def is_logging_bfd_down_logged(*args, **kwargs):
    """ Verifies bfd is logged down within specified time from issued command

        Args:
            device ('obj'): device to use
            oldest_timestamp ('str'): oldest timestamp to match (format: hh:mm:ss.sss)

        Returns:
            ('str') timestamp of command if found else False

        Raises:
            None
    """
    log.info("Checking logs for BFD_SESS_DOWN: ECHO FAILURE")
    # Matches lines such as (with or without the leading '*'):
    #   Jan 24 18:13:10.814 EST: %BFDFSM-6-BFD_SESS_DOWN: ... Reason: ECHO FAILURE
    bfd_down_pattern = r"^\*?(?P<timestamp>\w+ +\d+ +\S+) +\w+: +%BFDFSM-6-BFD_SESS_DOWN.*ECHO FAILURE$"
    return is_logging_string_matching_regex_logged(
        regex=bfd_down_pattern, *args, **kwargs)
def is_logging_ospf_neighbor_down_logged(*args, **kwargs):
    """ Verifies ospf neighbor is logged down within specified time from issued command

        Args:
            device ('obj'): device to use
            oldest_timestamp ('str'): oldest timestamp to match (format: hh:mm:ss.sss)

        Returns:
            ('str') timestamp of command if found else False

        Raises:
            None
    """
    log.info("Checking logs for OSPF-5-ADJCHG: Neighbor Down: BFD node down")
    # Matches lines such as (leading '*' and trailing timezone both optional):
    #   Jan 24 18:13:10.814 EST: %OSPF-5-ADJCHG: ... from FULL to DOWN, Neighbor Down: BFD node down
    ospf_down_pattern = r"^\*?(?P<timestamp>\w+ +\d+ +\S+)( +\w+)?: +%OSPF-5-ADJCHG.*FULL +to +DOWN, +Neighbor +Down: +BFD +node +down$"
    return is_logging_string_matching_regex_logged(
        regex=ospf_down_pattern, *args, **kwargs)
def is_logging_static_route_down_logged(*args, **kwargs):
    """ Verifies static route is logged down within specified time from issued command

        Args:
            device ('obj'): device to use
            oldest_timestamp ('str'): oldest timestamp to match (format: hh:mm:ss.sss)

        Returns:
            ('str') timestamp of command if found else False

        Raises:
            None
    """
    log.info("Checking logs for IP-ST: not active state")
    # Matches lines such as (leading '*' and trailing timezone both optional):
    #   Jan 24 18:13:10.814 EST: IP-ST(default): 10.4.1.1/32 [1], ... not active state
    static_down_pattern = r"^\*?(?P<timestamp>\w+ +\d+ +\S+)( +\w+)?: +IP-ST.*not +active +state$"
    return is_logging_string_matching_regex_logged(
        regex=static_down_pattern, *args, **kwargs)
| 35.692308 | 145 | 0.618534 |
356de4bc6d2e5ab5dbe49cd74eefff5a0cd27db4 | 6,646 | py | Python | helios/pipeViewer/pipe_view/gui/dialogs/select_db_dlg.py | debjyoti0891/map | abdae67964420d7d36255dcbf83e4240a1ef4295 | [
"MIT"
] | 44 | 2019-12-13T06:39:13.000Z | 2022-03-29T23:09:28.000Z | helios/pipeViewer/pipe_view/gui/dialogs/select_db_dlg.py | debjyoti0891/map | abdae67964420d7d36255dcbf83e4240a1ef4295 | [
"MIT"
] | 222 | 2020-01-14T21:58:56.000Z | 2022-03-31T20:05:12.000Z | helios/pipeViewer/pipe_view/gui/dialogs/select_db_dlg.py | debjyoti0891/map | abdae67964420d7d36255dcbf83e4240a1ef4295 | [
"MIT"
] | 19 | 2020-01-03T19:03:22.000Z | 2022-01-09T08:36:20.000Z | # # @package select_db_dbg.py
# @brief Dialog for selecting a database file
import sys
import os
import wx
import wx.lib.scrolledpanel as scrolledpanel
# # Dialog for selecting an Argos database.
#
# Use ShowModal to display the dialog and then use GetPrefix to see selected
# filename
class SelectDatabaseDlg(wx.Dialog):

    # # File name and extension for simulation info files
    #
    # Appended to a prefix to locate an Argos database's simulation information
    INFO_FILE_EXTENSION = 'simulation.info'

    # # Initializes the dialog
    # @param init_prefix Value of prefix to show in the box by default.
    # Must be a str or None
    def __init__(self, init_prefix = None):
        wx.Dialog.__init__(self,
                           None,
                           title = 'Select an Argos transaction database',
                           size = (800, 380))

        if not isinstance(init_prefix, str) and init_prefix is not None:
            raise TypeError('init_prefix must be a str or None, is a {0}'.format(type(init_prefix)))

        self.__prefix = None # Updated in __CheckSelectionState

        if init_prefix is not None:
            filepath = init_prefix + self.INFO_FILE_EXTENSION
        else:
            filepath = os.getcwd()

        # Controls

        info = wx.StaticText(self,
                             label = 'Specify a {0} file from an argos transaction database' \
                             .format(self.INFO_FILE_EXTENSION))
        info.Wrap(self.GetSize()[0] - 20)
        self.__file_txt = wx.TextCtrl(self, size = (160, -1), value = filepath)
        self.__orig_txt_colour = self.__file_txt.GetBackgroundColour()
        file_btn = wx.Button(self, id = wx.ID_FIND)
        quit_btn = wx.Button(self, id = wx.ID_EXIT)
        self.__ok_btn = wx.Button(self, id = wx.ID_OK)
        file_info_box = wx.StaticBox(self, label = 'Simulation Info')
        self.__scroll_win = scrolledpanel.ScrolledPanel(self)
        self.__scroll_win.SetupScrolling()
        self.__file_info = wx.StaticText(self.__scroll_win, label = '')

        # Bindings

        quit_btn.Bind(wx.EVT_BUTTON, self.__OnClose)
        self.__ok_btn.Bind(wx.EVT_BUTTON, self.__OnOk)
        file_btn.Bind(wx.EVT_BUTTON, self.__OnFindFile)
        self.__file_txt.Bind(wx.EVT_TEXT, self.__OnChangeFilename)

        # Layout

        sbs = wx.StaticBoxSizer(file_info_box, wx.HORIZONTAL)
        sbs.Add(self.__scroll_win, 1, wx.EXPAND | wx.ALL, 5)

        sws = wx.BoxSizer(wx.VERTICAL)
        sws.Add(self.__file_info, 0, wx.EXPAND)
        self.__scroll_win.SetSizer(sws)
        sws.Fit(self.__scroll_win)

        open_row = wx.BoxSizer(wx.HORIZONTAL)
        open_row.Add(self.__file_txt, 1, wx.EXPAND)
        open_row.Add((10, 1), 0, wx.EXPAND)
        open_row.Add(file_btn, 0, wx.EXPAND)

        buttons_row = wx.BoxSizer(wx.HORIZONTAL)
        buttons_row.Add(quit_btn, 0, wx.ALIGN_LEFT | wx.ALIGN_BOTTOM)
        buttons_row.Add((1, 1), 1, wx.EXPAND)
        buttons_row.Add(self.__ok_btn, 0, wx.ALIGN_BOTTOM)

        sz = wx.BoxSizer(wx.VERTICAL)
        sz.Add(info, 0, wx.EXPAND)
        sz.Add((1, 15), 0, wx.EXPAND)
        sz.Add(open_row, 0, wx.EXPAND)
        sz.Add((1, 25), 0, wx.EXPAND)
        sz.Add(sbs, 1, wx.EXPAND)
        sz.Add((1, 25), 0, wx.EXPAND)
        sz.Add(buttons_row, 0, wx.EXPAND)

        border = wx.BoxSizer(wx.VERTICAL)
        border.Add(sz, 1, wx.EXPAND | wx.ALL, 10)
        self.SetSizer(border)
        self.SetAutoLayout(1)

        self.__CheckSelectionState()

    def Show(self):
        raise NotImplementedError('Cannot Show() this dialog. Use ShowModal instead')

    # # Gets the prefix selected by the dialog
    # @return The prefix selected while the dialog was shown. Is a string if
    # found and None if no database was chosen
    #
    # This should be checked after invoking ShowModal() on this object
    def GetPrefix(self):
        return self.__prefix

    # # Handler for Close button
    def __OnClose(self, evt):
        self.__prefix = None
        self.EndModal(wx.CANCEL)

    # # Handler for Ok button
    def __OnOk(self, evt):
        # self.__prefix already set before this button was enabled
        self.EndModal(wx.OK)

    # # Handler for Find button
    def __OnFindFile(self, evt):
        dlg = wx.FileDialog(self, "Select Argos database simulation.info file",
                            defaultFile = self.__file_txt.GetValue(),
                            wildcard = 'Argos Simulation info files (*{0})|*{0}' \
                            .format(self.INFO_FILE_EXTENSION))
        dlg.ShowModal()
        fp = dlg.GetPath()
        if fp is not None and fp != '':
            self.__file_txt.SetValue(fp)
        self.__CheckSelectionState()

    # # Handler for Changing the filename in file_txt
    def __OnChangeFilename(self, evt):
        self.__CheckSelectionState()

    # # Checks on the value in the self.__file_txt box to see if it points to a
    # valid simulation
    #
    # Updates self.__prefix
    # Updates or clears self.__file_info and en/disables self.__ok_btn depending
    # on whether selection points to a valid file. Also changes colors of box
    def __CheckSelectionState(self):
        filepath = self.__file_txt.GetValue()
        suffix_pos = len(filepath) - len(self.INFO_FILE_EXTENSION)
        # endswith() instead of find(): the old find()-based check misfired
        # when the extension string also appeared earlier in the path.
        # (reason is informational only; it is not yet surfaced in the UI.)
        if not filepath.endswith(self.INFO_FILE_EXTENSION):
            valid = False
            reason = 'Filename does not contain suffix "{0}"'.format(self.INFO_FILE_EXTENSION)
        elif not os.path.exists(filepath):
            valid = False
            reason = 'File does not exist'
        else:
            try:
                # Show the whole simulation.info contents as the summary.
                with open(filepath, 'r') as f:
                    summary = f.read()
                self.__file_info.SetLabel(summary)
            except IOError:
                valid = False
                reason = 'Cannot open file for reading'
            else:
                valid = True
                reason = ''

        self.__ok_btn.Enable(valid)
        if valid:
            self.__prefix = filepath[:suffix_pos]
            self.__file_txt.SetBackgroundColour(wx.Colour(235, 255, 235))
        else:
            self.__prefix = None
            self.__file_info.SetLabel('')
            self.__file_txt.SetBackgroundColour(wx.Colour(255, 220, 220))

        self.__scroll_win.FitInside()
        self.__scroll_win.Layout()
| 35.351064 | 100 | 0.599759 |
a96c94b0d02dc6e2b87ecdd680565e449a0bead0 | 142 | py | Python | exercicios-turtle/.history/conversor_temp_20210624131905.py | Aleff13/poo-ufsc | bc1574df26f840a3c0fd5b1e0c72e5d69f61493d | [
"MIT"
] | 1 | 2021-11-28T18:49:21.000Z | 2021-11-28T18:49:21.000Z | exercicios-turtle/.history/conversor_temp_20210624131905.py | Aleff13/poo-ufsc | bc1574df26f840a3c0fd5b1e0c72e5d69f61493d | [
"MIT"
] | null | null | null | exercicios-turtle/.history/conversor_temp_20210624131905.py | Aleff13/poo-ufsc | bc1574df26f840a3c0fd5b1e0c72e5d69f61493d | [
"MIT"
] | null | null | null | print("Abaixo digite o valor da temperatura em graus para saber sua conversão em fahrenheit")
temp = int(input('digite a temperatura'))
if = | 35.5 | 93 | 0.760563 |
a1cb2f796e79563a804c723eda8f71f2a95db2a7 | 8,028 | py | Python | segeval/ml/__init__.py | cfournie/segmentation.evaluation | c4140b48112aac697aa7f05effe8026648ebc501 | [
"BSD-3-Clause"
] | 27 | 2015-03-22T23:06:47.000Z | 2022-01-21T23:21:05.000Z | segeval/ml/__init__.py | cfournie/segmentation.evaluation | c4140b48112aac697aa7f05effe8026648ebc501 | [
"BSD-3-Clause"
] | 4 | 2015-03-12T18:27:08.000Z | 2019-10-24T13:09:31.000Z | segeval/ml/__init__.py | cfournie/segmentation.evaluation | c4140b48112aac697aa7f05effe8026648ebc501 | [
"BSD-3-Clause"
] | 7 | 2015-03-12T18:29:27.000Z | 2019-01-25T08:52:05.000Z | '''
Machine learning metric package. This package a variety of traditional machine
learning metrics that have been adapted for use in segmentation.
.. moduleauthor:: Chris Fournier <chris.m.fournier@gmail.com>
'''
from __future__ import absolute_import, division
from decimal import Decimal
from collections import defaultdict
from segeval.util import SegmentationMetricError
from segeval.util.math import mean
from segeval.util.lang import enum
Average = enum('micro', 'macro')
def __value_micro_macro__(fnc, arguments, classification=None,
                          version=Average.micro):
    '''
    Apply metric function ``fnc`` to one classification label or, when
    ``classification`` is None, average it over every label in the matrix
    (micro- or macro-averaged per ``version``).  ``arguments['matrix']`` may
    be a single :class:`ConfusionMatrix` or a dict mapping label -> matrix,
    in which case a dict of per-label results is returned.
    '''
    def __compute__(fnc, classes, arguments, classification, version):
        # Dispatch on whether a single class was requested and, if not,
        # which averaging scheme to apply across all classes.
        if classification is None:
            if version is Average.micro:
                # Micro-average
                # Sum the per-class numerator/denominator parts, then divide once.
                numerator, denominator = 0, 0
                for classification in classes:
                    arguments['classification'] = classification
                    arguments['return_parts'] = True
                    class_numerator, class_denominator = fnc(**arguments)
                    numerator += class_numerator
                    denominator += class_denominator
                if numerator == 0:
                    return 0
                else:
                    return Decimal(numerator) / denominator
            elif version is Average.macro:
                # Macro-average
                # Average the fully-computed per-class values.
                values = list()
                for classification in classes:
                    arguments['classification'] = classification
                    value = fnc(**arguments)
                    values.append(value)
                return mean(values)
            else:
                raise SegmentationMetricError('Unrecognized type of averaging;\
                    expected Average.micro or Average.macro')
        else:
            # Single requested class: evaluate directly.
            return fnc(**arguments)
    if isinstance(arguments['matrix'], ConfusionMatrix):
        classes = arguments['matrix'].classes()
        return __compute__(fnc, classes, arguments, classification, version)
    else:
        # Dict of label -> ConfusionMatrix: compute each one independently
        # on a copied argument dict so callers' arguments stay untouched.
        values = dict()
        new_arguments = dict(arguments)
        for label, matrix in arguments['matrix'].items():
            new_arguments['matrix'] = matrix
            classes = matrix.classes()
            values[label] = __compute__(
                fnc, classes, new_arguments, classification, version)
        return values
def __precision__(matrix, classification, return_parts=False):
    '''
    Calculate precision for one classification label: true positives over
    everything the classifier predicted as that label.

    :param matrix: Confusion matrix indexed as ``matrix[predicted][actual]``
    :param classification: Label to compute precision for
    :param return_parts: When True, return ``(numerator, denominator)``
    :returns: :class:`decimal.Decimal` precision, or the parts tuple
    '''
    predicted = classification
    # Everything predicted as this label (TP + FP).
    denominator = 0
    for actual in matrix.classes():
        denominator += matrix[predicted][actual]
    numerator = matrix[classification][classification]
    if return_parts:
        return numerator, denominator
    else:
        # "numerator is 0" relied on CPython small-int interning; use equality.
        if numerator == 0:
            return Decimal(0)
        else:
            return Decimal(numerator) / Decimal(denominator)
def __recall__(matrix, classification, return_parts=False):
    '''
    Calculate recall for one class: TP / (TP + FN), i.e. the diagonal cell
    divided by the sum of the column of actual occurrences of this class.

    :param matrix: Confusion matrix indexed as ``matrix[predicted][actual]``.
    :param classification: Class label whose recall is computed.
    :param return_parts: If ``True``, return the raw ``(numerator,
                         denominator)`` counts instead of the ratio
                         (used by micro-averaging).
    :returns: :class:`decimal.Decimal`, or a count pair.
    '''
    actual = classification
    denominator = 0
    # Sum this actual label's column over all predicted classes (TP + FN).
    for predicted in matrix.classes():
        denominator += matrix[predicted][actual]
    numerator = matrix[classification][classification]
    if return_parts:
        return numerator, denominator
    else:
        # BUG FIX: the original tested ``numerator is 0`` -- identity
        # comparison against an int literal is implementation-defined
        # (and a SyntaxWarning on modern CPython); use equality.  Also
        # return Decimal(0) rather than int 0 for a consistent type.
        if numerator == 0:
            return Decimal(0)
        else:
            return Decimal(numerator) / Decimal(denominator)
def __fmeasure__(matrix, classification=None, beta=Decimal('1.0'),
                 return_parts=False):
    '''
    Calculate F-measure, also known as F-score, for one class:

    .. math::
        \\text{F}_{\\beta} = \\frac{(1 + \\beta^2) \\cdot P \\cdot R}\
{(\\beta^2 \\cdot P) + R}

    where P and R are the class precision and recall.

    :param matrix: Confusion matrix
    :param classification: Class label to compute the score for (the
                           original docstring mislabelled this as
                           ``predicted``)
    :param beta: Weight of recall relative to precision
    :param return_parts: If ``True``, return the formula's numerator and
                         denominator instead of the ratio
    :type matrix: :class:`ConfusionMatrix`
    :returns: F-measure.
    :rtype: :class:`decimal.Decimal`
    '''
    class_precision = __precision__(matrix, classification)
    class_recall = __recall__(matrix, classification)
    if not return_parts and (class_precision == 0 or class_recall == 0):
        # Either component is zero -> F-measure is zero.  Return Decimal(0)
        # (not int 0) so callers always receive a Decimal, consistent with
        # __precision__ and __recall__.
        return Decimal(0)
    else:
        # Convert to Decimal
        beta = Decimal(str(beta))
        # Calculate terms
        beta2 = beta ** 2
        beta2_1 = Decimal('1.0') + beta2
        numerator = beta2_1 * class_precision * class_recall
        denominator = (beta2 * class_precision) + class_recall
        if return_parts:
            return numerator, denominator
        else:
            return Decimal(numerator) / Decimal(denominator)
def precision(matrix, classification=None, version=Average.micro):
    '''
    Compute precision, optionally micro- or macro-averaged over all classes.

    :param matrix: Confusion matrix, or a dict of matrices keyed by label
    :param classification: Restrict the computation to this class label
    :param version: Averaging strategy when no class is given
    :type matrix: :class:`ConfusionMatrix`
    :type classification: Any :class:`dict` index
    :type version: :class:`Average`
    '''
    metric_args = {'matrix': matrix,
                   'classification': classification}
    return __value_micro_macro__(__precision__, metric_args, classification,
                                 version)
def recall(matrix, classification=None, version=Average.micro):
    '''
    Compute recall, optionally micro- or macro-averaged over all classes.

    :param matrix: Confusion matrix, or a dict of matrices keyed by label
    :param classification: Restrict the computation to this class label
    :param version: Averaging strategy when no class is given
    :type matrix: :class:`ConfusionMatrix`
    :type classification: Any :class:`dict` index
    :type version: :class:`Average`
    '''
    metric_args = {'matrix': matrix,
                   'classification': classification}
    return __value_micro_macro__(__recall__, metric_args, classification,
                                 version)
def fmeasure(matrix, classification=None, beta=Decimal('1.0'),
             version=Average.micro):
    '''
    Compute the F-measure, optionally micro- or macro-averaged over all
    classes.

    :param matrix: Confusion matrix, or a dict of matrices keyed by label
    :param classification: Restrict the computation to this class label
    :param beta: Weight of recall relative to precision
    :param version: Averaging strategy when no class is given
    :type matrix: :class:`ConfusionMatrix`
    :type classification: Any :class:`dict` index
    :type version: :class:`Average`
    '''
    metric_args = {'matrix': matrix,
                   'classification': classification,
                   'beta': beta}
    return __value_micro_macro__(__fmeasure__, metric_args, classification,
                                 version)
class _InnerConfusionMatrix(defaultdict):
    '''
    One row of a :class:`ConfusionMatrix`: an int-valued ``defaultdict``
    that flags its owning matrix as dirty whenever a cell is written, so
    the owner knows its cached class list must be recomputed.
    '''

    def __init__(self, parent):
        defaultdict.__init__(self, int)
        self.__parent__ = parent

    def __setitem__(self, key, value):
        # A write may introduce a new class label; invalidate the cache.
        self.__parent__.__dirty_classes__ = True
        defaultdict.__setitem__(self, key, value)
class ConfusionMatrix(dict):
    '''
    A :func:`dict`-like representation of a confusion matrix offering some
    automation.  To access/store values, use: ``matrix[predicted][actual]``.
    The set of observed class labels is cached and lazily recomputed.
    '''
    # Class-level defaults kept for backward compatibility with code that
    # reads them off the class; real state is per-instance (see __init__).
    __classes__ = set()
    __dirty_classes__ = False

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # BUG FIX: without per-instance state, every ConfusionMatrix shared
        # the single mutable set object created at class level above, so
        # mutating one instance's cached class set leaked into all others.
        self.__classes__ = set()
        # If seeded with initial data, force a recompute on first classes().
        self.__dirty_classes__ = bool(self)

    def __setitem__(self, key, value):
        # Direct assignment of whole rows is unsupported; rows are created
        # implicitly via __getitem__.
        raise AttributeError('no such method')

    def __getitem__(self, key):
        '''
        Return default dicts and store them so that the following is possible:
        >>> matrix = ConfusionMatrix()
        >>> matrix['a']['b'] += 1
        >>> matrix['a']['b']
        1
        >>> matrix['a']['a'] = 1
        >>> matrix['a']['a']
        1
        >>> matrix['c']['d']
        0
        '''
        value = None
        if key not in self:
            # Lazily create a dirty-tracking row for unseen predicted labels.
            value = _InnerConfusionMatrix(self)
            dict.__setitem__(self, key, value)
        else:
            value = dict.__getitem__(self, key)
        return value

    def classes(self):
        '''
        Retrieve the set of all classes (predicted and actual labels seen
        so far); recomputed only when the matrix has been written to.
        '''
        if self.__dirty_classes__:
            self.__classes__ = set()
            for predicted, values in self.items():
                self.__classes__.add(predicted)
                for actual in values.keys():
                    self.__classes__.add(actual)
            self.__dirty_classes__ = False
        return self.__classes__
| 31.731225 | 86 | 0.626557 |
a07076e8dd2359b6ac9323c87ec8f9119c8bdace | 17,516 | py | Python | nncls/data/_meta.py | xiaohu2015/nncls | b7071b69c0ca9521e914eab2f8dcfaf1a80723d6 | [
"Apache-2.0"
] | 8 | 2021-11-10T13:09:26.000Z | 2021-12-12T00:11:46.000Z | nncls/data/_meta.py | xiaohu2015/nncls | b7071b69c0ca9521e914eab2f8dcfaf1a80723d6 | [
"Apache-2.0"
] | null | null | null | nncls/data/_meta.py | xiaohu2015/nncls | b7071b69c0ca9521e914eab2f8dcfaf1a80723d6 | [
"Apache-2.0"
] | null | null | null |
_IMAGENET_CATEGORIES = [
"tench",
"goldfish",
"great white shark",
"tiger shark",
"hammerhead",
"electric ray",
"stingray",
"cock",
"hen",
"ostrich",
"brambling",
"goldfinch",
"house finch",
"junco",
"indigo bunting",
"robin",
"bulbul",
"jay",
"magpie",
"chickadee",
"water ouzel",
"kite",
"bald eagle",
"vulture",
"great grey owl",
"European fire salamander",
"common newt",
"eft",
"spotted salamander",
"axolotl",
"bullfrog",
"tree frog",
"tailed frog",
"loggerhead",
"leatherback turtle",
"mud turtle",
"terrapin",
"box turtle",
"banded gecko",
"common iguana",
"American chameleon",
"whiptail",
"agama",
"frilled lizard",
"alligator lizard",
"Gila monster",
"green lizard",
"African chameleon",
"Komodo dragon",
"African crocodile",
"American alligator",
"triceratops",
"thunder snake",
"ringneck snake",
"hognose snake",
"green snake",
"king snake",
"garter snake",
"water snake",
"vine snake",
"night snake",
"boa constrictor",
"rock python",
"Indian cobra",
"green mamba",
"sea snake",
"horned viper",
"diamondback",
"sidewinder",
"trilobite",
"harvestman",
"scorpion",
"black and gold garden spider",
"barn spider",
"garden spider",
"black widow",
"tarantula",
"wolf spider",
"tick",
"centipede",
"black grouse",
"ptarmigan",
"ruffed grouse",
"prairie chicken",
"peacock",
"quail",
"partridge",
"African grey",
"macaw",
"sulphur-crested cockatoo",
"lorikeet",
"coucal",
"bee eater",
"hornbill",
"hummingbird",
"jacamar",
"toucan",
"drake",
"red-breasted merganser",
"goose",
"black swan",
"tusker",
"echidna",
"platypus",
"wallaby",
"koala",
"wombat",
"jellyfish",
"sea anemone",
"brain coral",
"flatworm",
"nematode",
"conch",
"snail",
"slug",
"sea slug",
"chiton",
"chambered nautilus",
"Dungeness crab",
"rock crab",
"fiddler crab",
"king crab",
"American lobster",
"spiny lobster",
"crayfish",
"hermit crab",
"isopod",
"white stork",
"black stork",
"spoonbill",
"flamingo",
"little blue heron",
"American egret",
"bittern",
"crane bird",
"limpkin",
"European gallinule",
"American coot",
"bustard",
"ruddy turnstone",
"red-backed sandpiper",
"redshank",
"dowitcher",
"oystercatcher",
"pelican",
"king penguin",
"albatross",
"grey whale",
"killer whale",
"dugong",
"sea lion",
"Chihuahua",
"Japanese spaniel",
"Maltese dog",
"Pekinese",
"Shih-Tzu",
"Blenheim spaniel",
"papillon",
"toy terrier",
"Rhodesian ridgeback",
"Afghan hound",
"basset",
"beagle",
"bloodhound",
"bluetick",
"black-and-tan coonhound",
"Walker hound",
"English foxhound",
"redbone",
"borzoi",
"Irish wolfhound",
"Italian greyhound",
"whippet",
"Ibizan hound",
"Norwegian elkhound",
"otterhound",
"Saluki",
"Scottish deerhound",
"Weimaraner",
"Staffordshire bullterrier",
"American Staffordshire terrier",
"Bedlington terrier",
"Border terrier",
"Kerry blue terrier",
"Irish terrier",
"Norfolk terrier",
"Norwich terrier",
"Yorkshire terrier",
"wire-haired fox terrier",
"Lakeland terrier",
"Sealyham terrier",
"Airedale",
"cairn",
"Australian terrier",
"Dandie Dinmont",
"Boston bull",
"miniature schnauzer",
"giant schnauzer",
"standard schnauzer",
"Scotch terrier",
"Tibetan terrier",
"silky terrier",
"soft-coated wheaten terrier",
"West Highland white terrier",
"Lhasa",
"flat-coated retriever",
"curly-coated retriever",
"golden retriever",
"Labrador retriever",
"Chesapeake Bay retriever",
"German short-haired pointer",
"vizsla",
"English setter",
"Irish setter",
"Gordon setter",
"Brittany spaniel",
"clumber",
"English springer",
"Welsh springer spaniel",
"cocker spaniel",
"Sussex spaniel",
"Irish water spaniel",
"kuvasz",
"schipperke",
"groenendael",
"malinois",
"briard",
"kelpie",
"komondor",
"Old English sheepdog",
"Shetland sheepdog",
"collie",
"Border collie",
"Bouvier des Flandres",
"Rottweiler",
"German shepherd",
"Doberman",
"miniature pinscher",
"Greater Swiss Mountain dog",
"Bernese mountain dog",
"Appenzeller",
"EntleBucher",
"boxer",
"bull mastiff",
"Tibetan mastiff",
"French bulldog",
"Great Dane",
"Saint Bernard",
"Eskimo dog",
"malamute",
"Siberian husky",
"dalmatian",
"affenpinscher",
"basenji",
"pug",
"Leonberg",
"Newfoundland",
"Great Pyrenees",
"Samoyed",
"Pomeranian",
"chow",
"keeshond",
"Brabancon griffon",
"Pembroke",
"Cardigan",
"toy poodle",
"miniature poodle",
"standard poodle",
"Mexican hairless",
"timber wolf",
"white wolf",
"red wolf",
"coyote",
"dingo",
"dhole",
"African hunting dog",
"hyena",
"red fox",
"kit fox",
"Arctic fox",
"grey fox",
"tabby",
"tiger cat",
"Persian cat",
"Siamese cat",
"Egyptian cat",
"cougar",
"lynx",
"leopard",
"snow leopard",
"jaguar",
"lion",
"tiger",
"cheetah",
"brown bear",
"American black bear",
"ice bear",
"sloth bear",
"mongoose",
"meerkat",
"tiger beetle",
"ladybug",
"ground beetle",
"long-horned beetle",
"leaf beetle",
"dung beetle",
"rhinoceros beetle",
"weevil",
"fly",
"bee",
"ant",
"grasshopper",
"cricket",
"walking stick",
"cockroach",
"mantis",
"cicada",
"leafhopper",
"lacewing",
"dragonfly",
"damselfly",
"admiral",
"ringlet",
"monarch",
"cabbage butterfly",
"sulphur butterfly",
"lycaenid",
"starfish",
"sea urchin",
"sea cucumber",
"wood rabbit",
"hare",
"Angora",
"hamster",
"porcupine",
"fox squirrel",
"marmot",
"beaver",
"guinea pig",
"sorrel",
"zebra",
"hog",
"wild boar",
"warthog",
"hippopotamus",
"ox",
"water buffalo",
"bison",
"ram",
"bighorn",
"ibex",
"hartebeest",
"impala",
"gazelle",
"Arabian camel",
"llama",
"weasel",
"mink",
"polecat",
"black-footed ferret",
"otter",
"skunk",
"badger",
"armadillo",
"three-toed sloth",
"orangutan",
"gorilla",
"chimpanzee",
"gibbon",
"siamang",
"guenon",
"patas",
"baboon",
"macaque",
"langur",
"colobus",
"proboscis monkey",
"marmoset",
"capuchin",
"howler monkey",
"titi",
"spider monkey",
"squirrel monkey",
"Madagascar cat",
"indri",
"Indian elephant",
"African elephant",
"lesser panda",
"giant panda",
"barracouta",
"eel",
"coho",
"rock beauty",
"anemone fish",
"sturgeon",
"gar",
"lionfish",
"puffer",
"abacus",
"abaya",
"academic gown",
"accordion",
"acoustic guitar",
"aircraft carrier",
"airliner",
"airship",
"altar",
"ambulance",
"amphibian",
"analog clock",
"apiary",
"apron",
"ashcan",
"assault rifle",
"backpack",
"bakery",
"balance beam",
"balloon",
"ballpoint",
"Band Aid",
"banjo",
"bannister",
"barbell",
"barber chair",
"barbershop",
"barn",
"barometer",
"barrel",
"barrow",
"baseball",
"basketball",
"bassinet",
"bassoon",
"bathing cap",
"bath towel",
"bathtub",
"beach wagon",
"beacon",
"beaker",
"bearskin",
"beer bottle",
"beer glass",
"bell cote",
"bib",
"bicycle-built-for-two",
"bikini",
"binder",
"binoculars",
"birdhouse",
"boathouse",
"bobsled",
"bolo tie",
"bonnet",
"bookcase",
"bookshop",
"bottlecap",
"bow",
"bow tie",
"brass",
"brassiere",
"breakwater",
"breastplate",
"broom",
"bucket",
"buckle",
"bulletproof vest",
"bullet train",
"butcher shop",
"cab",
"caldron",
"candle",
"cannon",
"canoe",
"can opener",
"cardigan",
"car mirror",
"carousel",
"carpenter's kit",
"carton",
"car wheel",
"cash machine",
"cassette",
"cassette player",
"castle",
"catamaran",
"CD player",
"cello",
"cellular telephone",
"chain",
"chainlink fence",
"chain mail",
"chain saw",
"chest",
"chiffonier",
"chime",
"china cabinet",
"Christmas stocking",
"church",
"cinema",
"cleaver",
"cliff dwelling",
"cloak",
"clog",
"cocktail shaker",
"coffee mug",
"coffeepot",
"coil",
"combination lock",
"computer keyboard",
"confectionery",
"container ship",
"convertible",
"corkscrew",
"cornet",
"cowboy boot",
"cowboy hat",
"cradle",
"crane",
"crash helmet",
"crate",
"crib",
"Crock Pot",
"croquet ball",
"crutch",
"cuirass",
"dam",
"desk",
"desktop computer",
"dial telephone",
"diaper",
"digital clock",
"digital watch",
"dining table",
"dishrag",
"dishwasher",
"disk brake",
"dock",
"dogsled",
"dome",
"doormat",
"drilling platform",
"drum",
"drumstick",
"dumbbell",
"Dutch oven",
"electric fan",
"electric guitar",
"electric locomotive",
"entertainment center",
"envelope",
"espresso maker",
"face powder",
"feather boa",
"file",
"fireboat",
"fire engine",
"fire screen",
"flagpole",
"flute",
"folding chair",
"football helmet",
"forklift",
"fountain",
"fountain pen",
"four-poster",
"freight car",
"French horn",
"frying pan",
"fur coat",
"garbage truck",
"gasmask",
"gas pump",
"goblet",
"go-kart",
"golf ball",
"golfcart",
"gondola",
"gong",
"gown",
"grand piano",
"greenhouse",
"grille",
"grocery store",
"guillotine",
"hair slide",
"hair spray",
"half track",
"hammer",
"hamper",
"hand blower",
"hand-held computer",
"handkerchief",
"hard disc",
"harmonica",
"harp",
"harvester",
"hatchet",
"holster",
"home theater",
"honeycomb",
"hook",
"hoopskirt",
"horizontal bar",
"horse cart",
"hourglass",
"iPod",
"iron",
"jack-o'-lantern",
"jean",
"jeep",
"jersey",
"jigsaw puzzle",
"jinrikisha",
"joystick",
"kimono",
"knee pad",
"knot",
"lab coat",
"ladle",
"lampshade",
"laptop",
"lawn mower",
"lens cap",
"letter opener",
"library",
"lifeboat",
"lighter",
"limousine",
"liner",
"lipstick",
"Loafer",
"lotion",
"loudspeaker",
"loupe",
"lumbermill",
"magnetic compass",
"mailbag",
"mailbox",
"maillot",
"maillot tank suit",
"manhole cover",
"maraca",
"marimba",
"mask",
"matchstick",
"maypole",
"maze",
"measuring cup",
"medicine chest",
"megalith",
"microphone",
"microwave",
"military uniform",
"milk can",
"minibus",
"miniskirt",
"minivan",
"missile",
"mitten",
"mixing bowl",
"mobile home",
"Model T",
"modem",
"monastery",
"monitor",
"moped",
"mortar",
"mortarboard",
"mosque",
"mosquito net",
"motor scooter",
"mountain bike",
"mountain tent",
"mouse",
"mousetrap",
"moving van",
"muzzle",
"nail",
"neck brace",
"necklace",
"nipple",
"notebook",
"obelisk",
"oboe",
"ocarina",
"odometer",
"oil filter",
"organ",
"oscilloscope",
"overskirt",
"oxcart",
"oxygen mask",
"packet",
"paddle",
"paddlewheel",
"padlock",
"paintbrush",
"pajama",
"palace",
"panpipe",
"paper towel",
"parachute",
"parallel bars",
"park bench",
"parking meter",
"passenger car",
"patio",
"pay-phone",
"pedestal",
"pencil box",
"pencil sharpener",
"perfume",
"Petri dish",
"photocopier",
"pick",
"pickelhaube",
"picket fence",
"pickup",
"pier",
"piggy bank",
"pill bottle",
"pillow",
"ping-pong ball",
"pinwheel",
"pirate",
"pitcher",
"plane",
"planetarium",
"plastic bag",
"plate rack",
"plow",
"plunger",
"Polaroid camera",
"pole",
"police van",
"poncho",
"pool table",
"pop bottle",
"pot",
"potter's wheel",
"power drill",
"prayer rug",
"printer",
"prison",
"projectile",
"projector",
"puck",
"punching bag",
"purse",
"quill",
"quilt",
"racer",
"racket",
"radiator",
"radio",
"radio telescope",
"rain barrel",
"recreational vehicle",
"reel",
"reflex camera",
"refrigerator",
"remote control",
"restaurant",
"revolver",
"rifle",
"rocking chair",
"rotisserie",
"rubber eraser",
"rugby ball",
"rule",
"running shoe",
"safe",
"safety pin",
"saltshaker",
"sandal",
"sarong",
"sax",
"scabbard",
"scale",
"school bus",
"schooner",
"scoreboard",
"screen",
"screw",
"screwdriver",
"seat belt",
"sewing machine",
"shield",
"shoe shop",
"shoji",
"shopping basket",
"shopping cart",
"shovel",
"shower cap",
"shower curtain",
"ski",
"ski mask",
"sleeping bag",
"slide rule",
"sliding door",
"slot",
"snorkel",
"snowmobile",
"snowplow",
"soap dispenser",
"soccer ball",
"sock",
"solar dish",
"sombrero",
"soup bowl",
"space bar",
"space heater",
"space shuttle",
"spatula",
"speedboat",
"spider web",
"spindle",
"sports car",
"spotlight",
"stage",
"steam locomotive",
"steel arch bridge",
"steel drum",
"stethoscope",
"stole",
"stone wall",
"stopwatch",
"stove",
"strainer",
"streetcar",
"stretcher",
"studio couch",
"stupa",
"submarine",
"suit",
"sundial",
"sunglass",
"sunglasses",
"sunscreen",
"suspension bridge",
"swab",
"sweatshirt",
"swimming trunks",
"swing",
"switch",
"syringe",
"table lamp",
"tank",
"tape player",
"teapot",
"teddy",
"television",
"tennis ball",
"thatch",
"theater curtain",
"thimble",
"thresher",
"throne",
"tile roof",
"toaster",
"tobacco shop",
"toilet seat",
"torch",
"totem pole",
"tow truck",
"toyshop",
"tractor",
"trailer truck",
"tray",
"trench coat",
"tricycle",
"trimaran",
"tripod",
"triumphal arch",
"trolleybus",
"trombone",
"tub",
"turnstile",
"typewriter keyboard",
"umbrella",
"unicycle",
"upright",
"vacuum",
"vase",
"vault",
"velvet",
"vending machine",
"vestment",
"viaduct",
"violin",
"volleyball",
"waffle iron",
"wall clock",
"wallet",
"wardrobe",
"warplane",
"washbasin",
"washer",
"water bottle",
"water jug",
"water tower",
"whiskey jug",
"whistle",
"wig",
"window screen",
"window shade",
"Windsor tie",
"wine bottle",
"wing",
"wok",
"wooden spoon",
"wool",
"worm fence",
"wreck",
"yawl",
"yurt",
"web site",
"comic book",
"crossword puzzle",
"street sign",
"traffic light",
"book jacket",
"menu",
"plate",
"guacamole",
"consomme",
"hot pot",
"trifle",
"ice cream",
"ice lolly",
"French loaf",
"bagel",
"pretzel",
"cheeseburger",
"hotdog",
"mashed potato",
"head cabbage",
"broccoli",
"cauliflower",
"zucchini",
"spaghetti squash",
"acorn squash",
"butternut squash",
"cucumber",
"artichoke",
"bell pepper",
"cardoon",
"mushroom",
"Granny Smith",
"strawberry",
"orange",
"lemon",
"fig",
"pineapple",
"banana",
"jackfruit",
"custard apple",
"pomegranate",
"hay",
"carbonara",
"chocolate sauce",
"dough",
"meat loaf",
"pizza",
"potpie",
"burrito",
"red wine",
"espresso",
"cup",
"eggnog",
"alp",
"bubble",
"cliff",
"coral reef",
"geyser",
"lakeside",
"promontory",
"sandbar",
"seashore",
"valley",
"volcano",
"ballplayer",
"groom",
"scuba diver",
"rapeseed",
"daisy",
"yellow lady's slipper",
"corn",
"acorn",
"hip",
"buckeye",
"coral fungus",
"agaric",
"gyromitra",
"stinkhorn",
"earthstar",
"hen-of-the-woods",
"bolete",
"ear",
"toilet tissue",
]
| 17.446215 | 37 | 0.515757 |
dea1be16f0239343a92a5db2b2f074a2af189da5 | 261 | py | Python | tests/test_migrations_func.py | Drizzt1991/plasticine | be61baa88f53bdfa666d068a14f17ccc0cfe4d02 | [
"MIT"
] | null | null | null | tests/test_migrations_func.py | Drizzt1991/plasticine | be61baa88f53bdfa666d068a14f17ccc0cfe4d02 | [
"MIT"
] | null | null | null | tests/test_migrations_func.py | Drizzt1991/plasticine | be61baa88f53bdfa666d068a14f17ccc0cfe4d02 | [
"MIT"
] | null | null | null |
from unittest import TestCase
from elasticine.Migrations import MappingMigration
class TestMigrationsFunc(TestCase):
    # Functional-test stub for elasticine migrations.
    # NOTE(review): the imported MappingMigration is never used, and
    # ``Elasticsearch``, ``hosts`` and ``es_params`` are undefined in this
    # module -- setUp will raise NameError as written; confirm the intended
    # imports/fixtures before running this suite.
    def setUp(self):
        # Presumably meant to open a client for the migration under test.
        self.elastic = Elasticsearch(hosts, **es_params)
    def test_simple_migration(self):
        # Placeholder: no assertions yet.
        pass
12839363bf8a8fff2a875e22fd522df989c543e8 | 2,491 | py | Python | monster.py | ichomchom/Indeed-Bot | ce0355e9533327b01caf547b000988282aed4483 | [
"MIT"
] | 17 | 2019-08-04T18:05:34.000Z | 2022-03-20T22:30:51.000Z | monster.py | ichomchom/Indeed-Bot | ce0355e9533327b01caf547b000988282aed4483 | [
"MIT"
] | 1 | 2020-05-30T16:59:36.000Z | 2021-04-20T07:53:49.000Z | monster.py | ichomchom/Indeed-Bot | ce0355e9533327b01caf547b000988282aed4483 | [
"MIT"
] | 7 | 2019-12-17T19:21:28.000Z | 2022-02-08T11:27:23.000Z | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver import ActionChains
from bs4 import BeautifulSoup
import info
# Create chrome driver
# NOTE(review): Options is imported but never instantiated, so the
# commented-out headless toggle below has no effect as written.
driver = webdriver.Chrome('./chromedriver.exe')
# Headless chrome driver
#options.headless = True
# Open the driver and go to monster.com
driver.get('https://login20.monster.com')
# Log in with the credentials stored in the local info module.
driver.find_element_by_xpath('//*[@id="EmailAddress"]').send_keys(info.email)
driver.find_element_by_xpath('//*[@id="Password"]').send_keys(info.password)
driver.find_element_by_xpath('//*[@id="btn-login"]').click()
# Fill the job-title search box (select-all first to clear any prefill).
titleElem = driver.find_element_by_xpath('//*[@id="rs-search-job"]')
titleElem.send_keys(Keys.CONTROL, 'a')
titleElem.send_keys(info.title)
# Fill the location box the same way, then submit the search form.
whereElem = driver.find_element_by_xpath('//*[@id="rs-search-location"]')
whereElem.send_keys(Keys.CONTROL, 'a')
whereElem.send_keys(info.zipCode)
whereElem.submit()
# Click on job search with recent search
#driver.find_element_by_xpath('//*[@id="hp-job-search"]').click()
# Wait 10s for page to load
driver.implicitly_wait(10)
# Create jobs list from the search-result summaries.
# NOTE(review): the find_element(s)_by_* helpers were removed in
# Selenium 4; pin selenium<4 or migrate to find_element(By.XPATH, ...).
jobs = driver.find_elements_by_class_name('summary')
# Main window handle, kept so we can return after pop-up apply pages.
main = driver.window_handles[0]
for job in jobs:
    driver.implicitly_wait(20)
    # Click on each job
    job.click()
    driver.implicitly_wait(20)
    # if driver.find_element_by_xpath('//*[@id="expired-job-alert"]'):
    # break
    # Click on apply job
    driver.find_element_by_xpath("//a[@id='PrimaryJobApply']").click()
    # If open new page, go to that page and close the page
    if len(driver.window_handles) == 2:
        driver.switch_to.window(driver.window_handles[1])
        driver.close()
        driver.switch_to.window(main)
    #elif driver.find_element_by_xpath('//*[@id="SpeedApply"]/section/div/div[2]/a').is_enabled():
    #continue
    else:
        # Apply using monster only click apply
        driver.find_element_by_xpath('//*[@id="applybtn"]').click()
    # TODO: Fix when go back to main page go to next job
    # If job already applied, alert pop up, then go back to main page
    # NOTE(review): this break stops the whole loop on the first
    # already-applied job (see the TODO above).
    if driver.find_element_by_xpath('//*[@id="ApplyAlert"]').is_enabled():
        driver.back()
        driver.back()
        break
da7b941c40ec775014f79af9856aca9760669a06 | 1,234 | py | Python | data/p4VQE/R4/benchmark/startPyquil170.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p4VQE/R4/benchmark/startPyquil170.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p4VQE/R4/benchmark/startPyquil170.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=4
# total number=12
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(0) # number=1
prog += H(1) # number=2
prog += H(2) # number=3
prog += H(3) # number=4
prog += Y(3) # number=5
prog += SWAP(1,0) # number=6
prog += SWAP(1,0) # number=7
prog += SWAP(2,0) # number=8
prog += SWAP(2,0) # number=9
prog += CNOT(3,0) # number=10
prog += CNOT(3,0) # number=11
# circuit end
return prog
def summrise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('4q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil170.csv","w")
print(summrise_results(bitstrings),file=writefile)
writefile.close()
| 23.283019 | 64 | 0.599676 |
efdcae5ded4bf8b6654d9a311c777d0d25a8d295 | 12,910 | py | Python | backend/final.py | vsraghavhk/interactive-gcoder | 1aa7b20a90a03de7b2df51e27b6a4e630eb75f6f | [
"Apache-2.0"
] | 1 | 2020-12-02T19:29:32.000Z | 2020-12-02T19:29:32.000Z | backend/final.py | vsraghavhk/interactive-gcoder | 1aa7b20a90a03de7b2df51e27b6a4e630eb75f6f | [
"Apache-2.0"
] | null | null | null | backend/final.py | vsraghavhk/interactive-gcoder | 1aa7b20a90a03de7b2df51e27b6a4e630eb75f6f | [
"Apache-2.0"
] | null | null | null | import os
import json
import math
from random import randrange
import matplotlib.pyplot as plt
from create_json import make_json
from collections import defaultdict
# Default output path for the generated G-code; Model.__init__ replaces it
# with a name derived from the chosen parameters.
filename = "default.gcode"
# JSON file holding all user-tunable printing parameters.
data_file = "variables.json"

# Create the parameter file with defaults on first run.
# Idiom fix: ``not os.path.exists(...)`` instead of ``== False``.
if not os.path.exists(data_file):
    make_json()
class Model:
    '''
    Polygonal vase model driven by the parameters in ``variables.json``.

    Given a number of sides and an edge length (or radius), the model
    derives the remaining geometry (circumradius, base-layer vertices) and
    can emit layer-by-layer G-code, optionally deforming each layer with
    one of several shaping functions (spiral, wave, screw, random, shift).
    The radius/edge-length helpers are also usable on their own.
    '''
    def __init__(self, file=data_file):
        # Load every parameter from the JSON file, derive the geometry,
        # write the (possibly updated) values back, and pick the output
        # G-code filename from the chosen function/sides/edge length.
        json_file = open(file)
        data = json.load(json_file)
        # User accessible
        self.Z_shift = data['z_shift']            # layer height (mm)
        self.bed_temp = data['bed_temp']
        self.nozzle_temp = data['nozzle_temp']
        self.E_rate = data['e_rate']              # mm filament per cm of print
        self.F_rate = data['f_rate']              # feed rate for G1 moves
        self.E_mode = data['e_mode']              # extrusion: 1=absolute, 0=relative
        self.num_sides = data['num_sides']
        self.edge_length = data['edge_length']
        self.num_layers = data['num_layers']
        self.func_choice = data['func_choice']    # layer-shaping function id
        self.wave_amp = data['wave_amp']
        self.radius = None                        # derived below by calc_rad_el()
        self.random_value = data['random_value']
        # Non user accesible
        self.center_x = data['center_x']
        self.center_y = data['center_y']
        self.base_x = []
        self.base_y = []
        self.backup_radius = None                 # last-seen values, used by
        self.backup_el = None                     # calc_rad_el() to detect changes
        self.cur_layer = 0
        self.calc_rad_el()
        self.find_base_points()
        self.update_json()
        # Wave parameters
        self.og_radius = self.radius              # rest radius for wave_xy()
        self.wave_dir = 1                         # +1 growing / -1 shrinking
        global filename
        filename = "../gcodes/func{}_{}sides_{}mmEdge.gcode".format(
            self.func_choice, self.num_sides, self.edge_length)
        json_file.close()
# ----- For Updating json only. ----- #
# Only function to be called from web-app
    def prep_json(self):
        '''
        Wrap the current contents of the parameter file in square brackets
        so it parses as a JSON array.

        NOTE(review): ``contents.insert(-1, ...)`` places the closing
        bracket *before* the last existing line rather than after it --
        confirm this is the intended layout before relying on the output.
        '''
        f = open(data_file, "r")
        contents = f.readlines()
        f.close()
        contents.insert(0, '[\n')
        contents.insert(-1, '\n]')
        f = open(data_file, "w")
        contents = "".join(contents)
        f.write(contents)
        f.close()
def update_json(self):
global filename
self.calc_rad_el()
data = {}
data = {
'filename': filename,
'random_value': self.random_value,
'func_choice': self.func_choice,
'num_sides': self.num_sides,
'edge_length': self.edge_length,
'num_layers': self.num_layers,
'radius': self.radius,
'base_x': self.base_x,
'base_y': self.base_y,
'backup_radius': self.backup_radius,
'backup_el': self.backup_el,
'center_x': self.center_x,
'center_y': self.center_y,
'z_shift': self.Z_shift,
'bed_temp': self.bed_temp,
'nozzle_temp': self.nozzle_temp,
'e_rate': self.E_rate, # mm of filament per cm of print
'f_rate': self.F_rate,
'e_mode': self.E_mode, # Absolute(1)[default] / Relative(0)
'wave_amp': self.wave_amp
}
with open(data_file, 'w') as outfile:
json.dump(data, outfile, indent=4, sort_keys=True)
# ----- VALUE SETTERS ----- #
# For backend use only. Do not call from frontend.
def calc_rad_el(self):
# Radius = s / (2* sin(180/n))
if self.edge_length == None and self.radius == None:
# First init
self.radius = self.edge_length / (2 * math.sin(180/self.num_sides))
self.backup_radius = self.radius
self.backup_el = self.edge_length
elif self.backup_el == self.edge_length and self.backup_radius != self.radius:
# Update edge length since radius is changed
self.edge_length = abs(
round(self.radius * (2 * math.sin(180/self.num_sides)), 2))
else:
# Update radius since edge length ois changed (or both changed)
self.radius = abs(
round(self.edge_length / (2 * math.sin(180/self.num_sides)), 2))
self.backup_el = self.edge_length
self.backup_radius = self.radius
def find_base_points(self):
# Find the vertices of the base layer for a n-sided polygon.
self.base_x = []
self.base_y = []
for i in range(0, self.num_sides):
self.base_x.append(
round(self.center_x + self.radius * math.cos(2*math.pi*i/self.num_sides), 2))
self.base_y.append(
round(self.center_y + self.radius * math.sin(2*math.pi*i/self.num_sides), 2))
def calc_E_val(self, cur_E, x1, y1, x2, y2):
#
# E_rate is mm of filament to use per cm of print
dist = math.sqrt((x1-x2)**2 + (y1-y2)**2) # in mm
return round(((cur_E * self.E_mode) + (dist * self.E_rate / 10)), 2)
# ----- Functions to deform model ----- #
def func(self, x, y):
# Parsing function
if self.func_choice == 2:
return self.spiral_xy(x, y)
elif self.func_choice == 3:
return self.wave_xy(x, y)
elif self.func_choice == 4:
return self.screw_xy(x, y)
elif self.func_choice == 5:
return self.random_xy(x, y)
elif self.func_choice == 6:
return self.shift_xy(x, y)
elif self.func_choice == 7:
return self.shift_turn_xy(x, y)
else: # For choice=1 or anything else
return self.straight_xy(x, y)
def straight_xy(self, x, y, cx=-1, cy=-1):
# Find the vertices of the base layer for a n-sided polygon.
x = []
y = []
if cx == -1 and cy == -1:
for i in range(0, self.num_sides):
x.append(round(self.center_x + self.radius *
math.cos(2*math.pi*i/self.num_sides), 2))
y.append(round(self.center_y + self.radius *
math.sin(2*math.pi*i/self.num_sides), 2))
else:
for i in range(0, self.num_sides):
x.append(round(cx + self.radius *
math.cos(2*math.pi*i/self.num_sides), 2))
y.append(round(cy + self.radius *
math.sin(2*math.pi*i/self.num_sides), 2))
return x, y
def spiral_xy(self, x, y, cx=-1, cy=-1):
# Twists the layer by 1 degree (0.0174533 radians) every layer.
x = []
y = []
if cx == -1 and cy == -1:
for i in range(0, self.num_sides):
x.append(round(self.center_x + self.radius * math.cos((2 *
math.pi*i/self.num_sides)+(self.cur_layer*0.0174533)), 2))
y.append(round(self.center_y + self.radius * math.sin((2 *
math.pi*i/self.num_sides)+(self.cur_layer*0.0174533)), 2))
else:
for i in range(0, self.num_sides):
x.append(round(cx + self.radius * math.cos((2*math.pi *
i/self.num_sides)+(self.cur_layer*0.0174533)), 2))
y.append(round(cy + self.radius * math.sin((2*math.pi *
i/self.num_sides)+(self.cur_layer*0.0174533)), 2))
return x, y
def wave_xy(self, x, y):
if self.radius >= (self.og_radius + self.wave_amp):
self.wave_dir = -1
elif self.radius <= (self.og_radius - self.wave_amp):
self.wave_dir = 1
self.radius = self.radius + self.wave_dir*0.1
self.calc_rad_el()
return self.straight_xy(x, y)
def random_xy(self, x, y):
self.radius = self.radius + \
(randrange(self.random_value) - self.random_value/2)/10
self.calc_rad_el()
return self.straight_xy(x, y)
def screw_xy(self, x, y):
if self.radius >= 2:
self.radius = self.radius - 0.05
self.calc_rad_el()
return self.spiral_xy(x, y)
def shift_xy(self, x, y):
cx = self.center_x + self.cur_layer*0.1
cy = self.center_y
return self.straight_xy(x, y, cx, cy)
def shift_turn_xy(self, x, y):
cx = self.center_x + self.cur_layer*0.1
cy = self.center_y
return self.spiral_xy(x, y, cx, cy)
# ----- GCODE FUNCTIONS ----- #
    def write_init_settings(self):
        '''
        Create/truncate the output G-code file and write the printer
        preamble: heat-up commands, the extrusion-mode selector, and a
        two-pass purge line along the bed edge to prime the nozzle.
        '''
        global filename
        new = open(filename, "w+")
        # Settings
        new.write("(File: {})\n".format(filename))
        # M104/M109: set nozzle temperature, then wait for it.
        new.write("M104 S{}\n".format(self.nozzle_temp))
        new.write("M105\n")
        new.write("M109 S{}\n".format(self.nozzle_temp))
        # M140/M190: set bed temperature, then wait for it.
        new.write("M140 S{}\n".format(self.bed_temp))
        new.write("M105\n")
        new.write("M190 S{}\n".format(self.bed_temp))
        # M82 absolute / M83 relative extrusion, mirroring E_mode.
        # NOTE(review): "Extrustion" typo below is in the emitted G-code
        # comment text; left untouched here to keep output byte-identical.
        if self.E_mode == 1:
            new.write("M82; Absolute Extrustion\n")
        else:
            new.write("M83; Relative Extrusion\n")
        # Printing the line on the side to make sure nozzle works.
        E = 0
        new.write("G92 E0; Reset Extruder\n")
        new.write("G1 Z2.0 F3000 ; Move Z Axis up\n")
        new.write(
            "G1 X20 Y4.7 Z{} F5000.0 ; Move to start position\n".format(self.Z_shift))
        # Each purge pass is 180 mm long; extrude E_rate mm per cm of travel.
        E = (E * self.E_mode) + (180*self.E_rate/10)
        new.write("G1 X200 Y4.7 Z{} F1500.0 E{} ; Draw the first line\n".format(
            self.Z_shift, E))
        new.write(
            "G1 X200 Y5 Z{} F5000.0 ; Move to side a little\n".format(self.Z_shift))
        E = (E * self.E_mode) + (180*self.E_rate/10)
        new.write("G1 X20 Y5 Z{} F1500.0 E{} ; Draw the second line\n".format(
            self.Z_shift, E))
        new.write("G92 E0 ; Reset Extruder\n")
        new.write("(begin model)\n")
        new.close()
    def make_gcode(self):
        '''
        Write the full print: the preamble (write_init_settings), then one
        polygon loop per layer -- each layer's vertices produced by the
        selected shaping function -- and finally two moves that lift the
        nozzle clear of the finished part.
        '''
        self.write_init_settings()
        #filename = filename + ".gcode"
        new = open(filename, "a")
        # Start from the base polygon; func() replaces x/y every layer.
        x = self.base_x
        y = self.base_y
        z = 0
        E_val = 0
        for i in range(self.num_layers):
            self.cur_layer = i
            # Update layer height
            z = round((z + self.Z_shift), 2)
            # Calc x and y for layer.
            x, y = self.func(x, y)
            new.write("(Layer {})\n".format(i+1))
            for v in range(self.num_sides):
                if self.E_mode == 0:  # If relative, E becomes 0 every time
                    E_val = 0
                if v == 0:
                    # First point and start extrusion
                    # (M101 turns the extruder on for the loop.)
                    new.write("G1 X{} Y{} Z{} F{}\n".format(
                        x[v], y[v], z, self.F_rate))
                    new.write("M101\n")
                else:
                    # (n-1 lines with extrusion)
                    # Calc and update E_val for side.
                    E_val = self.calc_E_val(E_val, x[v-1], y[v-1], x[v], y[v])
                    new.write("G1 X{} Y{} Z{} F{} E{}\n".format(
                        x[v], y[v], z, self.F_rate, E_val))
            # Return back to staring point and stop extrusion (M103).
            E_val = self.calc_E_val(E_val, x[-1], y[-1], x[0], y[0])
            new.write("G1 X{} Y{} Z{} F{} E{}\n".format(
                x[0], y[0], z, self.F_rate, E_val))
            new.write("M103\n")
        # Lift 10 mm above the last layer, then park away from the print.
        z = z+10
        new.write("G1 X{} Y{} Z{} F{}; Move away from print\n".format(
            x[0], y[0], z, self.F_rate))
        new.write(
            "G1 X5 Y200 Z{} F{}; Move away from print\n".format(z, self.F_rate))
        new.close()
def main():
    """Entry point: build a Model with its configured settings and emit
    the G-code file."""
    Model().make_gcode()


if __name__ == "__main__":
    main()
'''
Ranges:
Z_shift = 0.1 to 5
Bed_temp = 0 to 70
Nozzle_temp = 200 to 280
E_rate = 0 to 1 (in mm of filament per cm of print)
F_rate = 500 to 3000
E_mode = 1 (For Absolute) or 0 (for relative)
num_sides = 2 to infinite (2 will produce a line)
edge_length = 1mm to 50mm (Depends on edge radius)
radius = 1mm to 100mm
num_layers = 1 to 200/Z_shift
wave_amp = 0.1 mm to 50 (Any amplitude greater than the radius risks tipping the print over, but the user should still be able to enter it.)
random_value = 1 to 10
func_choice = {
1 : straight
2 : spiral
3 : wave
4 : Screw
5 : random
} # Anything we add after this will be 6, 7, ..
'''
| 36.468927 | 130 | 0.517893 |
4e807582d37d2680946b2e47678fe298d58265d7 | 12,937 | py | Python | onnxruntime/test/python/quantization/test_op_pad.py | SiriusKY/onnxruntime | 3c5853dcbc9d5dda2476afa8c6105802d2b8e53d | [
"MIT"
] | 1 | 2022-03-16T11:30:16.000Z | 2022-03-16T11:30:16.000Z | onnxruntime/test/python/quantization/test_op_pad.py | SiriusKY/onnxruntime | 3c5853dcbc9d5dda2476afa8c6105802d2b8e53d | [
"MIT"
] | null | null | null | onnxruntime/test/python/quantization/test_op_pad.py | SiriusKY/onnxruntime | 3c5853dcbc9d5dda2476afa8c6105802d2b8e53d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import unittest
import onnx
import numpy as np
from onnx import helper, TensorProto
from onnxruntime.quantization import quantize_static, quantize_dynamic, QuantType, QuantFormat
from op_test_utils import TestDataFeeds, check_model_correctness, check_op_type_count, check_qtype_by_node_type
class TestOpQuatizerPad(unittest.TestCase):
    """Quantization tests for the ONNX Pad operator (QOperator format).

    NOTE: the class name misspells "Quantizer"; it is kept unchanged
    because test selectors may reference it by name.
    """

    def input_feeds(self, n, name2shape):
        """Return a TestDataFeeds yielding *n* feeds; each feed maps every
        name to a random float32 tensor with values drawn from {-1, 0, 1}."""
        input_data_list = []
        for i in range(n):
            inputs = {}
            for name, shape in name2shape.items():
                inputs.update({name: np.random.randint(-1, 2, shape).astype(np.float32)})
            # Fix: append the dict directly instead of extend([inputs]).
            input_data_list.append(inputs)
        dr = TestDataFeeds(input_data_list)
        return dr

    def construct_model_pad(self, output_model_path, pad_mode, pad_input_shape, pad_dims, constant_value=None):
        """Build and save a minimal model containing a single Pad node.

        Graph:  (input) -> Pad -> (output)
        """
        rank = len(pad_input_shape)
        self.assertEqual(rank * 2, len(pad_dims))
        input_tensor = helper.make_tensor_value_info('input', TensorProto.FLOAT, pad_input_shape)
        pad_dims_initializer = helper.make_tensor('pad_dims', TensorProto.INT64, [2 * rank], pad_dims)
        # Output dims grow by the begin+end pad amounts per axis.
        output_shape = [sum(e) for e in list(zip(pad_input_shape, pad_dims[:rank], pad_dims[rank:]))]
        output_tensor = helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape)
        inputs = ['input', 'pad_dims']
        initializers = [pad_dims_initializer]
        # The explicit padding value is only valid in 'constant' mode.
        if (constant_value is not None) and (pad_mode is None or pad_mode == 'constant'):
            constant_value_tensor = helper.make_tensor('padding_value', TensorProto.FLOAT, [], [constant_value])
            inputs.extend(['padding_value'])
            initializers.extend([constant_value_tensor])
        kwargs = {'mode': pad_mode} if pad_mode is not None else {}
        pad_node = helper.make_node('Pad', inputs, ['output'], name='PadNode', **kwargs)
        graph = helper.make_graph([pad_node], 'TestOpQuantizerPad_test_model',
                                  [input_tensor], [output_tensor], initializer=initializers)
        model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 13)])
        model.ir_version = 7  # use stable onnx ir version
        onnx.save(model, output_model_path)

    def construct_model_conv_pad(self, output_model_path, conv_input_shape, conv_weight_shape,
                                 pad_input_shape, pad_mode, pad_dims, constant_value=None):
        """Build and save a model where Pad consumes a quantizable Conv output.

        Graph:
              (input)
                 |
                Conv
               /    \\
          Identity   Pad
             |         |
        (identity_out) (output)
        """
        rank = len(pad_input_shape)
        self.assertEqual(rank * 2, len(pad_dims))
        input_tensor = helper.make_tensor_value_info('input', TensorProto.FLOAT, conv_input_shape)
        conv_weight_arr = np.random.randint(-1, 2, conv_weight_shape).astype(np.float32)
        conv_weight_initializer = onnx.numpy_helper.from_array(conv_weight_arr, name='conv1_weight')
        conv_node = onnx.helper.make_node('Conv', ['input', 'conv1_weight'], ['conv_output'], name='conv_node')
        identity_out = helper.make_tensor_value_info('identity_out', TensorProto.FLOAT, pad_input_shape)
        identity_node = helper.make_node('Identity', ['conv_output'], ['identity_out'], name='IdentityNode')
        pad_dims_initializer = helper.make_tensor('pad_dims', TensorProto.INT64, [2 * rank], pad_dims)
        output_shape = [sum(e) for e in list(zip(pad_input_shape, pad_dims[:rank], pad_dims[rank:]))]
        output_tensor = helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape)
        pad_inputs = ['conv_output', 'pad_dims']
        initializers = [conv_weight_initializer, pad_dims_initializer]
        if (constant_value is not None) and (pad_mode is None or pad_mode == 'constant'):
            constant_value_tensor = helper.make_tensor('padding_value', TensorProto.FLOAT, [], [constant_value])
            pad_inputs.extend(['padding_value'])
            initializers.extend([constant_value_tensor])
        kwargs = {'mode': pad_mode} if pad_mode is not None else {}
        pad_node = helper.make_node('Pad', pad_inputs, ['output'], name='pad_node', **kwargs)
        graph = helper.make_graph([conv_node, identity_node, pad_node], 'TestOpQuantizerPad_test_model',
                                  [input_tensor], [identity_out, output_tensor], initializer=initializers)
        model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 13)])
        model.ir_version = 7  # use stable onnx ir version
        onnx.save(model, output_model_path)

    def quantize_model(self, model_fp32_path, model_i8_path, data_reader=None,
                       activation_type=QuantType.QUInt8, weight_type=QuantType.QUInt8, extra_options=None):
        """Quantize statically when a data reader is supplied, dynamically otherwise.

        Fix: extra_options previously defaulted to a shared mutable dict.
        """
        if extra_options is None:
            extra_options = {}
        if data_reader is not None:
            quantize_static(model_fp32_path, model_i8_path, data_reader, reduce_range=True, quant_format=QuantFormat.QOperator,
                            activation_type=activation_type, weight_type=weight_type, extra_options=extra_options)
        else:
            quantize_dynamic(model_fp32_path, model_i8_path, reduce_range=True,
                             weight_type=weight_type, extra_options=extra_options)

    def verify_should_not_trigger(self, quantize_mode='static'):
        """A lone Pad (no quantizable producer) must stay un-quantized."""
        np.random.seed(108)
        model_fp32_path = 'qop_pad_notrigger_fp32_{}.onnx'.format(quantize_mode)
        model_i8_path = 'qop_pad_notrigger_i8_{}.onnx'.format(quantize_mode)
        data_reader = self.input_feeds(1, {'input': [1, 16, 31, 31]})
        self.construct_model_pad(model_fp32_path, 'constant', [1, 16, 31, 31], [0, 0, 1, 2, 0, 0, 3, 4])
        self.quantize_model(model_fp32_path, model_i8_path, None if quantize_mode != 'static' else data_reader)
        data_reader.rewind()
        # DequantizeLinear=0: the pad node has not been quantized because its input is not quantized.
        check_op_type_count(self, model_i8_path, DynamicQuantizeLinear=0, QuantizeLinear=0, DequantizeLinear=0)
        check_model_correctness(self, model_fp32_path, model_i8_path, data_reader.get_next())

    def test_static_quantize_no_trigger(self):
        self.verify_should_not_trigger(quantize_mode='static')

    def test_dynamic_quantize_no_trigger(self):
        self.verify_should_not_trigger(quantize_mode='dynamic')

    def verify_quantize_with_pad_mode(self, pad_mode, constant_value=None, quantize_mode='static', rtol=0.01, atol=0.05,
                                      activation_type=QuantType.QUInt8, weight_type=QuantType.QUInt8, extra_options=None):
        """Quantize the Conv+Pad model and check op counts, tensor qtypes
        and numerical closeness for one pad mode / activation-weight combo.

        Fix: extra_options previously defaulted to a shared mutable dict.
        """
        if extra_options is None:
            extra_options = {}
        np.random.seed(108)
        tag_pad_mode = pad_mode if pad_mode is not None else 'none'
        tag_constant_value = '' if constant_value is None else '_value'
        model_fp32_path = 'qop_pad_{}_fp32_{}{}.onnx'.format(quantize_mode, tag_pad_mode, tag_constant_value)
        data_reader = self.input_feeds(1, {'input': [1, 8, 33, 33]})
        self.construct_model_conv_pad(model_fp32_path, [1, 8, 33, 33], [16, 8, 3, 3], [1, 16, 31, 31],
                                      pad_mode, [0, 0, 1, 2, 0, 0, 3, 4], constant_value=constant_value)
        activation_proto_qtype = TensorProto.UINT8 if activation_type == QuantType.QUInt8 else TensorProto.INT8
        activation_type_str = 'u8' if (activation_type == QuantType.QUInt8) else 's8'
        weight_type_str = 'u8' if (weight_type == QuantType.QUInt8) else 's8'
        model_i8_path = 'qop_pad_{}_i8_{}{}_{}{}.onnx'.format(
            quantize_mode, tag_pad_mode, tag_constant_value, activation_type_str, weight_type_str)
        data_reader.rewind()
        self.quantize_model(model_fp32_path, model_i8_path, None if quantize_mode != 'static' else data_reader,
                            activation_type=activation_type, weight_type=weight_type, extra_options=extra_options)
        # DequantizeLinear=2 means there is one DequantizeLinear node after both conv and pad,
        # i.e. the pad node runs with quantized semantics.
        # In dynamic quantize mode the pad operator is in fact not quantized, since its input is fp32.
        if quantize_mode != 'static':
            kwargs = {'DynamicQuantizeLinear': 1} if activation_type == QuantType.QUInt8 else {'QuantizeLinear': 1}
        else:
            kwargs = {'DequantizeLinear': 2, 'QuantizeLinear': 1}
        check_op_type_count(self, model_i8_path, **kwargs)
        # check node input/output type if such node exists in the graph
        qnode_io_qtypes = {'QuantizeLinear': [['i', 2, activation_proto_qtype], ['o', 0, activation_proto_qtype]]}
        qnode_io_qtypes.update({'DequantizeLinear': [['i', 2, activation_proto_qtype]]})
        qnode_io_qtypes.update({'ConvInteger': [['i', 2, activation_proto_qtype]]})
        check_qtype_by_node_type(self, model_i8_path, qnode_io_qtypes)
        data_reader.rewind()
        check_model_correctness(self, model_fp32_path, model_i8_path, data_reader.get_next(), rtol=rtol, atol=atol)

    def test_static_mode_edge(self):
        self.verify_quantize_with_pad_mode('edge', constant_value=None)

    def test_static_mode_reflect(self):
        self.verify_quantize_with_pad_mode('reflect', constant_value=None)

    def test_static_mode_constant_default(self):
        self.verify_quantize_with_pad_mode('constant', constant_value=None)

    def test_static_mode_constant_value(self):
        self.verify_quantize_with_pad_mode('constant', constant_value=3.75)

    def test_static_mode_edge_s8s8(self):
        self.verify_quantize_with_pad_mode('edge', constant_value=None, rtol=0.1, atol=0.1, activation_type=QuantType.QInt8,
                                           weight_type=QuantType.QInt8, extra_options={'ActivationSymmetric': True})

    def test_static_mode_reflect_s8s8(self):
        self.verify_quantize_with_pad_mode('reflect', constant_value=None, rtol=0.1, atol=0.1, activation_type=QuantType.QInt8,
                                           weight_type=QuantType.QInt8, extra_options={'ActivationSymmetric': True})

    def test_static_mode_constant_default_s8s8(self):
        self.verify_quantize_with_pad_mode('constant', constant_value=None, rtol=0.1, atol=0.1, activation_type=QuantType.QInt8,
                                           weight_type=QuantType.QInt8, extra_options={'ActivationSymmetric': True})

    def test_static_mode_constant_value_s8s8(self):
        self.verify_quantize_with_pad_mode('constant', constant_value=3.75, rtol=0.1, atol=0.1, activation_type=QuantType.QInt8,
                                           weight_type=QuantType.QInt8, extra_options={'ActivationSymmetric': True})

    def test_dynamic_mode_edge(self):
        self.verify_quantize_with_pad_mode('edge', constant_value=None, quantize_mode='dynamic')

    def test_dynamic_mode_reflect(self):
        self.verify_quantize_with_pad_mode('reflect', constant_value=None, quantize_mode='dynamic')

    def test_dynamic_mode_constant_default(self):
        self.verify_quantize_with_pad_mode('constant', constant_value=None, quantize_mode='dynamic')

    def test_dynamic_mode_constant_value(self):
        self.verify_quantize_with_pad_mode('constant', constant_value=3.75, quantize_mode='dynamic')

    # TODO: uncomment the following after ConvInteger s8 is supported.
    # def test_dynamic_mode_edge_s8s8(self):
    #     self.verify_quantize_with_pad_mode('edge', constant_value=None, quantize_mode='dynamic', activation_type=QuantType.QInt8,
    #                                        weight_type=QuantType.QInt8, extra_options={'ActivationSymmetric': True})

    # def test_dynamic_mode_reflect_s8s8(self):
    #     self.verify_quantize_with_pad_mode('reflect', constant_value=None, quantize_mode='dynamic', activation_type=QuantType.QInt8,
    #                                        weight_type=QuantType.QInt8, extra_options={'ActivationSymmetric': True})

    # def test_dynamic_mode_constant_default_s8s8(self):
    #     self.verify_quantize_with_pad_mode('constant', constant_value=None, quantize_mode='dynamic', activation_type=QuantType.QInt8,
    #                                        weight_type=QuantType.QInt8, extra_options={'ActivationSymmetric': True})

    # def test_dynamic_mode_constant_value_s8s8(self):
    #     self.verify_quantize_with_pad_mode('constant', constant_value=3.75, quantize_mode='dynamic', activation_type=QuantType.QInt8,
    #                                        weight_type=QuantType.QInt8, extra_options={'ActivationSymmetric': True})
# Allow running this test module directly (python test_op_pad.py).
if __name__ == '__main__':
    unittest.main()
| 59.344037 | 135 | 0.677205 |
fc8803ba4c30643d97603596570ca8651cea40ce | 4,829 | py | Python | build/lib.macosx-10.9-x86_64-3.9/mediapipe/calculators/core/flow_limiter_calculator_pb2.py | hanggaoh/mediapipe | 9eafb85cd7daa8986be7e4cc8af2f256f6f83451 | [
"Apache-2.0"
] | 2 | 2021-08-11T15:50:12.000Z | 2021-09-03T17:53:47.000Z | build/lib.macosx-10.9-x86_64-3.9/mediapipe/calculators/core/flow_limiter_calculator_pb2.py | hanggaoh/mediapipe | 9eafb85cd7daa8986be7e4cc8af2f256f6f83451 | [
"Apache-2.0"
] | 1 | 2022-01-20T11:17:50.000Z | 2022-01-20T11:17:50.000Z | build/lib.macosx-10.9-x86_64-3.9/mediapipe/calculators/core/flow_limiter_calculator_pb2.py | hanggaoh/mediapipe | 9eafb85cd7daa8986be7e4cc8af2f256f6f83451 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mediapipe/calculators/core/flow_limiter_calculator.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# ---------------------------------------------------------------------------
# protoc-generated descriptor code: do not hand-edit; regenerate from
# flow_limiter_calculator.proto instead.
# ---------------------------------------------------------------------------
_sym_db = _symbol_database.Default()
from mediapipe.framework import calculator_pb2 as mediapipe_dot_framework_dot_calculator__pb2
# Newer protobuf exposes the dependency module as an attribute; older
# releases nest it under the package path, hence the fallback.
try:
  mediapipe_dot_framework_dot_calculator__options__pb2 = mediapipe_dot_framework_dot_calculator__pb2.mediapipe_dot_framework_dot_calculator__options__pb2
except AttributeError:
  mediapipe_dot_framework_dot_calculator__options__pb2 = mediapipe_dot_framework_dot_calculator__pb2.mediapipe.framework.calculator_options_pb2

# File-level descriptor (the serialized_pb blob is the compiled .proto).
DESCRIPTOR = _descriptor.FileDescriptor(
  name='mediapipe/calculators/core/flow_limiter_calculator.proto',
  package='mediapipe',
  syntax='proto2',
  serialized_options=b'\242\002\tMediaPipe',
  create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n8mediapipe/calculators/core/flow_limiter_calculator.proto\x12\tmediapipe\x1a$mediapipe/framework/calculator.proto\"\xcd\x01\n\x1c\x46lowLimiterCalculatorOptions\x12\x18\n\rmax_in_flight\x18\x01 \x01(\x05:\x01\x31\x12\x17\n\x0cmax_in_queue\x18\x02 \x01(\x05:\x01\x30\x12\"\n\x11in_flight_timeout\x18\x03 \x01(\x03:\x07\x31\x30\x30\x30\x30\x30\x30\x32V\n\x03\x65xt\x12\x1c.mediapipe.CalculatorOptions\x18\xf8\xa0\xf4\x9b\x01 \x01(\x0b\x32\'.mediapipe.FlowLimiterCalculatorOptionsB\x0c\xa2\x02\tMediaPipe'
  ,
  dependencies=[mediapipe_dot_framework_dot_calculator__pb2.DESCRIPTOR,])

# Message descriptor for FlowLimiterCalculatorOptions
# (fields: max_in_flight=1, max_in_queue=0, in_flight_timeout=1000000 by default).
_FLOWLIMITERCALCULATOROPTIONS = _descriptor.Descriptor(
  name='FlowLimiterCalculatorOptions',
  full_name='mediapipe.FlowLimiterCalculatorOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='max_in_flight', full_name='mediapipe.FlowLimiterCalculatorOptions.max_in_flight', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='max_in_queue', full_name='mediapipe.FlowLimiterCalculatorOptions.max_in_queue', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='in_flight_timeout', full_name='mediapipe.FlowLimiterCalculatorOptions.in_flight_timeout', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=True, default_value=1000000,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
    _descriptor.FieldDescriptor(
      name='ext', full_name='mediapipe.FlowLimiterCalculatorOptions.ext', index=0,
      number=326963320, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=True, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=110,
  serialized_end=315,
)

# Register the descriptor and build the concrete message class.
DESCRIPTOR.message_types_by_name['FlowLimiterCalculatorOptions'] = _FLOWLIMITERCALCULATOROPTIONS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
FlowLimiterCalculatorOptions = _reflection.GeneratedProtocolMessageType('FlowLimiterCalculatorOptions', (_message.Message,), {
  'DESCRIPTOR' : _FLOWLIMITERCALCULATOROPTIONS,
  '__module__' : 'mediapipe.calculators.core.flow_limiter_calculator_pb2'
  # @@protoc_insertion_point(class_scope:mediapipe.FlowLimiterCalculatorOptions)
  })
_sym_db.RegisterMessage(FlowLimiterCalculatorOptions)

# Hook this message up as an extension of CalculatorOptions.
_FLOWLIMITERCALCULATOROPTIONS.extensions_by_name['ext'].message_type = _FLOWLIMITERCALCULATOROPTIONS
mediapipe_dot_framework_dot_calculator__options__pb2.CalculatorOptions.RegisterExtension(_FLOWLIMITERCALCULATOROPTIONS.extensions_by_name['ext'])
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 47.811881 | 522 | 0.807414 |
8fe338fdb4e92c4223b2ea0db035bd6d5fc39adc | 7,442 | py | Python | py_glo_boards_api/api.py | Transparent-CDN/py-glo-board | 39dc2a6a7a658490b4e572de2e540afdd6461f83 | [
"MIT"
] | 6 | 2019-12-12T13:10:18.000Z | 2021-11-03T08:37:31.000Z | py_glo_boards_api/api.py | Transparent-CDN/py-glo-board | 39dc2a6a7a658490b4e572de2e540afdd6461f83 | [
"MIT"
] | 2 | 2019-11-12T13:10:33.000Z | 2021-01-26T05:15:32.000Z | py_glo_boards_api/api.py | Transparent-CDN/py-glo-board | 39dc2a6a7a658490b4e572de2e540afdd6461f83 | [
"MIT"
] | 3 | 2019-10-22T10:08:55.000Z | 2021-11-23T17:55:52.000Z | import requests
import json
# Base endpoint of the GitKraken Glo Boards REST API; {0} is the method path.
API_URL = "https://gloapi.gitkraken.com/v1/glo/{0}"
# Seconds before `requests` gives up on the request.
CONNECT_TIMEOUT = 3.5
def _make_request(token, method_name, method='get', params=None, json_params=None, files=None):
    """Send one authenticated request to the Glo Boards API.

    Args:
        token: access token, sent as the ``access_token`` query parameter.
        method_name: path under the API root, e.g. ``'boards/<id>'``.
        method: HTTP verb passed to ``requests.request``.
        params: extra query-string parameters (merged with the token).
        json_params: JSON request body, if any.
        files: multipart payload for uploads, if any.

    Returns:
        ``True`` for an empty 204 reply, otherwise the decoded JSON body.

    Raises:
        GloException: when the API answers 400 or 404.
    """
    payload = {'access_token': token}
    # Fix: the old signature used a shared mutable default (params={});
    # it was never mutated here, but None is the safe idiom.
    payload.update(params or {})
    result = requests.request(method, API_URL.format(method_name), params=payload,
                              json=json_params, files=files, timeout=CONNECT_TIMEOUT)
    if result.status_code == 204:
        return True
    if result.status_code == 400 or result.status_code == 404:
        raise GloException(result.text)
    return result.json()
# Boards
def get_boards(token, fields, archived, page, per_page, sort):
    """List the boards visible to the token's user, paginated and sorted."""
    query = {
        'fields': fields,
        'archived': archived,
        'page': page,
        'per_page': per_page,
        'sort': sort,
    }
    return _make_request(token, 'boards', params=query)
def create_board(token, board_name):
    """Create a new board named *board_name* and return the API reply."""
    return _make_request(token, 'boards', 'post', json_params={'name': board_name})
def get_board(token, board_id, fields):
    """Fetch a single board, restricted to the requested *fields*."""
    return _make_request(token, 'boards/{}'.format(board_id),
                         params={'fields': fields})
def edit_board(token, board_id, board_name):
    """Rename an existing board."""
    payload = {'name': board_name}
    return _make_request(token, 'boards/{}'.format(board_id), 'post',
                         json_params=payload)
def delete_board(token, board_id):
    """Permanently delete the board identified by *board_id*."""
    return _make_request(token, 'boards/{}'.format(board_id), 'delete')
# Columns
def create_column(token, board_id, column_name, position=None):
    """Add a column to a board, optionally at a specific position.

    Fix: position is compared against None so that position=0 (insert
    at the far left) is sent to the API instead of being silently
    dropped by the falsy check.
    """
    data = {'name': column_name}
    if position is not None:
        data['position'] = position
    return _make_request(token, 'boards/{}/columns'.format(board_id), 'post',
                         json_params=data)
def edit_column(token, board_id, column_id, column_name, position=None):
    """Rename and/or reposition a column.

    Fix: position is compared against None so that position=0 is sent
    instead of being silently dropped by the falsy check.
    """
    data = {'name': column_name}
    if position is not None:
        data['position'] = position
    return _make_request(token, 'boards/{}/columns/{}'.format(board_id, column_id),
                         'post', json_params=data)
def delete_column(token, board_id, column_id):
    """Delete a column (and its cards) from a board."""
    return _make_request(token,
                         'boards/{}/columns/{}'.format(board_id, column_id),
                         'delete')
# Cards
def get_cards(token, board_id, fields, archived, page, per_page, sort):
    """List all cards on a board, paginated and sorted."""
    query = {
        'fields': fields,
        'archived': archived,
        'page': page,
        'per_page': per_page,
        'sort': sort,
    }
    return _make_request(token, 'boards/{}/cards'.format(board_id), 'get',
                         params=query)
def get_cards_column(token, board_id, column_id, fields, archived, page, per_page, sort):
    """List the cards of a single column, paginated and sorted."""
    query = {
        'fields': fields,
        'archived': archived,
        'page': page,
        'per_page': per_page,
        'sort': sort,
    }
    return _make_request(token,
                         'boards/{}/columns/{}/cards'.format(board_id, column_id),
                         'get', params=query)
def create_card(token, board_id, column_id, card_name, position=None, description=None,
                assignees=None, labels=None, due_date=None):
    """Create a card in *column_id* on *board_id*.

    Optional attributes are only included when supplied. Fix: position
    is compared against None so that position=0 (top of the column) is
    sent instead of being silently dropped by the falsy check.
    """
    method_name = 'boards/{}/cards'.format(board_id)
    data = {'name': card_name,
            'column_id': column_id}
    if position is not None:
        data['position'] = position
    if description:
        data['description'] = description
    if assignees:
        data['assignees'] = assignees
    if labels:
        data['labels'] = labels
    if due_date:
        data['due_date'] = due_date
    return _make_request(token, method_name, 'post', json_params=data)
def create_card_batch(token, board_id, batch):
    """Create many cards at once; *batch* is the raw JSON body."""
    return _make_request(token, 'boards/{}/cards/batch'.format(board_id),
                         'post', json_params=batch)
def get_card(token, board_id, card_id, fields):
    """Fetch one card, restricted to the requested *fields*."""
    return _make_request(token,
                         'boards/{}/cards/{}'.format(board_id, card_id),
                         'get', params={'fields': fields})
def edit_card(token, board_id, card_id, card_name, column_id=None, position=None, description=None,
              assignees=None, labels=None, due_date=None):
    """Update a card's name and any supplied optional attributes.

    Fix: position is compared against None so that position=0 (move to
    the top of the column) is sent instead of being silently dropped by
    the falsy check.
    """
    method_name = 'boards/{}/cards/{}'.format(board_id, card_id)
    data = {'name': card_name}
    if position is not None:
        data['position'] = position
    if description:
        data['description'] = description
    if assignees:
        data['assignees'] = assignees
    if labels:
        data['labels'] = labels
    if due_date:
        data['due_date'] = due_date
    if column_id:
        data['column_id'] = column_id
    return _make_request(token, method_name, 'post', json_params=data)
def delete_card(token, board_id, card_id):
    """Delete a card from a board."""
    return _make_request(token,
                         'boards/{}/cards/{}'.format(board_id, card_id),
                         'delete')
# Labels
def create_label(token, board_id, label):
    """Create *label* (an object exposing to_dict()) on a board."""
    return _make_request(token, 'boards/{}/labels/'.format(board_id),
                         'post', json_params=label.to_dict())
def edit_label(token, board_id, label_id, label):
    """Replace label *label_id* with the serialized form of *label*."""
    return _make_request(token,
                         'boards/{}/labels/{}'.format(board_id, label_id),
                         'post', json_params=label.to_dict())
def delete_label(token, board_id, label_id):
    """Delete a label from a board."""
    return _make_request(token,
                         'boards/{}/labels/{}'.format(board_id, label_id),
                         'delete')
# Attachments
def get_attachments(token, board_id, card_id, fields, archived, page, per_page, sort):
    """List a card's attachments, paginated and sorted."""
    query = {
        'fields': fields,
        'archived': archived,
        'page': page,
        'per_page': per_page,
        'sort': sort,
    }
    return _make_request(token,
                         'boards/{}/cards/{}/attachments'.format(board_id, card_id),
                         'get', params=query)
def create_attachment(token, board_id, card_id, attachment):
    # Upload *attachment* (an open file object) to a card as multipart
    # form data.
    # NOTE(review): the multipart field is named 'filename' here --
    # confirm against the Glo API docs, which may expect a different
    # part name (e.g. 'file').
    method_name = 'boards/{}/cards/{}/attachments'.format(board_id, card_id)
    files = {'filename': attachment}
    return _make_request(token, method_name, 'post', files=files)
# Comments
def get_comments(token, board_id, card_id, fields, archived, page, per_page, sort):
    """List a card's comments, paginated and sorted."""
    query = {
        'fields': fields,
        'archived': archived,
        'page': page,
        'per_page': per_page,
        'sort': sort,
    }
    return _make_request(token,
                         'boards/{}/cards/{}/comments'.format(board_id, card_id),
                         'get', params=query)
def create_comment(token, board_id, card_id, comment):
    """Add a text comment to a card."""
    return _make_request(token,
                         'boards/{}/cards/{}/comments'.format(board_id, card_id),
                         'post', json_params={'text': comment})
def edit_comment(token, board_id, card_id, comment_id, comment):
    """Replace the text of an existing comment."""
    path = 'boards/{}/cards/{}/comments/{}'.format(board_id, card_id, comment_id)
    return _make_request(token, path, 'post', json_params={'text': comment})
def delete_comment(token, board_id, card_id, comment_id):
    """Delete a comment from a card."""
    path = 'boards/{}/cards/{}/comments/{}'.format(board_id, card_id, comment_id)
    return _make_request(token, path, 'delete')
# User
def get_user(token, fields):
    """Return the authenticated user, restricted to *fields*.

    Fix: dropped the no-op ``'user'.format()`` call on a literal with
    no placeholders.
    """
    return _make_request(token, 'user', 'get', params={'fields': fields})
class GloException(Exception):
    """Raised by _make_request when the Glo API answers 400 (bad
    request) or 404 (not found); carries the raw response text."""
    pass
| 31.533898 | 138 | 0.671056 |
6d6dde7710ca4e799380b51d27017835d98246df | 1,061 | py | Python | day-16/part-2/francisco.py | TPXP/adventofcode-2019 | ee653d6bfb510d14f2c2b3efc730d328c16b3f71 | [
"MIT"
] | 8 | 2019-12-01T08:56:46.000Z | 2019-12-05T21:21:12.000Z | day-16/part-2/francisco.py | TPXP/adventofcode-2019 | ee653d6bfb510d14f2c2b3efc730d328c16b3f71 | [
"MIT"
] | 10 | 2019-11-25T09:56:20.000Z | 2021-05-10T19:57:48.000Z | day-16/part-2/francisco.py | TPXP/adventofcode-2019 | ee653d6bfb510d14f2c2b3efc730d328c16b3f71 | [
"MIT"
] | 5 | 2019-12-01T08:19:57.000Z | 2020-11-23T09:50:19.000Z | from tool.runners.python import SubmissionPy
import itertools
import numpy as np
def fft_iter_2(l, offset, times=100):
    """Run *times* rounds of the simplified FFT on the tail of the
    10000-fold repetition of *l*, starting at *offset*.

    Only valid in the second half of the repeated signal, where each
    output digit is simply the suffix sum (mod 10) of the input digits.
    """
    # Key assumption for part 2: the offset lies in the second half.
    assert offset > 10000 * len(l) // 2
    total = 10000 * len(l)
    # Materialize only the tail [offset, total) of the repeated signal.
    repeated = itertools.chain(l[offset % len(l):], itertools.cycle(l))
    tail = np.fromiter(itertools.islice(repeated, 0, total - offset),
                       dtype=np.int64)
    # Back-to-front, one FFT round becomes a running (cumulative) sum
    # taken mod 10 -- linear instead of quadratic.
    work = np.flip(tail)
    for _ in range(times):
        np.cumsum(work, out=work)
        np.mod(work, 10, out=work)
    return np.flip(work)


def solve_part2(l):
    """The first 7 digits of the signal encode the message offset; the
    answer is the eight digits found there after 100 FFT rounds."""
    msg_offset = int("".join(str(d) for d in l[:7]))
    transformed = fft_iter_2(l, msg_offset)
    return "".join(str(d) for d in transformed[:8])
class FranciscoSubmission(SubmissionPy):
    """Runner adapter: parse the raw puzzle input into digits and
    delegate to solve_part2."""

    def run(self, s):
        digits = [int(ch) for ch in s.strip()]
        return solve_part2(digits)
| 25.878049 | 86 | 0.549482 |
eb938b88cda4366a6ae98880a6b1151db9864c1c | 2,326 | py | Python | test/python/circuit/test_circuit_multi_registers.py | lerongil/qiskit-terra | a25af2a2378bc3d4f5ec73b948d048d1b707454c | [
"Apache-2.0"
] | 22 | 2019-08-15T04:39:15.000Z | 2022-03-06T05:17:04.000Z | test/python/circuit/test_circuit_multi_registers.py | lerongil/qiskit-terra | a25af2a2378bc3d4f5ec73b948d048d1b707454c | [
"Apache-2.0"
] | 2 | 2020-10-26T07:12:12.000Z | 2021-12-09T16:22:51.000Z | test/python/circuit/test_circuit_multi_registers.py | lerongil/qiskit-terra | a25af2a2378bc3d4f5ec73b948d048d1b707454c | [
"Apache-2.0"
] | 9 | 2019-09-05T05:33:00.000Z | 2021-10-09T16:04:53.000Z | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test Qiskit's QuantumCircuit class for multiple registers."""
from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit
from qiskit.converters.circuit_to_dag import circuit_to_dag
from qiskit.test import QiskitTestCase
from qiskit.exceptions import QiskitError
class TestCircuitMultiRegs(QiskitTestCase):
    """QuantumCircuit tests for circuits built over multiple registers."""

    def test_circuit_multi(self):
        """Test circuit multi regs declared at start.

        Builds the same circuit two ways -- registers passed to the
        constructor vs. added incrementally with add_register -- and
        checks that the resulting DAGs are equal.
        """
        qreg0 = QuantumRegister(2, 'q0')
        creg0 = ClassicalRegister(2, 'c0')
        qreg1 = QuantumRegister(2, 'q1')
        creg1 = ClassicalRegister(2, 'c1')
        # Variant 1: registers declared up front in the constructors.
        circ = QuantumCircuit(qreg0, qreg1)
        circ.x(qreg0[1])
        circ.x(qreg1[0])
        meas = QuantumCircuit(qreg0, qreg1, creg0, creg1)
        meas.measure(qreg0, creg0)
        meas.measure(qreg1, creg1)
        qc = circ + meas
        # Variant 2: identical circuit assembled via add_register.
        circ2 = QuantumCircuit()
        circ2.add_register(qreg0)
        circ2.add_register(qreg1)
        circ2.x(qreg0[1])
        circ2.x(qreg1[0])
        meas2 = QuantumCircuit()
        meas2.add_register(qreg0)
        meas2.add_register(qreg1)
        meas2.add_register(creg0)
        meas2.add_register(creg1)
        meas2.measure(qreg0, creg0)
        meas2.measure(qreg1, creg1)
        qc2 = circ2 + meas2
        # Compare circuits via their DAG form, which ignores cosmetic
        # differences in construction order.
        dag_qc = circuit_to_dag(qc)
        dag_qc2 = circuit_to_dag(qc2)
        dag_circ2 = circuit_to_dag(circ2)
        dag_circ = circuit_to_dag(circ)
        self.assertEqual(dag_qc, dag_qc2)
        self.assertEqual(dag_circ, dag_circ2)

    def test_circuit_multi_name_collision(self):
        """Test circuit multi regs, with name collision.

        Two registers sharing the name 'q' must be rejected.
        """
        qreg0 = QuantumRegister(2, 'q')
        qreg1 = QuantumRegister(3, 'q')
        self.assertRaises(QiskitError, QuantumCircuit, qreg0, qreg1)
| 31.863014 | 77 | 0.6681 |
053ac0700d2dd8fad51c51732b85273d9be4255e | 3,641 | py | Python | api_yamdb/api/serializers.py | 4madeuz/DRF_exampe | ab4f155fcc0ed10912f8a05f57de6b45d4d836fd | [
"MIT"
] | null | null | null | api_yamdb/api/serializers.py | 4madeuz/DRF_exampe | ab4f155fcc0ed10912f8a05f57de6b45d4d836fd | [
"MIT"
] | null | null | null | api_yamdb/api/serializers.py | 4madeuz/DRF_exampe | ab4f155fcc0ed10912f8a05f57de6b45d4d836fd | [
"MIT"
] | null | null | null | from django.contrib.auth import get_user_model
from django.shortcuts import get_object_or_404
from rest_framework import serializers
from rest_framework.relations import SlugRelatedField
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from reviews.models import Categories, Comment, Genres, Review, Title
User = get_user_model()
class CreateUserSerializer(serializers.ModelSerializer):
    """Signup payload: username + email for the confirmation-code flow."""

    class Meta:
        model = User
        fields = ('username', 'email')

    def validate_username(self, username):
        # 'me' is reserved (commonly used to address the current user),
        # so it cannot be taken as a username.
        if username != 'me':
            return username
        raise serializers.ValidationError(
            'У вас не может быть username "me"(')
class ObtainTokenSerializer(serializers.ModelSerializer):
    """Token-request payload: username plus the emailed confirmation code.

    NOTE(review): confirmation_code is listed in Meta.fields but is
    presumably not a concrete User model field -- verify against the
    User model.
    """

    class Meta:
        model = User
        fields = ('username', 'confirmation_code')
        read_only_fields = ['username']
class MyTokenObtainPairSerializer(TokenObtainPairSerializer):
    """JWT obtain-pair serializer adapted to a confirmation-code flow.

    The client submits ``confirmation_code`` in place of ``password``
    (aliased via source='password'), and only the access token is
    returned -- the refresh token is dropped from the response.
    """

    confirmation_code = serializers.CharField(source='password')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Hide the inherited 'password' field; confirmation_code replaces it.
        del self.fields['password']

    def validate(self, attrs):
        data = super().validate(attrs)
        refresh = self.get_token(self.user)
        # Expose only the short-lived access token to the client.
        del data['refresh']
        data['access'] = str(refresh.access_token)
        return data

    class Meta:
        fields = ['username', 'confirmation_code']
class UserSerializer(serializers.ModelSerializer):
    """Full user representation used by the admin/self-profile endpoints."""

    class Meta:
        model = User
        fields = ('username', 'email', 'first_name',
                  'last_name', 'bio', 'role')
class CategorySerializer(serializers.ModelSerializer):
    """Serializes a Categories record as its name and URL slug."""
    class Meta:
        model = Categories
        fields = ('name', 'slug')
class GenreSerializer(serializers.ModelSerializer):
    """Serializes a Genres record as its name and URL slug."""
    class Meta:
        model = Genres
        fields = ('name', 'slug')
class TitleSerializer(serializers.ModelSerializer):
    """Base Title serializer; read/write variants below override fields."""
    class Meta:
        model = Title
        fields = ('id', 'name', 'year', 'description',
                  'genre', 'category')
class ReadTitlesSerializer(TitleSerializer):
    """Read-only Title representation: nested genre/category plus rating.

    `rating` is expected to be annotated onto the queryset by the view.
    """
    genre = GenreSerializer(many=True)
    category = CategorySerializer()
    rating = serializers.IntegerField()
    class Meta:
        model = Title
        fields = (
            'id', 'name', 'year', 'rating', 'description', 'genre', 'category')
class ActionTitlesSerializer(TitleSerializer):
    """Write variant of TitleSerializer: genre/category accepted as slugs."""
    genre = SlugRelatedField(slug_field='slug',
                             queryset=Genres.objects.all(), many=True)
    category = SlugRelatedField(slug_field='slug',
                                queryset=Categories.objects.all())
class ReviewSerializer(serializers.ModelSerializer):
    """Review serializer enforcing one review per (title, author) pair."""
    author = SlugRelatedField(
        slug_field='username',
        read_only=True,
        default=serializers.CurrentUserDefault())
    class Meta:
        model = Review
        fields = ['id', 'text', 'author', 'score', 'pub_date']
    def validate(self, data):
        # The uniqueness check only applies on creation; updates pass through.
        if self.context['request'].method != 'POST':
            return data
        user = self.context['request'].user
        # title_id comes from the nested URL kwargs of the view.
        title_id = self.context['view'].kwargs['title_id']
        title = get_object_or_404(Title, pk=title_id)
        if Review.objects.filter(
            title=title,
            author=user
        ).exists():
            raise serializers.ValidationError('Вы уже оставили оценку')
        return data
class CommentSerializer(serializers.ModelSerializer):
    """Serializes a review comment; author shown by username, never written."""
    author = serializers.SlugRelatedField(
        slug_field='username',
        read_only=True)
    class Meta:
        model = Comment
        fields = ['id', 'text', 'author', 'pub_date']
| 28.669291 | 79 | 0.641857 |
bb72b867d7abab77336d59b8be4c703ff770e469 | 2,608 | py | Python | sample-code/Python/23 Analytics/get_analytics_summary.py | Mesitis/community | da5ae7d57d7ed4c4f55ef1671520ff32270ad213 | [
"MIT"
] | 2 | 2017-06-12T06:19:05.000Z | 2018-02-07T05:32:22.000Z | sample-code/Python/23 Analytics/get_analytics_summary.py | Mesitis/community | da5ae7d57d7ed4c4f55ef1671520ff32270ad213 | [
"MIT"
] | null | null | null | sample-code/Python/23 Analytics/get_analytics_summary.py | Mesitis/community | da5ae7d57d7ed4c4f55ef1671520ff32270ad213 | [
"MIT"
] | 2 | 2017-07-05T05:26:27.000Z | 2018-09-05T13:52:27.000Z | '''
- login and get token
- process 2FA if 2FA is setup for this account
- Returns analytics data for the given switch user
'''
# NOTE(review): Python 2 sample script (print statements). It logs in,
# optionally completes 2FA, resolves a switch-user id, and fetches the
# analytics summary for that user.
import requests
import json
get_token_url = "https://api.canopy.cloud:443/api/v1/sessions/"
validate_otp_url = "https://api.canopy.cloud:443/api/v1/sessions/otp/validate.json" #calling the production server for OTP authentication
get_partner_users_url = "https://api.canopy.cloud:443/api/v1/admin/users.json"
get_analytics_summary_url = "https://api.canopy.cloud:443/api/v1/analytics/summary.json"
#please replace below with your username and password over here
username = 'userxxx'
password = 'passxxx'
#please enter the OTP token in case it is enabled
otp_code = '123456'
#first call for a fresh token
# Credentials are form-encoded as user[username] / user[password]
# (%5B / %5D are URL-encoded brackets).
payload = "user%5Busername%5D=" + username + "&user%5Bpassword%5D=" + password
headers = {
    'accept': "application/json",
    'content-type':"application/x-www-form-urlencoded"
    }
response = requests.request("POST", get_token_url, data=payload, headers=headers)
print json.dumps(response.json(), indent=4, sort_keys = True)
token = response.json()['token']
login_flow = response.json()['login_flow']
#in case 2FA is enabled use the OTP code to get the second level of authentication
if login_flow == '2fa_verification':
    headers['Authorization'] = token
    payload = 'otp_code=' + otp_code
    response = requests.request("POST", validate_otp_url, data=payload, headers=headers)
    print json.dumps(response.json(), indent=4, sort_keys = True) #print response.text
    # The OTP exchange returns a fresh token that supersedes the first one.
    token = response.json()['token']
login_role = response.json()['role']
switch_user_id = response.json()['id']
if login_role == 'Partneradmin':
    #print "============== partner's users ==========="
    headers = {
        'authorization': token,
        'content-type': "application/x-www-form-urlencoded; charset=UTF-8"
    }
    partner_users = []
    response = requests.request("GET", get_partner_users_url, headers=headers)
    for parent_user in response.json()['users']:
        partner_users.append(parent_user['id'])
    #print partner_users
    #take the first users in the list as the switch_user_id
    switch_user_id = partner_users[0]
#in case the user is a partner_admin then switch_user_id is any one of the users it has access to (here we take the first one from the list)
headers = {
    'authorization': token,
    'content-type': "application/x-www-form-urlencoded; charset=UTF-8",
    'x-app-switch-user': str(switch_user_id)
}
response = requests.request("GET", get_analytics_summary_url, headers=headers)
print json.dumps(response.json(), indent=4, sort_keys = True)
| 33.87013 | 140 | 0.722009 |
2cc2ebe4f22fdc770184e319a719a08eb4c1e1fe | 8,378 | py | Python | dayu_job_center/src/server.py | phenom-films/dayu_job_center | bcdb8c22e6e41c88aeca6584f17f8d95cea89ef2 | [
"MIT"
] | 7 | 2018-10-11T07:44:38.000Z | 2021-05-26T16:34:38.000Z | dayu_job_center/src/server.py | phenom-films/dayu_job_center | bcdb8c22e6e41c88aeca6584f17f8d95cea89ef2 | [
"MIT"
] | null | null | null | dayu_job_center/src/server.py | phenom-films/dayu_job_center | bcdb8c22e6e41c88aeca6584f17f8d95cea89ef2 | [
"MIT"
] | 4 | 2019-07-19T01:03:38.000Z | 2022-02-16T13:43:17.000Z | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
__author__ = 'andyguo'
import datetime
import json
import threading
import zmq
from config import *
from util import tprint, recursive_update
from worker import LocalAsyncWorker
class ServerBase(object):
    """Abstract job-server interface.

    Subclasses must implement connection setup, the run loop, and the
    per-job control operations (pause/resume/abort).

    NOTE: the containers below are class-level attributes, so they are
    shared by all instances unless a subclass rebinds them per instance.
    """
    worker_count = None
    workers = {}
    available_workers = []
    waiting_jobs = []
    running_jobs = []
    finished_jobs = []
    is_running = False

    def setup_connection(self):
        """Establish whatever transport the workers communicate over."""
        raise NotImplementedError

    def start(self):
        """Run the job-dispatch loop until all jobs are done."""
        raise NotImplementedError

    def stop(self):
        """Shut the server down and release its resources."""
        raise NotImplementedError

    def pause(self, running_job_index):
        """Suspend the running job at the given index."""
        raise NotImplementedError

    def resume(self, running_job_index):
        """Resume a previously paused running job."""
        raise NotImplementedError

    def abort(self, running_job_index):
        """Cancel the running job at the given index."""
        raise NotImplementedError

    def add_job(self, job):
        """Queue a single job for execution."""
        raise NotImplementedError

    def add_job_group(self, job_group):
        """Queue a group of jobs for execution."""
        raise NotImplementedError
class LocalAsyncServer(ServerBase):
    """In-process job server: dispatches jobs over inproc ZeroMQ sockets
    to LocalAsyncWorker threads and tracks their progress.

    NOTE(review): Python 2 code (print statements, dict.has_key). Also,
    `workers` and `available_workers` are inherited class-level containers
    from ServerBase and are therefore shared across server instances.
    """
    def __init__(self, worker=4):
        # worker: number of LocalAsyncWorker threads to spawn.
        self.worker_count = worker
        self.waiting_jobs = []
        # self.waiting_jobs = [{'name'                 : u'哈哈',
        #                       'label'                : None,
        #                       'job_id'               : b'job_{}'.format(x),
        #                       'worker_id'            : None,
        #                       'submission_type'      : 'RandomSleepSubmission',
        #                       'status'               : JOB_READY,
        #                       'job_data'             : x,
        #                       'job_total'            : 3,
        #                       'message'              : None,
        #                       'start_time'           : None,
        #                       'elapse_time'          : '0:00:00',
        #                       'remaining_time'       : '0:00:00',
        #                       'progress'             : 0.0,
        #                       'before_start_callback': {'func': None, 'args': None, 'kwargs': None},
        #                       'after_finish_callback': {'func': None, 'args': None, 'kwargs': None}}
        #                      for x in range(12)]
        self.running_jobs = []
        self.finished_jobs = []
        self.is_running = False
        self.context = zmq.Context.instance()
    def add_job(self, job):
        """Queue a single job dict for dispatch."""
        self.waiting_jobs.append(job)
    def add_job_group(self, job_group):
        """Queue a job group (dispatched to one worker as a unit)."""
        self.waiting_jobs.append(job_group)
    def resume(self, running_job_index):
        """Send JOB_RESUME to the worker owning the indexed running job."""
        if self.is_running is False:
            return
        worker_id = self.running_jobs[running_job_index]['worker_id'].encode('ascii')
        self.control_socket.send_multipart([worker_id, JOB_RESUME])
    def pause(self, running_job_index):
        """Send JOB_PAUSE to the worker owning the indexed running job."""
        if self.is_running is False:
            return
        worker_id = self.running_jobs[running_job_index]['worker_id'].encode('ascii')
        self.control_socket.send_multipart([worker_id, JOB_PAUSE])
    def abort(self, running_job_index):
        """Send JOB_STOP to the worker owning the indexed running job."""
        worker_id = self.running_jobs[running_job_index]['worker_id'].encode('ascii')
        self.control_socket.send_multipart([worker_id, JOB_STOP])
    def stop(self):
        """Ask every worker to exit, wait for all acks, then tear down ZMQ."""
        for w in self.workers:
            self.control_socket.send_multipart([w, WORKER_EXIT])
        exit_worker_count = 0
        # Block until each worker confirms its exit on the control socket.
        while exit_worker_count < self.worker_count:
            message = self.control_socket.recv_multipart()
            if message[-1] == WORKER_EXIT:
                exit_worker_count += 1
        self.is_running = False
        self.job_socket.close()
        self.control_socket.close()
        self.context.term()
        print 'server stop!'
        # for t in self.worker_threads:
        #     print self.worker_threads[t].is_alive()
    def setup_connection(self):
        """Bind the two ROUTER sockets: job dispatch and worker control."""
        self.context = zmq.Context.instance()
        self.job_socket = self.context.socket(zmq.ROUTER)
        self.job_socket.bind('inproc://backend')
        self.control_socket = self.context.socket(zmq.ROUTER)
        self.control_socket.bind('inproc://worker_control')
    def spawn_workers(self):
        """Start worker threads and wait until all report WORKER_IDLE."""
        for x in range(self.worker_count):
            w = LocalAsyncWorker(x)
            t = threading.Thread(target=w.start)
            self.workers[w.identity] = {'thread'               : t,
                                        'before_start_callback': {'func': None, 'args': None, 'kwargs': None},
                                        'after_finish_callback': {'func': None, 'args': None, 'kwargs': None}}
            t.start()
        tprint('==== start worker thread ====')
        while len(self.available_workers) < self.worker_count:
            message = self.control_socket.recv_multipart()
            if message[-1] == WORKER_IDLE:
                self.available_workers.append(message[0])
        tprint('==== all worker thread ready ====')
    def send_job(self):
        """Pair waiting jobs with idle workers and dispatch them as JSON."""
        while self.available_workers and self.waiting_jobs:
            job = self.waiting_jobs.pop(0)
            worker_id = self.available_workers.pop(0)
            # before_start also strips the (non-serializable) callbacks.
            self.before_start(job, worker_id)
            job['worker_id'] = worker_id
            self.running_jobs.append(job)
            self.job_socket.send_multipart([worker_id, json.dumps(job)])
    def before_start(self, job, worker_id):
        """Move the job's callbacks onto the worker record and run the
        before-start hook, if any."""
        self.workers[worker_id]['before_start_callback'].update(job.pop('before_start_callback'))
        self.workers[worker_id]['after_finish_callback'].update(job.pop('after_finish_callback'))
        func = self.workers[worker_id]['before_start_callback']['func']
        if func:
            func(*self.workers[worker_id]['before_start_callback']['args'],
                 **self.workers[worker_id]['before_start_callback']['kwargs'])
    def update_time(self, job):
        """Refresh elapse_time and (progress-extrapolated) remaining_time."""
        _progress = job['progress']
        if _progress != 0:
            _delta = datetime.datetime.now() - datetime.datetime.strptime(job['start_time'], DATETIME_FORMATTER)
            job['elapse_time'] = str(_delta).split('.')[0]
            # Linear extrapolation of remaining time from current progress.
            job['remaining_time'] = \
                str(datetime.timedelta(seconds=_delta.total_seconds() * (1.0 - _progress) / _progress)).split('.')[0]
    def update_jobs(self):
        """Receive one status message, merge it into the matching running
        job, refresh timers, and retire finished/failed jobs."""
        message = self.job_socket.recv_multipart()
        job = json.loads(message[-1])
        job_id_list = [x['job_id'] for x in self.running_jobs]
        job_index = job_id_list.index(job['job_id'])
        recursive_update(self.running_jobs[job_index], (job))
        for j in self.running_jobs:
            self.update_time(j)
            if j.has_key('children_jobs'):
                for single_job in j['children_jobs']:
                    self.update_time(single_job)
        if job['status'] == JOB_FINISHED:
            worker_id = job['worker_id']
            self.after_finish(job, worker_id)
            # message[0] is the worker identity frame added by ROUTER.
            self.available_workers.append(message[0])
            self.finished_jobs.append(self.running_jobs.pop(job_index))
        if job['status'] in (JOB_ERROR, JOB_UNKNOWN, JOB_STOP):
            self.available_workers.append(message[0])
            self.finished_jobs.append(self.running_jobs.pop(job_index))
    def after_finish(self, job, worker_id):
        """Run the after-finish hook registered for this worker, if any."""
        func = self.workers[worker_id]['after_finish_callback']['func']
        if func:
            func(*self.workers[worker_id]['after_finish_callback']['args'],
                 **self.workers[worker_id]['after_finish_callback']['kwargs'])
    def start(self):
        """Main loop: dispatch and poll until no jobs remain, then stop."""
        from pprint import pprint
        self.is_running = True
        self.setup_connection()
        self.spawn_workers()
        while True:
            if len(self.waiting_jobs) == 0 and len(self.running_jobs) == 0:
                break
            self.send_job()
            self.update_jobs()
            print '---------------'
            pprint(self.running_jobs)
            # print [(x['job_id'], x['progress']) for x in self.running_jobs]
        self.stop()
if __name__ == '__main__':
    # Smoke test: run three random-sleep jobs as one group on a local server.
    from job import *
    a = Job(job_total=3, submission_type='RandomSleepSubmission')
    b = Job(job_total=3, submission_type='RandomSleepSubmission')
    c = Job(job_total=3, submission_type='RandomSleepSubmission')
    jg = JobGroup()
    jg.add_job(a)
    jg.add_job(b)
    jg.add_job(c)
    server = LocalAsyncServer()
    server.add_job_group(jg)
    server.start()
    print server.finished_jobs
| 36.268398 | 117 | 0.578181 |
ba60bb52ccb7ec0a36b24ca5414b807bd7d8b5e6 | 466 | py | Python | bookr/reviews/migrations/0003_book_publication_date.py | ecedmondson/bookr | 0e8adcbad177bde48b975de422d9bccee09da4cc | [
"MIT"
] | null | null | null | bookr/reviews/migrations/0003_book_publication_date.py | ecedmondson/bookr | 0e8adcbad177bde48b975de422d9bccee09da4cc | [
"MIT"
] | 2 | 2022-02-05T03:07:06.000Z | 2022-02-05T03:07:37.000Z | bookr/reviews/migrations/0003_book_publication_date.py | ecedmondson/bookr | 0e8adcbad177bde48b975de422d9bccee09da4cc | [
"MIT"
] | null | null | null | # Generated by Django 4.0.2 on 2022-02-16 02:17
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: adds the nullable-defaulted publication_date column
    # to reviews.Book.
    dependencies = [
        ("reviews", "0002_book_contributor_review_bookcontributor_and_more"),
    ]
    operations = [
        migrations.AddField(
            model_name="book",
            name="publication_date",
            field=models.DateField(default=None, verbose_name="Date the book was published."),
        ),
    ]
| 24.526316 | 94 | 0.645923 |
a826d12645b1beb23a61caecc90cf5df08e3ae8a | 33,913 | py | Python | fullwavepy/ndat/arrays.py | kmch/FullwavePy | 3c704b9b6ae2c6c585adb61e57991caf30ab240e | [
"MIT"
] | 2 | 2020-12-24T01:02:16.000Z | 2021-02-17T10:00:58.000Z | fullwavepy/ndat/arrays.py | kmch/FullwavePy | 3c704b9b6ae2c6c585adb61e57991caf30ab240e | [
"MIT"
] | null | null | null | fullwavepy/ndat/arrays.py | kmch/FullwavePy | 3c704b9b6ae2c6c585adb61e57991caf30ab240e | [
"MIT"
] | null | null | null | """
Wrappers around NumPy arrays.
Notes
-----
Arr3d -> Arr2d etc. can only be achieved by their slice() methods.
NumPy's slice-index notation A[:,0,:] etc. works (i.e. reshapes)
but doesn't convert the type.
(c) 2019- Kajetan Chrapkiewicz.
Copyright: Ask for permission by writing to k.chrapkiewicz17@imperial.ac.uk.
"""
import numpy as np
import matplotlib.pyplot as plt
from autologging import logged, traced
from matplotlib.gridspec import GridSpec
from fullwavepy.generic.parse import kw, del_kw
from fullwavepy.generic.decor import widgets, timer
from fullwavepy.plot.generic import figure, aspeqt, Plotter
from fullwavepy.plot.plt2d import plot_image
# -------------------------------------------------------------------------------
# Arrays - basic classes
# -------------------------------------------------------------------------------
@logged
class Arr(np.ndarray):
  """
  Wrapper around numpy's array.

  Adds two grid attributes to the ndarray:
  - extent: a [min, max] pair per axis (see _set_extent/_default_extent),
  - dx: grid spacing per axis, derived from extent and shape (_set_dx).
  """
  def __new__(cls, source, ndims=None, **kwargs):
    """
    Init by reading from source.

    source : str (file name) or array-like; resolved by cls._read.
    ndims : int, optional - if given, assert the result has that many axes.

    Notes
    -----
    From https://docs.scipy.org/doc/numpy/user/basics.subclassing.html:
    Input array is an already formed ndarray instance
    """
    if hasattr(source, 'extent'): # NOTE ADDED 12.01.2021
      kwargs['extent'] = source.extent
    source = cls._read(source, **kwargs)
    obj = np.asarray(source).view(cls) # CAST THE TYPE
    # FIXME: REPLACE IT WITH STH TAKING source
    # AS ARG AND RETURNING EXTENT WHICH WE'LL ASSIGN TO obj JUST BEFORE RETURNING IT
    # FROM THIS __new__ FUNCTION
    obj = cls._set_extent(obj, **kwargs)
    #obj = cls._set_coords(obj, **kwargs)
    obj = cls._set_dx(obj, **kwargs)
    if ndims is not None:
      assert len(obj.shape) == ndims
    return obj # NECESSARY!
  # -----------------------------------------------------------------------------
  def _read(source, **kwargs):
    """
    Resolve source into an array: strings are treated as file names and
    read from disk; everything else is returned unchanged.

    NOTE(review): deliberately has no `self` - it is always called via
    the class (cls._read), so it acts as a plain function.
    """
    #from fullwavepy.seismic.data import Data
    #from fullwavepy.ndat.manifs import Surf, SurfZ, Plane
    #from fullwavepy.ndat.points import Points
    #
    #if (type(source) == type(np.array([])) or
    #    type(source) == Arr or
    #    type(source) == Arr1d or
    #    type(source) == Arr2d or
    #    type(source) == Arr3d or
    #    type(source) == Data or
    #    type(source) == Surf or
    #    type(source) == SurfZ or
    #    type(source) == Plane or
    #    type(source) == Points or
    #    type(source) == np.memmap):
    #  A = source
    if isinstance(source, str):
      from fullwavepy.ioapi.generic import read_any
      if hasattr(source, 'shape'): # FOR EFFICIENCY (SEE read_any)
        # NOTE(review): dead branch - a str never has 'shape'; moreover
        # `self` is undefined in this scope, so this line would raise
        # NameError if it were ever reached. Kept byte-identical.
        kwargs['shape'] = self.shape
      A = read_any(source, **kwargs)
    else:
      A = source
    #else:
    #  raise TypeError('Arguments need to be either ' +
    #                  'file-names or arrays or np.memmap, NOT: %s' %
    #                  type(source))
    return A
  # -----------------------------------------------------------------------------
  def _set_extent(obj, func=None, **kwargs):
    # Priority: explicit kwargs > already-set attribute > default extent.
    if 'extent' in kwargs:
      obj.__log.debug('Using extent from kwargs, even if it means overwriting')
      obj.extent = kwargs['extent']
    elif hasattr(obj, 'extent'):
      obj.__log.debug('obj.extent already set and not provided in kwargs')
      pass
    else:
      obj.__log.debug('Setting extent to default.')
      obj.extent = obj._default_extent(func, **kwargs)
    return obj
  # -----------------------------------------------------------------------------
  def _default_extent(obj, func=None, **kwargs):
    """
    Redefined in child classes to account for vertical axis flipping
    when plotting with imshow.

    Returns one [min, max] pair per array axis; by default [0, dim-1].
    """
    if func is None:
      func = lambda dim : [0, dim-1] # outdated: # NOT dim-1; SEE GridProjFile ETC.
    extent = []
    for dim in obj.shape:
      extent.append(func(dim))
    # if len(obj.shape) == 1:
    #   extent = extent[0]
    return extent
  # -----------------------------------------------------------------------------
  def _set_dx(obj, **kwargs):
    """
    It is fully determined by extent and shape.
    In general, it is axis-dependent (dx != dy != dz != dx)

    Axes of length 1 get dx=None (spacing undefined).
    """
    dx = []
    obj.__log.debug('obj.shape %s' % str(obj.shape))
    obj.__log.debug('obj.extent %s' % str(obj.extent))
    assert len(obj.shape) == len(obj.extent)
    for nx, (x1, x2) in zip(obj.shape, obj.extent):
      obj.__log.debug('nx=%s, x1=%s, x2=%s' % (nx, x1, x2))
      dx_1D = (x2 - x1) / (nx-1) if nx > 1 else None
      obj.__log.debug('dx_1D=%s' % dx_1D)
      dx.append(dx_1D)
    obj.dx = np.array(dx)
    return obj
  # -----------------------------------------------------------------------------
  def _set_coords(obj, **kwargs):
    # Placeholder - coords are not implemented yet (see debug message).
    obj.__log.debug('obj.extent' + str(obj.extent))
    obj.__log.debug('Setting coords to None. Fill it with actual code')
    obj.coords = None
    return obj
  # -----------------------------------------------------------------------------
  def __array_finalize__(self, obj):
    # Required by the ndarray-subclassing protocol; nothing to copy here.
    if obj is None: return
  # -----------------------------------------------------------------------------
  def _metre2index(self, m, axis, **kwargs):
    # Convert a physical coordinate (metres) into an exact grid index;
    # raises if m does not fall exactly on a grid node.
    origin = self.extent[axis][0]
    i = (m - origin) / self.dx[axis]
    if not i.is_integer():
      raise ValueError('Index must be integer not %s' % i)
    return int(i)
  # -----------------------------------------------------------------------------
  def _metre_2_nearest_index(self, m, axis, **kwargs):
    """
    Better version of _metre2index used
    by fwilight.ndat.A3d and A2d.
    Parameters
    ----------
    m : float
      Value in metres.
    axis : int
      Axis of the array.
    Returns
    -------
    int
      Nearest index.
    """
    origin = self.extent[axis][0]
    i = (m - origin) / self.dx[axis]
    if not i.is_integer():
      # Off-node coordinates are floored rather than rejected.
      print('Warning. Non-integer index. Taking its floor')
      i = np.floor(i)
    return int(i)
  # -----------------------------------------------------------------------------
  def _index2metre(self, i, axis, **kwargs):
    # Inverse of _metre2index: grid index -> physical coordinate.
    origin = self.extent[axis][0]
    m = i * self.dx[axis] + origin
    return m
  # -----------------------------------------------------------------------------
  def _metre2gridnode(self, *args, **kwargs):
    # Grid nodes are 1-based (Fullwave convention), hence the +1.
    return self._metre2index(*args, **kwargs) + 1
  # -----------------------------------------------------------------------------
  def _box2inds(self, box, **kwargs):
    """
    Convert box into slicing-indices using extent.

    box is a flat [x1, x2, y1, y2, ...] list; the returned per-axis
    index pairs are half-open (end + 1) for use with np.arange.
    """
    box = np.array(box)
    extent = np.array(self.extent)
    assert len(box.shape) == 1
    assert len(box) == len(extent.flatten())
    box = box.reshape(extent.shape)
    inds = np.zeros(box.shape)
    for axis, _ in enumerate(box):
      b0, b1 = box[axis]
      if b0 == b1: # FOR 2D (DOUBLE-CHECK)
        self.__log.warn('Skipping b0=b1=%s' % b0)
        continue
      inds[axis][0] = self._metre2index(b0, axis)
      inds[axis][1] = self._metre2index(b1, axis) + 1 # NOTE: FOR np.arange(b1, b2) etc.
      self.__log.debug('axis %s: i1=%s, i2=%s' % (axis, inds[axis][0], inds[axis][1]))
    return inds.astype(int)
  # -----------------------------------------------------------------------------
  def carve(self, box, **kwargs):
    """
    Carve a box out of an array.
    Parameters
    ----------
    box : list
    Returns
    -------
    self
    """
    inds = self._box2inds(box, **kwargs)
    for axis in range(len(self.shape)):
      self = np.take(self, np.arange(*inds[axis]), axis=axis)
    # The carved array inherits the box as its new extent.
    self.extent = np.array(box).reshape(inds.shape)
    return self
  # -----------------------------------------------------------------------------
  def save(self, fname, **kwargs):
    # Serialize to Fullwave3D's vtr format.
    from fullwavepy.ioapi.fw3d import save_vtr
    save_vtr(self, fname)
  # -----------------------------------------------------------------------------
  def info(self, **kwargs):
    # Log a short summary of the grid and value range.
    self.__log.info('grid shape: {} [nodes]'.format(self.shape))
    # NOTE(review): the 'cell-sizes' line formats self.extent - presumably
    # self.dx was intended; confirm before relying on this log output.
    self.__log.info('grid cell-sizes in (x,y,z): {} [m]'.format(self.extent))
    self.__log.info('grid extent: {} [m]'.format(self.extent))
    self.__log.info('value min: {}, max: {}'.format(np.min(self), np.max(self)))
  # -----------------------------------------------------------------------------
  def compare(self, othe, mode='interleave', **kwargs): #fig, gs=None, widgets=False,
    # Compare two arrays; only the 'interleave' mode is currently active.
    if mode == 'interleave' or mode == 'ileave':
      A = self.interleave(othe, **kwargs)
      A.plot(**kwargs)
    # elif mode == 'diff' or mode == 'dif':
    #   c = A3d(self-othe, extent=self.extent)
    #   c.plot(**kwargs)
    #   return c
    else:
      raise ValueError(mode)
  # -----------------------------------------------------------------------------
  def compare_subplots(self, **kwargs):
    # NOTE(review): broken as written - `othe`, `widgets`, `gs` and `fig`
    # are not defined in this scope (parameters appear to have been lost
    # in a refactor; cf. the commented-out signature of compare above).
    # Calling this method raises NameError. Code kept byte-identical.
    assert type(self) == type(othe)
    assert self.shape == othe.shape
    xlim = kw('xlim', None, kwargs)
    ylim = kw('ylim', None, kwargs)
    if widgets:
      figsize = (kw('figsize_x', 8, kwargs), kw('figsize_y', 8, kwargs))
      fig = plt.figure(figsize=figsize)
      kwargs['widgets'] = False
    if gs is None:
      gs = fig.add_gridspec(1,2)
    ax1 = fig.add_subplot(gs[0,0])
    self.plot(**kwargs)
    ax2 = fig.add_subplot(gs[0,1])
    othe.plot(**kwargs)
    for ax in [ax1, ax2]:
      ax.set_xlim(xlim)
      ax.set_ylim(ylim)
@logged
class Arr1d(Arr):
  """
  1D array wrapper (profiles, traces); extent is a single [x1, x2] pair.
  """
  def __new__(cls, source, **kwargs):
    return super().__new__(cls, source, ndims=1, **kwargs)
  # -----------------------------------------------------------------------------
  def plot(self, **kwargs):
    """
    format of extent: [[x1,x2]] is for compatibility with 2d and 3d
    """
    from fullwavepy.plot.plt1d import plot_line
    c = kw('c', None, kwargs)
    assert np.array(self.extent).shape == (1,2)
    self.__log.debug('self.extent' + str(self.extent))
    kwargs['extent'] = self.extent[:2]
    plot_line(self, **kwargs)
    # x = np.linspace(x1, x2, len(self))
    # plt.plot(x, self, c=c)
    return plt.gca()
# -----------------------------------------------------------------------------
@logged
class Arr2d(Plotter, Arr):
  """
  2D array wrapper: slicing down to Arr1d and image/wiggle plotting.
  """
  def __new__(cls, source, **kwargs):
    return super().__new__(cls, source, ndims=2, **kwargs)
  # -----------------------------------------------------------------------------
  ###@widgets('slice_at', 'node')
  def slice(self, slice_at='y', node=0, widgets=False, **kwargs):
    """
    Extract a 1D profile at the given node of the 'x' or 'y' axis,
    returned as Arr1d with the remaining axis' extent attached.
    """
    di = {'x': 0, 'y': 1} # TRANSLATE slice_at INTO AXIS NO.
    axis = di[slice_at]
    A = Arr1d(np.take(self, indices=node, axis=axis))
    assert len(self.extent) == 2
    extent1d = np.array([el for i, el in enumerate(self.extent) if i != di[slice_at]])
    self.__log.debug('extent1d %s' % str(extent1d))
    A.extent = extent1d
    return A
  # -----------------------------------------------------------------------------
  def interleave(self, othe, **kwargs):
    # NOTE(review): interleave_arrays is not imported in the visible header;
    # presumably defined elsewhere in this module - verify.
    self.interleaved = interleave_arrays(self, othe, **kwargs)
    return self.interleaved
  # -----------------------------------------------------------------------------
  ###@widgets('cmap', 'slice_at', 'node')
  def plot_slice(self, slice_at='y', node=0, widgets=False, **kwargs):
    """
    Plot a single 1D profile of this array (see slice()).
    """
    arr1d = self.slice(slice_at, node, widgets=False, **kwargs)
    ax = arr1d.plot(**kwargs)
    return ax
  # -----------------------------------------------------------------------------
  def plot_full(self, wiggle=False, **kwargs):
    """
    Plot the whole 2D array, either as an image or as wiggle traces.

    NOTE(review): modify_array and plot_wiggl are not imported in the
    visible header; presumably defined elsewhere in this module - verify.
    """
    kwargs['extent'] = np.ravel(self.extent) # ravel JUST IN CASE
    # IT SHOULDN'T BE APPLIED TWICE!
    self = modify_array(self, **kwargs) # FIXME: MOVE IT SOMEWHERE ELSE?!
    if wiggle:
      ax = plot_wiggl(self, **kwargs)
    else:
      ax = plot_image(self, **kwargs)
    return ax
  # -----------------------------------------------------------------------------
  def plot(self, *args, **kwargs):
    """
    Dispatch: 1D profile if 'slice_at' is given, full image otherwise.
    """
    if 'slice_at' in kwargs:
      ax = self.plot_slice(*args, **kwargs)
    else:
      ax = self.plot_full(*args, **kwargs)
    return ax
  # -----------------------------------------------------------------------------
  #def compare(self, othe, **kwargs):
    #A = self.interleave(othe, **kwargs)
    #A.plot(**kwargs)
@logged
class Arr3d(Plotter, Arr):
"""
3D array.
"""
  def __new__(cls, source, **kwargs):
    # Delegate to Arr.__new__, enforcing exactly three axes.
    return super().__new__(cls, source, ndims=3, **kwargs)
# -----------------------------------------------------------------------------
  def slice(self, slice_at='y', node=0, widgets=False, **kwargs):
    """
    Extract a 2D slice at the given node of axis 'x', 'y' or 'z',
    returned as Arr2d with a 2D extent derived from this array's.
    """
    di = {'x': 0, 'y': 1, 'z': 2} # TRANSLATE slice_at INTO AXIS NO.
    axis = di[slice_at]
    A = Arr2d(np.take(self, indices=node, axis=axis))
    assert len(self.extent) == 3
    # extent2d = np.ravel([el for i, el in enumerate(self.extent) if i != di[slice_at]])
    extent2d = np.array([el for i, el in enumerate(self.extent) if i != di[slice_at]])
    # if axis != 2:
    self.__log.debug('Setting extent2d so that no vertical-axis flipping is needed.')
    self.__log.debug('NOW ALSO FOR zslice (NOT TESTED BUT SEEMS TO HAVE FIXED THE BUG)')
    # extent2d[-2: ] = [extent2d[-1], extent2d[-2]]
    # Reverse the vertical (last) axis so imshow needs no flip.
    extent2d[-1] = extent2d[-1][::-1]
    self.__log.debug('extent2d: ' + str(extent2d))
    A.extent = extent2d
    return A
# -----------------------------------------------------------------------------
  def interleave(self, othe, *args, **kwargs):
    # Slice both 3D arrays the same way and interleave the two 2D slices
    # (interleave_arrays is presumably defined elsewhere in this module).
    A1 = self.slice(*args, **kwargs)
    A2 = othe.slice(*args, **kwargs)
    A = Arr2d(interleave_arrays(A1, A2, **kwargs))
    return A
# -----------------------------------------------------------------------------
  def plot_slice(self, slice_at='y', node=None, widgets=False, **kwargs):
    """
    Plot one 2D slice of this array; node defaults to the middle of the
    chosen axis. Delegates plotting to Arr2d.plot.
    """
    nx, ny, nz = self.shape
    if node is None:
      if slice_at == 'x':
        node = kw('node', nx//2, kwargs)
        # metre = self._index2metre(node, 0)
      elif slice_at == 'y':
        node = kw('node', ny//2, kwargs)
        # metre = self._index2metre(node, 1)
      elif slice_at == 'z':
        node = kw('node', nz//2, kwargs)
        # metre = self._index2metre(node, 2)
      else:
        raise ValueError('Wrong slice_at: %s' % str(slice_at))
    arr2d = self.slice(slice_at, node, widgets=False, **kwargs)
    # Build a default title unless the caller passed title=None.
    suffix = kwargs.get('title', '')
    if suffix is None:
      kwargs['title'] = ''
    else:
      suffix = ', ' + suffix if suffix != '' else suffix
      kwargs['title'] = 'Array slice at %s-index %s%s' % (slice_at, node, suffix)
    del_kw('slice_at', kwargs) # JUST IN CASE
    ax = arr2d.plot(**kwargs)
    if slice_at == 'z': # DISABLE?
      ax.invert_yaxis()
    return ax
# -----------------------------------------------------------------------------
  def plot_3slices_new2(self, x, y, z, fig=None, gs=None, **kwargs):
    """
    Plot three orthogonal slices (at indices x, y, z) with dashed lines
    marking where the other two slices cut each panel.
    layout kwarg: 'square' (2x2 grid) or 'thin' (3 stacked panels).
    """
    from fullwavepy.plot.plt2d import plot_image
    layout = kw('layout', 'square', kwargs)
    if fig is None:
      fig = figure(16,8)
    kwargs['x'] = x
    kwargs['y'] = y
    kwargs['z'] = z
    # LABELS FOR EACH AXIS
    s2 = kw('slice', 'y', kwargs) # MAIN SLICE PLOTTED AT THE BOTTOM IN FULL WIDTH
    s0, s1 = [i for i in ['x', 'y', 'z'] if i != s2]
    s = [s0, s1, s2]
    # CONVERT THE LABELS INTO ARRAY DIMENSIONS (AXES)
    convert_s2a = {'x': 0, 'y': 1, 'z': 2} # TRANSLATE slice TO axis
    if layout == 'square':
      if gs is None:
        gs = GridSpec(2,2, height_ratios=[1,1], width_ratios=[2,1])
      axes = list(np.zeros(3))
      axes[0] = fig.add_subplot(gs[0,0])
      axes[1] = fig.add_subplot(gs[1,0])
      axes[2] = fig.add_subplot(gs[:,1])
    elif layout == 'thin':
      if gs is None:
        gs = GridSpec(3,1)
      axes = list(np.zeros(3))
      axes[0] = fig.add_subplot(gs[0,0])
      axes[1] = fig.add_subplot(gs[1,0])
      axes[2] = fig.add_subplot(gs[2,0])
    else:
      raise ValueError('Unknown layout: %s' % layout)
    # Share one colour scale between the three panels.
    kwargs['vmin'] = kw('vmin', np.min(self), kwargs)
    kwargs['vmax'] = kw('vmax', np.max(self), kwargs)
    self.__log.debug('Setting vmin, vmax to: {}, {}'.format(kwargs['vmin'],
                                                            kwargs['vmax']))
    for i, ax in enumerate(axes):
      plt.sca(ax)
      aaxx = plot_image(np.take(self, kwargs[s[i]], convert_s2a[s[i]]), **kwargs)
      aspeqt(aaxx)
      # PLOT SLICING LINES
      a, b = [j for j in ['x', 'y', 'z'] if j != s[i]]
      abcissae_horiz = range(self.shape[convert_s2a[a]])
      ordinate_horiz = np.full(len(abcissae_horiz), kwargs[b])
      ordinate_verti = range(self.shape[convert_s2a[b]])
      abcissae_verti = np.full(len(ordinate_verti), kwargs[a])
      if s[i] == 'z':
        # Map view: swap the guide lines and flip the vertical axis.
        abcissae_horiz, ordinate_horiz, abcissae_verti, ordinate_verti = abcissae_verti, ordinate_verti, abcissae_horiz, ordinate_horiz
        ax.invert_yaxis()
      plt.plot(abcissae_horiz, ordinate_horiz, '--', c='white')
      plt.plot(abcissae_verti, ordinate_verti, '--', c='white')
    return plt.gca()
# -----------------------------------------------------------------------------
def plot_3slices_new1(self, x, y, z, fig=None, contour=None, **kwargs):
if fig is None:
fig = figure(16,6)
kwargs['vmin'] = kw('vmin', np.min(self), kwargs)
kwargs['vmax'] = kw('vmax', np.max(self), kwargs)
self.__log.debug('Setting vmin, vmax to: {}, {}'.format(kwargs['vmin'],
kwargs['vmax']))
# kwargs = dict(overwrite=0, overwrite_mmp=0, vmin=1500, vmax=7000, cmap='hsv')
gs = fig.add_gridspec(2,2, height_ratios=[1,1], width_ratios=[2,1])
fig.add_subplot(gs[0,0])
ax = p.out.vp.it[it].plot(x=x, **kwargs)
aspeqt(ax)
fig.add_subplot(gs[1,0])
ax = p.out.vp.it[it].plot(y=y, **kwargs)
aspeqt(ax)
fig.add_subplot(gs[:,1])
ax = p.out.vp.it[it].plot(z=z, **kwargs)
if contour is not None:
colors = kw('colors', 'k', kwargs)
levels = kw('levels', 40, kwargs)
plt.contour(surf[...,0].T, extent=np.array(surf.extent[:-1]).flatten(), \
colors=colors, levels=levels, alpha=0.4)
ax.set_xlim(self.extent[ :2])
ax.set_ylim(self.extent[2:4])
aspeqt(ax)
return ax
# -----------------------------------------------------------------------------
  def plot_3slices(self, x, y, z, fig=None, gs=None, **kwargs):
    """
    Plot three orthogonal slices (at indices x, y, z) in a 2x2 grid:
    two minor slices on top, the main one ('slice' kwarg, default 'y')
    spanning the bottom row. Dashed white lines mark the cuts.
    """
    from fullwavepy.plot.plt2d import plot_image
    # layout = kw('layout', None, kwargs)
    # if layout is None:
    if fig is None:
      fig = figure(16,8)
    kwargs['x'] = x
    kwargs['y'] = y
    kwargs['z'] = z
    # LABELS FOR EACH AXIS
    s2 = kw('slice', 'y', kwargs) # MAIN SLICE PLOTTED AT THE BOTTOM IN FULL WIDTH
    s0, s1 = [i for i in ['x', 'y', 'z'] if i != s2]
    s = [s0, s1, s2]
    # CONVERT THE LABELS INTO ARRAY DIMENSIONS (AXES)
    convert_s2a = {'x': 0, 'y': 1, 'z': 2} # TRANSLATE slice TO axis
    if gs is None:
      gs = GridSpec(2,2)
    axes = list(np.zeros(3))
    axes[0] = fig.add_subplot(gs[0,0])
    axes[1] = fig.add_subplot(gs[0,1])
    axes[2] = fig.add_subplot(gs[1,:])
    # Share one colour scale between the three panels.
    kwargs['vmin'] = kw('vmin', np.min(self), kwargs)
    kwargs['vmax'] = kw('vmax', np.max(self), kwargs)
    self.__log.debug('Setting vmin, vmax to: {}, {}'.format(kwargs['vmin'],
                                                            kwargs['vmax']))
    for i, ax in enumerate(axes):
      plt.sca(ax)
      plot_image(np.take(self, kwargs[s[i]], convert_s2a[s[i]]), **kwargs)
      # PLOT SLICING LINES
      a, b = [j for j in ['x', 'y', 'z'] if j != s[i]]
      abcissae_horiz = range(self.shape[convert_s2a[a]])
      ordinate_horiz = np.full(len(abcissae_horiz), kwargs[b])
      ordinate_verti = range(self.shape[convert_s2a[b]])
      abcissae_verti = np.full(len(ordinate_verti), kwargs[a])
      if s[i] == 'z':
        # Map view: swap the guide lines and flip the vertical axis.
        abcissae_horiz, ordinate_horiz, abcissae_verti, ordinate_verti = abcissae_verti, ordinate_verti, abcissae_horiz, ordinate_horiz
        ax.invert_yaxis()
      plt.plot(abcissae_horiz, ordinate_horiz, '--', c='white')
      plt.plot(abcissae_verti, ordinate_verti, '--', c='white')
# -----------------------------------------------------------------------------
  def plot_3slices_old1(self, fig=None, gs=None, widgets=False, **kwargs):
    """
    Older variant of plot_3slices: slice indices come from kwargs
    (x/y/z, default 0) and an interactive-widgets path is kept.
    """
    from fullwavepy.plot.plt2d import plot_image
    if fig is None:
      fig = figure(16,8)
    kwargs['x'] = kw('x', 0, kwargs)
    kwargs['y'] = kw('y', 0, kwargs)
    kwargs['z'] = kw('z', 0, kwargs)
    # LABELS FOR EACH AXIS
    s2 = kw('slice', 'y', kwargs) # MAIN SLICE PLOTTED AT THE BOTTOM IN FULL WIDTH
    s0, s1 = [i for i in ['x', 'y', 'z'] if i != s2]
    s = [s0, s1, s2]
    # CONVERT THE LABELS INTO ARRAY DIMENSIONS (AXES)
    convert_s2a = {'x': 0, 'y': 1, 'z': 2} # TRANSLATE slice TO axis
    #if widgets: #FIXME BOILERPLATE
    #figsize = (kw('figsize_x', 8, kwargs), kw('figsize_y', 8, kwargs))
    #fig = plt.figure(figsize=figsize)
    if gs is None:
      gs = GridSpec(2,2)
      #gs = fig.add_gridspec(2,2)
    if widgets: #or fig is None:
      fig = figure(**kwargs)
      gs = fig.add_gridspec(2,2)
    axes = list(np.zeros(3))
    axes[0] = fig.add_subplot(gs[0,0])
    axes[1] = fig.add_subplot(gs[0,1])
    axes[2] = fig.add_subplot(gs[1,:])
    # Share one colour scale between the three panels.
    kwargs['vmin'] = kw('vmin', np.min(self), kwargs)
    kwargs['vmax'] = kw('vmax', np.max(self), kwargs)
    self.__log.debug('Setting vmin, vmax to: {}, {}'.format(kwargs['vmin'],
                                                            kwargs['vmax']))
    kwargs['widgets'] = False
    self.__log.debug('Disabling widgets in inner functions.')
    for i, ax in enumerate(axes):
      plt.sca(ax)
      plot_image(np.take(self, kwargs[s[i]], convert_s2a[s[i]]), **kwargs)
      # PLOT SLICING LINES
      a, b = [j for j in ['x', 'y', 'z'] if j != s[i]]
      abcissae_horiz = range(self.shape[convert_s2a[a]])
      ordinate_horiz = np.full(len(abcissae_horiz), kwargs[b])
      ordinate_verti = range(self.shape[convert_s2a[b]])
      abcissae_verti = np.full(len(ordinate_verti), kwargs[a])
      if s[i] == 'z':
        # Map view: swap the guide lines and flip the vertical axis.
        abcissae_horiz, ordinate_horiz, abcissae_verti, ordinate_verti = abcissae_verti, ordinate_verti, abcissae_horiz, ordinate_horiz
        ax.invert_yaxis()
      plt.plot(abcissae_horiz, ordinate_horiz, '--', c='white')
      plt.plot(abcissae_verti, ordinate_verti, '--', c='white')
    #return ax1, ax2, ax3
# -----------------------------------------------------------------------------
def plot(self, *args, **kwargs):
    """Framework plotter.

    Dispatches on the slicing kwargs:
    - none of 'x'/'y'/'z' given -> single default slice,
    - exactly one given        -> slice along that axis at that node,
    - all three given          -> three orthogonal slices,
    - exactly two given        -> ValueError.

    Preferred over calling plot_3slices directly, because subclasses set
    their plot formatting by overriding this method.

    Returns
    -------
    The current matplotlib axis.
    """
    given = [axis for axis in ('x', 'y', 'z') if axis in kwargs]
    if len(given) == 1:
        axis = given[0]
        kwargs['slice_at'] = axis
        kwargs['node'] = kwargs[axis]
        self.plot_slice(*args, **kwargs)
    elif len(given) == 0:
        self.plot_slice(*args, **kwargs)
    elif len(given) == 3:
        self.plot_3slices(*args, **kwargs)
    else:
        raise ValueError('Slicing arguments not understood.')
    return plt.gca()
# -----------------------------------------------------------------------------
def scroll(self, **kwargs):
    """Open a fresh figure with an interactive slice-scrolling view.

    Returns
    -------
    (figure, axis, tracker) -- keep a reference to the tracker, otherwise
    the scroll callback gets garbage-collected.
    """
    import matplotlib.pyplot as plt
    from fullwavepy.plot.events import IndexTracker
    figure_, axis_ = plt.subplots(1, 1)
    tracker_ = IndexTracker(axis_, self, **kwargs)
    return figure_, axis_, tracker_
# -----------------------------------------------------------------------------
def scrollall(self, fig, **kwargs):
    """Attach an all-slices scroll tracker to an existing figure.

    To make it work in a jupyter notebook::

        %matplotlib notebook
        %matplotlib notebook
        fig = plt.figure(figsize=(5,20))
        tracker = some_array.scrollall(fig, cmap='viridis')
        fig.canvas.mpl_connect('scroll_event', tracker.onscroll)
    """
    from fullwavepy.plot.events import IndexTrackerAll
    return IndexTrackerAll(fig, self, **kwargs)
# -------------------------------------------------------------------------------
# Arrays - newer classes
# -------------------------------------------------------------------------------
@logged
class A3d(Arr3d):
    """
    Thin wrapper around Arr3d
    to test new features before
    modifying it.
    Parameters
    ----------
    Arr3d : class
        3d array.
    """
    def plot_slice(self, coord, unit='n', axis='y', **kwargs):
        """
        Facilitate different units.
        Parameters
        ----------
        coord : float
            Value of a coordinate (axis specified below)
        unit : str, optional
            Unit of coord can be nodes ('n') or metres ('m'), by default 'n'
        axis : str, optional
            Axis along which coordinate is measured, by default 'y'
        Returns
        -------
        axis
            Axis of the plot.
        Raises
        ------
        IndexError
            If exceeds the axis size.
        """
        kwargs['slice_at'] = axis
        # Map the axis label to the corresponding array dimension.
        axis_id = dict(x=0, y=1, z=2)[axis]
        if unit == 'n':
            # Coordinate already is a node (grid) index.
            kwargs['node'] = coord
        elif unit == 'm':
            # Convert metres to the nearest grid index.
            # NOTE(review): _metre_2_nearest_index is not defined on A3d here;
            # presumably inherited from Arr3d -- confirm.
            i = self._metre_2_nearest_index(coord, axis_id)
            if (i < 0) or (i >= self.shape[axis_id]):
                raise IndexError('Incorrect array index: %s' %i)
            title = '%s=%s m' % (axis, coord)
            kwargs['title'] = title
            kwargs['node'] = i
        else:
            # NOTE(review): NIErr() is called but not raised; if NIErr is an
            # exception class this branch silently falls through -- confirm.
            NIErr()
        return super().plot_slice(**kwargs)
    # -----------------------------------------------------------------------------
    def slice_old(self, slice_at='y', node=0, widgets=False, **kwargs):
        """Take a 2d slice (Arr2d) of this array at `node` along `slice_at`.
        """
        di = {'x': 0, 'y': 1, 'z': 2} # TRANSLATE slice_at INTO AXIS NO.
        axis = di[slice_at]
        A = Arr2d(np.take(self, indices=node, axis=axis))
        assert len(self.extent) == 3
        # extent2d = np.ravel([el for i, el in enumerate(self.extent) if i != di[slice_at]])
        extent2d = np.array([el for i, el in enumerate(self.extent) if i != di[slice_at]])
        # if axis != 2:
        self.__log.debug('Setting extent2d so that no vertical-axis flipping is needed.')
        self.__log.debug('NOW ALSO FOR zslice (NOT TESTED BUT SEEMS TO HAVE FIXED THE BUG)')
        # extent2d[-2: ] = [extent2d[-1], extent2d[-2]]
        # Reverse the vertical extent so imshow-style plotting needs no flip.
        extent2d[-1] = extent2d[-1][::-1]
        self.__log.debug('extent2d: ' + str(extent2d))
        A.extent = extent2d
        return A
# -----------------------------------------------------------------------------
@logged
class A2d(Arr2d):
    def _metre_2_nearest_index(self, m, axis, **kwargs):
        """Convert a coordinate in metres to the nearest grid index
        along `axis`, flooring non-integer positions."""
        origin = self.extent[axis][0]
        i = (m - origin) / self.dx[axis]
        if not i.is_integer():
            print('Warning. Non-integer index. Taking its floor')
            i = np.floor(i)
        return int(i)
    def plot_slice(self, coord, unit='n', axis='y', **kwargs):
        """Plot a slice at `coord`, given either in nodes ('n') or
        metres ('m') along `axis`; see A3d.plot_slice."""
        kwargs['slice_at'] = axis
        axis_id = dict(x=0, y=1, z=2)[axis]
        if unit == 'n':
            kwargs['node'] = coord
        elif unit == 'm':
            i = self._metre_2_nearest_index(coord, axis_id)
            if (i < 0) or (i >= self.shape[axis_id]):
                raise IndexError('Incorrect array index: %s' %i)
            kwargs['title'] = '%s=%s m' % (axis, coord)
            kwargs['node'] = i
        else:
            # NOTE(review): NIErr() is called but not raised; if NIErr is an
            # exception class this branch silently falls through -- confirm.
            NIErr()
        return super().plot_slice(**kwargs)
# -------------------------------------------------------------------------------
# Array transformations
# -------------------------------------------------------------------------------
@timer
@logged
def modify_array(A, *args, **kwargs):
    """Apply whole-array and then trace-wise modifiers to a copy of *A*.

    Parameters
    ----------
    A : array
        1D/2D/3D array; a "trace" is the last dimension.
    **kwargs : keyword arguments (optional)
        'array_modifiers' / 'tracewise_modifiers' : lists of functions
        applied in list order, i.e. [func1, func2] means func2(func1(x)).
        The modifier lists themselves are assembled by
        _set_array_modifiers / _set_tracewise_modifiers.
        Modifiers may take *args and **kwargs, so plain lambdas are
        not recommended.

    Returns
    -------
    Modified copy of A (np.ndarray).

    Notes
    -----
    Trace-wise modifiers always operate on the last axis.
    """
    whole_array_funcs = _set_array_modifiers(**kwargs)
    per_trace_funcs = _set_tracewise_modifiers(**kwargs)
    out = np.array(A)
    for modify in whole_array_funcs:
        out = modify(out, *args, **kwargs)
    for modify in per_trace_funcs:
        # Apply along the last axis, i.e. trace by trace.
        out = np.apply_along_axis(modify, -1, out, *args, **kwargs)
    return out
@logged
def _set_array_modifiers(**kwargs):
    """Assemble the list of whole-array modifier functions from kwargs.

    Notes
    -----
    norm_bulk acts on the whole array,
    and norm acts trace-wise, but they both
    call the same function. FIXME: common interface
    """
    #from ..dsp.su import su_process
    from fullwavepy.numeric.generic import norm_bulk_max
    # NOTE: if the caller passed 'array_modifiers', the appends below
    # mutate that very list.
    modifiers = kw('array_modifiers', [], kwargs)
    clip = kw('clip', None, kwargs)
    clip_min = kw('clip_min', None, kwargs)
    clip_max = kw('clip_max', None, kwargs)
    norm_bulk = kw('norm_bulk', None, kwargs)
    func = kw('func', None, kwargs)  # only used by the commented-out su_process path
    # bulk-normalization (must be BEFORE clipping)
    if norm_bulk is not None:
        modifiers.append(norm_bulk_max)
    if clip is not None or clip_min is not None or clip_max is not None:
        modifiers.append(clip_array)
    #if func is not None:
        #modifiers.append(su_process)
    return modifiers
@logged
def _set_tracewise_modifiers(**kwargs):
    """
    Set a list of functions to modify
    a trace / an array of traces.
    Parameters
    ----------
    **kwargs : keyword arguments (optional)
        Current capabilities:
        tracewise_modifiers : list
            List of functions to apply subsequently
            on each trace. The order is following:
            [func1, func2, ...]
            first func1 will be applied and so on.
            Note that order of the elements is
            opposite to the composite function's
            notation:
            ...(func2(func1(trace))
    Returns
    -------
    modifiers : list
        List of modifiers.
    Notes
    -----
    The order matters, they don't commute in general.
    We could use lambda functions, but we want to
    pass **kwargs to modifiers, and it is bad to
    define lambda functions with *args, **kwargs.
    Clipping is done before normalization.
    """
    from fullwavepy.numeric.generic import normalize
    from fullwavepy.numeric.operators import derivative
    from fullwavepy.numeric.fourier import dft
    # NOTE: if the caller passed 'tracewise_modifiers', the appends below
    # mutate that very list.
    modifiers = kw('tracewise_modifiers', [], kwargs)
    norm = kw('norm', None, kwargs)
    spect = kw('spect', None, kwargs)
    deriv = kw('deriv', None, kwargs)
    # DERIVATIVE
    if deriv is not None:
        modifiers.append(derivative)
    # DISCRETE FOURIER TRANSFORM
    if spect is not None:
        modifiers.append(dft)
    # NORMALIZATION (applied last)
    if norm is not None:
        modifiers.append(normalize)
    return modifiers
@logged
def clip_array(A, clip=None, **kwargs):
    """Clip array values to [clip_min, clip_max] (taken from kwargs).

    clip : float
        Convenience to define both bounds at once as [-clip, clip];
        when given it overrides clip_min/clip_max.
    """
    lower = kw('clip_min', None, kwargs)
    upper = kw('clip_max', None, kwargs)
    if clip is not None:
        lower, upper = -clip, clip
    return np.clip(A, lower, upper)
@logged
def interleave_arrays(A1, A2, **kwargs):
    """
    Create an array composed of
    interleaved arrays A1 & A2.
    Parameters
    ----------
    A1, A2 : arrays
        2D arrays to interleave.
    **kwargs : keyword arguments (optional)
        - chunk_size : int
            No. of columns of 1 array
            before being proceeded
            by 2nd array etc.
    Returns
    -------
    Z : array
        2D array (or [] when there are fewer than 2*chunk_size columns).
    Notes
    -----
    Columns are taken alternately: chunk_size columns of A1, then
    chunk_size columns of A2, and so on.
    """
    chunk_size = kw('chunk_size', 10, kwargs)
    assert len(A1.shape) == 2
    if A1.shape != A2.shape:
        raise ValueError('Arrays must have same shapes.')
    A = np.array(A1)
    ncols = A.shape[0]
    if ncols < 2 * chunk_size:
        # NOTE(review): returns a plain list here but an ndarray otherwise.
        interleave_arrays._log.warning('No. of columns=' + str(ncols) +
            ' < 2 * chunk_size! Outputting empty array')
        return []
    nchunks = ncols // chunk_size // 2
    for i, Ai in enumerate([A1, A2]):
        # A2's chunks start offset by one chunk relative to A1's.
        i_start = i * chunk_size
        for j in range(nchunks):
            i1 = i_start + j * 2 * chunk_size
            i2 = i_start + j * 2 * chunk_size + (chunk_size) # IT USED TO BE WRONG (-1)
            A[i1 : i2] = Ai[i1 : i2]
    A = np.array(A)
    return A
# -------------------------------------------------------------------------------
# Array converters
# -------------------------------------------------------------------------------
@logged
def tseries2array(tseries, **kwargs):
    """Convert a 1d time series into a 3d array of shape (1, 1, nt)
    (fw3d format)."""
    out = np.zeros((1, 1, len(tseries)))
    out[0, 0, :] = tseries
    return out
# FIXME: MOVE SOMWHERE ELSE
@logged
def list2str(li, **kwargs):
    """Render a list as a comma-separated string (for SU command lines).

    Elements are converted with str(); an empty list yields ''.
    """
    # str.join is linear and needs no trailing-comma trimming,
    # unlike the previous manual concatenation loop.
    return ','.join(str(item) for item in li)
| 31.342884 | 135 | 0.528942 |
cd009b6a376ab491d7653d406cd8f30a59d2e0c0 | 498 | py | Python | tools/first_last_name.py | lucasayres/python-tools | 686b84986aae1b1714fa5645b1f2a3fd6ef8355d | [
"MIT"
] | 71 | 2018-06-28T17:38:15.000Z | 2022-02-08T17:42:42.000Z | tools/first_last_name.py | DalavanCloud/python-tools | 686b84986aae1b1714fa5645b1f2a3fd6ef8355d | [
"MIT"
] | null | null | null | tools/first_last_name.py | DalavanCloud/python-tools | 686b84986aae1b1714fa5645b1f2a3fd6ef8355d | [
"MIT"
] | 14 | 2018-07-08T03:29:29.000Z | 2022-03-22T21:04:39.000Z | # -*- coding: utf-8 -*-
def first_last_name(name):
    """Extract the first and last name from a full name.

    Args:
        name (str): Full name; may contain extra whitespace or middle names.

    Returns:
        str: 'first last', just the first word for a one-word name,
        or '' for empty/blank input.
    """
    # split() with no separator never yields empty strings, so no
    # explicit check for last == '' is needed.
    parts = name.strip().split()
    if not parts:
        return ''
    first = parts[0]
    last = parts[-1]
    # One-word names make first == last; return the word only once.
    if last == first:
        return first
    return first + ' ' + last
| 21.652174 | 56 | 0.514056 |
33c6f0b828a25f84b51c08afbd165bd11237bafc | 487 | py | Python | Curso_Python_3_UDEMY/programacao_funcional/imutabilidade_v2.py | DanilooSilva/Cursos_de_Python | 8f167a4c6e16f01601e23b6f107578aa1454472d | [
"MIT"
] | null | null | null | Curso_Python_3_UDEMY/programacao_funcional/imutabilidade_v2.py | DanilooSilva/Cursos_de_Python | 8f167a4c6e16f01601e23b6f107578aa1454472d | [
"MIT"
] | null | null | null | Curso_Python_3_UDEMY/programacao_funcional/imutabilidade_v2.py | DanilooSilva/Cursos_de_Python | 8f167a4c6e16f01601e23b6f107578aa1454472d | [
"MIT"
] | null | null | null | #! python3
from locale import setlocale, LC_ALL
from calendar import mdays, month_name
from functools import reduce
# Português do Brasil
setlocale(LC_ALL, 'pt_BR')
def mes_com_31(mes): return mdays[mes] == 31
def get_nome_mes(mes): return month_name[mes]
def juntar_meses(todos, nome_mes): return f'{todos}\n- {nome_mes}'
print(reduce(juntar_meses,
map(get_nome_mes, filter(mes_com_31, range(1, 13))),
'Meses com 31 dias:'))
| 21.173913 | 66 | 0.671458 |
dabff48250b9e310723721ce9c239af7c942deca | 401 | py | Python | convert_spm_to_hf.py | bigscience-workshop/tokenization | 9fcf1bfdb95ee8849486ab2e5a1b354d7a8d5270 | [
"Apache-2.0"
] | 2 | 2021-10-19T00:08:34.000Z | 2022-02-03T09:10:29.000Z | convert_spm_to_hf.py | bigscience-workshop/tokenization | 9fcf1bfdb95ee8849486ab2e5a1b354d7a8d5270 | [
"Apache-2.0"
] | null | null | null | convert_spm_to_hf.py | bigscience-workshop/tokenization | 9fcf1bfdb95ee8849486ab2e5a1b354d7a8d5270 | [
"Apache-2.0"
] | 1 | 2021-08-09T03:02:00.000Z | 2021-08-09T03:02:00.000Z | from transformers.convert_slow_tokenizer import SpmConverter
import sys
class SPMTokenizer:
def __init__(self, vocab_file):
self.vocab_file = vocab_file
input_file = sys.argv[1] + '.model'
output_file = sys.argv[1] + '.json'
original_tokenizer = SPMTokenizer(input_file)
converter = SpmConverter(original_tokenizer)
tokenizer = converter.converted()
tokenizer.save(output_file)
| 26.733333 | 60 | 0.765586 |
a4da58d32509cc71cf36716bd685f666004c84e2 | 6,767 | py | Python | rotkehlchen/externalapis/coingecko.py | davbre/rotki | bab693a98a1834df2f983db100e293bd5a44ab0c | [
"BSD-3-Clause"
] | 1 | 2020-11-14T12:20:37.000Z | 2020-11-14T12:20:37.000Z | rotkehlchen/externalapis/coingecko.py | davbre/rotki | bab693a98a1834df2f983db100e293bd5a44ab0c | [
"BSD-3-Clause"
] | null | null | null | rotkehlchen/externalapis/coingecko.py | davbre/rotki | bab693a98a1834df2f983db100e293bd5a44ab0c | [
"BSD-3-Clause"
] | null | null | null | import json
import logging
from typing import Any, Dict, List, NamedTuple, Optional, Union, overload
from urllib.parse import urlencode
import requests
from typing_extensions import Literal
from rotkehlchen.assets.asset import Asset
from rotkehlchen.constants import ZERO
from rotkehlchen.errors import RemoteError
from rotkehlchen.fval import FVal
from rotkehlchen.logging import RotkehlchenLogsAdapter
from rotkehlchen.typing import Price
from rotkehlchen.utils.serialization import rlk_jsonloads
logger = logging.getLogger(__name__)
log = RotkehlchenLogsAdapter(logger)
class CoingeckoImageURLs(NamedTuple):
    """The three image-size URLs coingecko serves per asset."""
    thumb: str
    small: str
    large: str
class CoingeckoAssetData(NamedTuple):
    """Parsed per-asset metadata returned by coingecko's /coins endpoint."""
    identifier: str
    symbol: str
    name: str
    description: str
    images: CoingeckoImageURLs
# "vs currencies" accepted by coingecko's /simple/price endpoint.
COINGECKO_SIMPLE_VS_CURRENCIES = [
    # crypto
    "btc", "eth", "ltc", "bch", "bnb", "eos", "xrp", "xlm", "link", "dot",
    "yfi",
    # fiat
    "usd", "aed", "ars", "aud", "bdt", "bhd", "bmd", "brl", "cad", "chf",
    "clp", "cny", "czk", "dkk", "eur", "gbp", "hkd", "huf", "idr", "ils",
    "inr", "jpy", "krw", "kwd", "lkr", "mmk", "mxn", "myr", "nok", "nzd",
    "php", "pkr", "pln", "rub", "sar", "sek", "sgd", "thb", "try", "twd",
    "uah", "vef", "vnd", "zar",
    # IMF special drawing rights and precious metals
    "xdr", "xag", "xau",
]
class Coingecko():
    """Thin client for the coingecko v3 HTTP API."""

    def __init__(self) -> None:
        self.session = requests.session()
        self.session.headers.update({'User-Agent': 'rotkehlchen'})

    @overload  # noqa: F811
    def _query(
            self,
            module: Literal['coins/list'],
            subpath: Optional[str] = None,
            options: Optional[Dict[str, Any]] = None,
    ) -> List[Dict[str, Any]]:
        ...

    @overload  # noqa: F811
    def _query(
            self,
            module: Literal['coins', 'simple/price'],
            subpath: Optional[str] = None,
            options: Optional[Dict[str, Any]] = None,
    ) -> Dict[str, Any]:
        ...

    def _query(
            self,
            module: str,
            subpath: Optional[str] = None,
            options: Optional[Dict[str, Any]] = None,
    ) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
        """Performs a coingecko query

        May raise:
        - RemoteError if there is a problem querying coingecko
        """
        if options is None:
            options = {}
        url = f'https://api.coingecko.com/api/v3/{module}/'
        if subpath:
            url += subpath
        try:
            response = self.session.get(f'{url}?{urlencode(options)}')
        except requests.exceptions.ConnectionError as e:
            raise RemoteError(f'Coingecko API request failed due to {str(e)}')

        if response.status_code != 200:
            raise RemoteError(
                f'Coingecko API request {response.url} failed with HTTP status '
                f'code: {response.status_code}',
            )

        try:
            decoded_json = rlk_jsonloads(response.text)
        except json.decoder.JSONDecodeError as e:
            # Bugfix: the message previously said "Kraken" (copy/paste error).
            raise RemoteError(f'Invalid JSON in Coingecko response. {e}')

        return decoded_json

    def asset_data(self, asset: Asset) -> CoingeckoAssetData:
        """Query coingecko's /coins endpoint for metadata of one asset.

        May raise:
        - UnsupportedAsset() if the asset is not supported by coingecko
        - RemoteError if there is a problem querying coingecko
        """
        options = {
            # Include all localized languages in response (true/false) [default: true]
            'localization': False,
            # Include tickers data (true/false) [default: true]
            'tickers': False,
            # Include market_data (true/false) [default: true]
            'market_data': False,
            # Include communitydata (true/false) [default: true]
            'community_data': False,
            # Include developer data (true/false) [default: true]
            'developer_data': False,
            # Include sparkline 7 days data (eg. true, false) [default: false]
            'sparkline': False,
        }
        gecko_id = asset.to_coingecko()
        data = self._query(
            module='coins',
            subpath=f'{gecko_id}',
            options=options,
        )

        try:
            parsed_data = CoingeckoAssetData(
                identifier=gecko_id,
                symbol=data['symbol'],
                name=data['name'],
                description=data['description']['en'],
                images=CoingeckoImageURLs(
                    thumb=data['image']['thumb'],
                    small=data['image']['small'],
                    large=data['image']['large'],
                ),
            )
        except KeyError as e:
            raise RemoteError(f'Missing expected key entry {e} in coingecko coin data response')

        return parsed_data

    def all_coins(self) -> List[Dict[str, Any]]:
        """Return coingecko's full list of known coins."""
        return self._query(module='coins/list')

    def simple_price(self, from_asset: Asset, to_asset: Asset) -> Price:
        """Returns a simple price for from_asset to to_asset in coingecko

        Uses the simple/price endpoint of coingecko. If to_asset is not part of the
        coingecko simple vs currencies or if from_asset is not supported in coingecko
        price zero is returned.

        May raise:
        - RemoteError if there is a problem querying coingecko
        """
        vs_currency = to_asset.identifier.lower()
        if vs_currency not in COINGECKO_SIMPLE_VS_CURRENCIES:
            log.warning(
                f'Tried to query coingecko simple price from {from_asset.identifier} '
                f'to {to_asset.identifier}. But to_asset is not supported in simple price query',
            )
            return Price(ZERO)

        if from_asset.coingecko is None:
            log.warning(
                f'Tried to query coingecko simple price from {from_asset.identifier} '
                f'to {to_asset.identifier}. But from_asset is not supported in coingecko',
            )
            return Price(ZERO)

        result = self._query(
            module='simple/price',
            options={
                'ids': from_asset.coingecko,
                'vs_currencies': vs_currency,
            })

        try:
            return Price(FVal(result[from_asset.coingecko][vs_currency]))
        except KeyError as e:
            log.warning(
                f'Queried coingecko simple price from {from_asset.identifier} '
                f'to {to_asset.identifier}. But got key error for {str(e)} when '
                f'processing the result.',
            )
            return Price(ZERO)
| 27.733607 | 97 | 0.558889 |
0c95a22e246943817eab2eabc14c96cd064adbdf | 1,614 | py | Python | emh_code/tf-idf/lzd_tf-idf.py | NCBI-Hackathons/Virulence_AMR_Data_Index | 57f9c0a21fc5f78ef6dc73c978cd27ca61858dde | [
"MIT"
] | 1 | 2020-02-05T17:37:39.000Z | 2020-02-05T17:37:39.000Z | emh_code/tf-idf/lzd_tf-idf.py | NCBI-Hackathons/Virulence_AMR_Data_Index_Resource | 57f9c0a21fc5f78ef6dc73c978cd27ca61858dde | [
"MIT"
] | null | null | null | emh_code/tf-idf/lzd_tf-idf.py | NCBI-Hackathons/Virulence_AMR_Data_Index_Resource | 57f9c0a21fc5f78ef6dc73c978cd27ca61858dde | [
"MIT"
] | 2 | 2019-08-13T15:08:30.000Z | 2019-08-13T16:06:23.000Z | from math import log, ceil
import sys
import os
from random import shuffle
alphabet = 'gatc'
# Create a subsequence frequency dictionary using the Lempel-Ziv technique.
def lzd(s, d_init=None):
    """Build a Lempel-Ziv subsequence frequency dictionary for *s*.

    Scans *s*, extending the current phrase until it is novel; when a new
    phrase is found it is registered with count 0 and the count of its
    one-shorter prefix is incremented.

    d_init defaults to a fresh {char: 0} dict over the module's alphabet
    (replaces the previous mutable default argument; behaviour unchanged
    since the default was never mutated).
    """
    if d_init is None:
        d_init = {k: 0 for k in alphabet}
    d = dict(d_init)
    i = 0
    for n in range(1, len(s)):
        if s[i:n] not in d:
            d[s[i:n]] = 0
            d[s[i:n-1]] = d.get(s[i:n-1], -1) + 1
            i = n
    return d
# Calculate C(x) based on input parameters.
# Compute tf-idf over Lempel-Ziv phrases of DNA files in a directory
# and print average phrase length per tf-idf value.
if __name__ == "__main__":
    dna_dir = sys.argv[1]
    seqs = {}    # doc index -> cleaned sequence
    tf = {}      # (phrase, doc) -> term frequency
    idf = {}     # phrase -> set of docs (later: idf score)
    tfidf = {}   # (phrase, doc) -> tf-idf
    dsz = 0      # total dictionary size over all docs
    for d, xf in enumerate(os.listdir(dna_dir)):
        # NOTE(review): path join by concatenation assumes dna_dir ends in a
        # separator -- confirm.
        x = open(dna_dir+xf).read()
        x = x.lower()
        # Keep only characters of the DNA alphabet.
        x = ''.join([c for c in x if c in alphabet])
        seqs[d] = x
        xd = lzd(x)
        dsz += len(xd)
        # NOTE(review): this per-doc sig is overwritten and unused.
        sig = ceil(log(len(xd), len(alphabet)))
        for t, v in xd.items():
            if v > 0:
                tf[(t, d)] = tf.get((t, d), 0) + 1
                idf_cnt = idf.get(t, set())
                idf_cnt.add(d)
                idf[t] = idf_cnt
    N = len(seqs)
    # Convert document sets into idf scores: log2(N) - log2(df).
    for t, idf_cnt in idf.items():
        idf[t] = log(N,2)-log(len(idf[t]),2)
    for d in seqs.keys():
        for t in idf.keys():
            tfidf[(t,d)] = tf.get((t,d),0)*idf[t]
    sig = ceil(log(dsz,4))
    # Average phrase length per tf-idf value, over a random 100k sample.
    avg_sig = {}
    items = list(tfidf.items())
    shuffle(items)
    for i, ((t, d), v) in enumerate(items[:100000]):
        avg_sig[v] = avg_sig.get(v, []) + [len(t)]
    for k, v in avg_sig.items():
        avg_sig[k] = sum(v)/float(len(v))
    print("\n".join([str(k)+" "+str(v) for k,v in avg_sig.items()]))
be801004abdace881807b4dd294aca7630564e4d | 925 | py | Python | wakeup.py | mcagriaksoy/fipy-GPS-Accelerometer-LoRa | f3fed3f4f64300f478c45ea6d4f74735d61566e3 | [
"MIT"
] | 10 | 2018-06-16T21:12:26.000Z | 2022-03-22T20:50:58.000Z | wakeup.py | mcagriaksoy/fipy-GPS-Accelerometer-LoRa | f3fed3f4f64300f478c45ea6d4f74735d61566e3 | [
"MIT"
] | null | null | null | wakeup.py | mcagriaksoy/fipy-GPS-Accelerometer-LoRa | f3fed3f4f64300f478c45ea6d4f74735d61566e3 | [
"MIT"
] | 6 | 2018-12-06T10:12:33.000Z | 2021-02-12T10:22:18.000Z | from pytrack import Pytrack
from LIS2HH12 import LIS2HH12
import time
py = Pytrack()
# display the reset reason code and the sleep remaining in seconds
# possible values of wakeup reason are:
# WAKE_REASON_ACCELEROMETER = 1
# WAKE_REASON_PUSH_BUTTON = 2
# WAKE_REASON_TIMER = 4
# WAKE_REASON_INT_PIN = 8
print("Wakeup reason: " + str(py.get_wake_reason()))
print("Approximate sleep remaining: " + str(py.get_sleep_remaining()) + " sec")
time.sleep(0.5)
# enable wakeup source from INT pin
py.setup_int_pin_wake_up(False)
acc = LIS2HH12()
# enable activity and also inactivity interrupts, using the default callback handler
py.setup_int_wake_up(True, True)
# set the acceleration threshold to 2000mG (2G) and the min duration to 200ms
acc.enable_activity_interrupt(2000, 200)
# go to sleep for 5 minutes maximum if no accelerometer interrupt happens
py.setup_sleep(300)
py.go_to_sleep()
| 28.90625 | 85 | 0.753514 |
ed7ccdbf0cbb38b74740e4c6ca02afcc0f6145bb | 839 | py | Python | where/lib/exceptions.py | ingridfausk/where | b65398911075b7ddef3a3a1146efa428eae498fe | [
"MIT"
] | 16 | 2018-08-31T10:31:11.000Z | 2022-03-15T16:07:24.000Z | where/lib/exceptions.py | ingridfausk/where | b65398911075b7ddef3a3a1146efa428eae498fe | [
"MIT"
] | 5 | 2018-07-13T14:04:24.000Z | 2021-06-17T02:14:44.000Z | where/lib/exceptions.py | ingridfausk/where | b65398911075b7ddef3a3a1146efa428eae498fe | [
"MIT"
] | 15 | 2018-06-07T05:45:24.000Z | 2022-03-15T16:07:27.000Z | """Definition of Where-specific exceptions
Description:
------------
Custom exceptions used by Where for more specific error messages and handling.
"""
from midgard.dev.exceptions import MidgardException # noqa
class WhereException(Exception):
pass
class WhereExit(SystemExit, WhereException):
pass
class InitializationError(WhereException):
pass
class InvalidArgsError(WhereException):
pass
class FieldExistsError(WhereException):
pass
class FieldDoesNotExistError(WhereException):
pass
class MissingDataError(WhereException):
pass
class UnknownEnumError(WhereException):
pass
class UnknownPipelineError(WhereException):
pass
class UnknownRadioSourceError(WhereException):
pass
class UnknownSiteError(WhereException):
pass
class UnitError(WhereException):
pass
| 14.220339 | 78 | 0.761621 |
f9257af77f53cdbb4d406169a84f5c8e2420490a | 19,281 | py | Python | throxy.py | proxies8888/proxytest-1 | 96aeef133d30dd91d3894fe1b12c557389b4d904 | [
"MIT"
] | 9 | 2015-02-25T01:25:53.000Z | 2022-01-27T03:01:35.000Z | throxy.py | proxies8888/proxytest-1 | 96aeef133d30dd91d3894fe1b12c557389b4d904 | [
"MIT"
] | null | null | null | throxy.py | proxies8888/proxytest-1 | 96aeef133d30dd91d3894fe1b12c557389b4d904 | [
"MIT"
] | 4 | 2016-11-05T04:49:34.000Z | 2020-09-30T18:02:16.000Z | #! /usr/bin/env python
# throxy.py - HTTP proxy to simulate dial-up access
# Copyright (c) 2007 Johann C. Rocholl <johann@rocholl.net>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Throxy: throttling HTTP proxy in one Python file
To use it, run this script on your local machine and adjust your
browser settings to use 127.0.0.1:8080 as HTTP proxy.
* Simulate a slow connection (like dial-up).
* Adjustable bandwidth limit for download and upload.
* Optionally dump HTTP headers and content for debugging.
* Decompress gzip content encoding for debugging.
* Multiple connections, without threads (uses asyncore).
* Only one source file, written in pure Python.
Simulate analog modem connection:
$ python throxy.py -u28.8 -d57.6
Show all HTTP headers (request & reply):
$ python throxy.py -qrs
Dump HTTP headers and content to a file, without size limits:
$ python throxy.py -rsRS -l0 -L0 -g0 > dump.txt
Tell command line tools to use the proxy:
$ export http_proxy=127.0.0.1:8080
"""
import sys
import asyncore
import socket
import time
import gzip
import struct
import cStringIO
import re
__revision__ = '$Rev$'
KILO = 1000 # decimal or binary kilo
request_match = re.compile(r'^([A-Z]+) (\S+) (HTTP/\S+)$').match
def debug(message, newline=True):
    """Print message to stderr (padded to clear the line) unless quiet."""
    if not options.quiet:
        if newline:
            message = message.ljust(79) + '\n'
        sys.stderr.write(message)
class Header:
    """HTTP (request or reply) header parser.

    NOTE: this file is Python 2 (print statements, old except syntax).
    """

    def __init__(self):
        self.data = ''        # raw bytes not yet parsed into lines
        self.lines = []       # parsed header lines (without CR/LF)
        self.complete = False # True once the blank line was seen

    def append(self, new_data):
        """
        Add more data to the header.
        Any data after the end of the header is returned, as it may
        contain content, or even the start of the next request.
        """
        self.data += new_data
        while not self.complete:
            newline = self.data.find('\n')
            if newline < 0:
                break # No complete line found
            line = self.data[:newline].rstrip('\r')
            if len(line):
                self.lines.append(line)
            else:
                # Blank line terminates the header section.
                self.complete = True
                self.content_type = self.extract('Content-Type')
                self.content_encoding = self.extract('Content-Encoding')
                if self.content_encoding == 'gzip':
                    # Buffer compressed content for later decompression.
                    self.gzip_data = cStringIO.StringIO()
            self.data = self.data[newline+1:]
        if self.complete:
            rest = self.data
            self.data = ''
            return rest
        else:
            return ''

    def extract(self, name, default=''):
        """Extract a header field (case-insensitive name match)."""
        name = name.lower()
        for line in self.lines:
            if not line.count(':'):
                continue
            key, value = line.split(':', 1)
            if key.lower() == name:
                return value.strip()
        return default

    def extract_host(self):
        """Extract host and perform DNS lookup."""
        self.host = self.extract('Host')
        if self.host is None:
            return
        if self.host.count(':'):
            # Host header carries an explicit port.
            self.host_name, self.host_port = self.host.split(':')
            self.host_port = int(self.host_port)
        else:
            self.host_name = self.host
            self.host_port = 80
        self.host_ip = socket.gethostbyname(self.host_name)
        self.host_addr = (self.host_ip, self.host_port)

    def extract_request(self):
        """Extract path from HTTP request."""
        match = request_match(self.lines[0])
        if not match:
            raise ValueError("malformed request line " + self.lines[0])
        self.method, self.url, self.proto = match.groups()
        if self.method.upper() == 'CONNECT':
            raise ValueError("method CONNECT is not supported")
        prefix = 'http://' + self.host
        if not self.url.startswith(prefix):
            raise ValueError("URL doesn't start with " + prefix)
        # Strip scheme+host to get the origin-form path.
        self.path = self.url[len(prefix):]

    def dump_title(self, from_addr, to_addr, direction, what):
        """Print a title before dumping headers or content."""
        print '==== %s %s (%s:%d => %s:%d) ====' % (
            direction, what,
            from_addr[0], from_addr[1],
            to_addr[0], to_addr[1])

    def dump(self, from_addr, to_addr, direction='sending'):
        """Dump header lines to stdout."""
        self.dump_title(from_addr, to_addr, direction, 'headers')
        print '\n'.join(self.lines)
        print

    def dump_content(self, content, from_addr, to_addr, direction='sending'):
        """Dump content to stdout (gunzipped if possible, repr-escaped,
        truncated to the configured size limits)."""
        self.dump_title(from_addr, to_addr, direction, 'content')
        if self.content_encoding:
            print "(%d bytes of %s with %s encoding)" % (len(content),
                repr(self.content_type), repr(self.content_encoding))
        else:
            print "(%d bytes of %s)" % (len(content), repr(self.content_type))
        if self.content_encoding == 'gzip':
            if options.gzip_size_limit == 0 or \
                   self.gzip_data.tell() < options.gzip_size_limit:
                self.gzip_data.write(content)
            try:
                content = self.gunzip()
            except IOError, error:
                content = 'Could not gunzip: ' + str(error)
        # Text-ish content types get the (usually larger) text dump limit.
        if self.content_type.startswith('text/'):
            limit = options.text_dump_limit
        elif self.content_type.startswith('application/') and \
                 self.content_type.count('xml'):
            limit = options.text_dump_limit
        else:
            limit = options.data_dump_limit
        content = repr(content)
        if len(content) < limit or limit == 0:
            print content
        else:
            print content[:limit] + '(showing only %d bytes)' % limit
        print

    def gunzip(self):
        """Decompress gzip content."""
        if options.gzip_size_limit and \
               self.gzip_data.tell() > options.gzip_size_limit:
            raise IOError("More than %d bytes" % options.gzip_size_limit)
        self.gzip_data.seek(0) # Seek to start of data
        try:
            gzip_file = gzip.GzipFile(
                fileobj=self.gzip_data, mode='rb')
            result = gzip_file.read()
            gzip_file.close()
        except struct.error:
            # Truncated/partial gzip stream; report as IOError.
            raise IOError("Caught struct.error from gzip module")
        self.gzip_data.seek(0, 2) # Seek to end of data
        return result
class Throttle:
    """Bandwidth limit tracker.

    Keeps a sliding-window log of (timestamp, bytes) send events and
    derives both a recency-weighted and a raw throughput from it.
    NOTE: Python 2 file -- the divisions below are integer divisions
    where both operands are ints.
    """

    def __init__(self, kbps, interval=1.0):
        # kbps uses decimal kilo (see KILO at module level).
        self.bytes_per_second = int(kbps * KILO) / 8
        self.interval = interval
        # Smallest chunk worth sending; avoids many tiny writes.
        self.fragment_size = min(512, self.bytes_per_second / 4)
        self.transmit_log = []
        self.weighted_throughput = 0.0
        self.real_throughput = 0
        self.last_updated = time.time()

    def update_throughput(self, now):
        """Update weighted and real throughput."""
        self.weighted_throughput = 0.0
        self.real_throughput = 0
        for timestamp, bytes in self.transmit_log:
            # Event's age in seconds
            age = now - timestamp
            if age > self.interval:
                continue
            # Newer entries count more
            weight = 2.0 * (self.interval - age) / self.interval
            self.weighted_throughput += bytes * weight
            self.real_throughput += bytes
        self.last_updated = now

    def trim_log(self):
        """Forget transmit log entries that are too old."""
        now = time.time()
        horizon = now - self.interval
        popped = 0
        while len(self.transmit_log) and self.transmit_log[0][0] <= horizon:
            self.transmit_log.pop(0)
            popped += 1
        # Recompute if anything was dropped, or at most every 0.1 s.
        if popped or now - self.last_updated > 0.1:
            self.update_throughput(now)

    def log_sent_bytes(self, bytes):
        """Add timestamp and byte count to transmit log."""
        self.transmit_log.append((time.time(), bytes))
        self.update_throughput(time.time())

    def sendable(self):
        """How many bytes can we send without exceeding bandwidth?"""
        self.trim_log()
        weighted_bytes = int(self.weighted_throughput / self.interval)
        return max(0, self.bytes_per_second - weighted_bytes)

    def weighted_kbps(self):
        """Compute recent (recency-weighted) bandwidth usage, in kbps."""
        self.trim_log()
        return 8 * self.weighted_throughput / float(KILO) / self.interval

    def real_kbps(self):
        """Compute recent (raw) bandwidth usage, in kbps."""
        self.trim_log()
        return 8 * self.real_throughput / float(KILO) / self.interval
class ThrottleSender(asyncore.dispatcher):
    """Data connection with send buffer and bandwidth limit."""

    def __init__(self, throttle, channel=None):
        self.throttle = throttle
        if channel is None:
            asyncore.dispatcher.__init__(self)
        else:
            asyncore.dispatcher.__init__(self, channel)
        self.buffer = []  # pending outgoing chunks
        self.should_close = False  # close once the buffer drains

    def writable(self):
        """Check if this channel is ready to write some data."""
        # Write only when at least a fragment's worth of (half the)
        # sendable budget is available.
        return (len(self.buffer) and
                self.throttle.sendable() / 2 > self.throttle.fragment_size)

    def handle_write(self):
        """Write some data to the socket."""
        max_bytes = self.throttle.sendable() / 2
        if max_bytes < self.throttle.fragment_size:
            return
        bytes = self.send(self.buffer[0][:max_bytes])
        self.throttle.log_sent_bytes(bytes)
        if bytes == len(self.buffer[0]):
            self.buffer.pop(0)
        else:
            # Partial send: keep the unsent remainder at the front.
            self.buffer[0] = self.buffer[0][bytes:]
        self.check_close()

    def check_close(self):
        """Close if requested and all data was sent."""
        if self.should_close and len(self.buffer) == 0:
            self.close()
class ClientChannel(ThrottleSender):
    """A client connection."""

    def __init__(self, channel, addr, download_throttle, upload_throttle):
        ThrottleSender.__init__(self, download_throttle, channel)
        self.upload_throttle = upload_throttle
        self.addr = addr
        self.header = Header()  # incrementally parsed request header
        self.content_length = 0  # request body bytes still expected
        self.server = None  # upstream ServerChannel for current request
        self.handle_connect()

    def readable(self):
        """Check if this channel is ready to receive some data."""
        # Backpressure: don't read more until the server buffer drained.
        return self.server is None or len(self.server.buffer) == 0

    def handle_read(self):
        """Read some data from the client."""
        data = self.recv(8192)
        while len(data):
            if self.content_length:
                # Forward request body bytes upstream.
                bytes = min(self.content_length, len(data))
                self.server.buffer.append(data[:bytes])
                if options.dump_send_content:
                    self.header.dump_content(
                        data[:bytes], self.addr, self.header.host_addr)
                data = data[bytes:]
                self.content_length -= bytes
                if not len(data):
                    break
            if self.header.complete and self.content_length == 0:
                # Previous request finished: start parsing the next one
                # (persistent client connection).
                debug("client %s:%d sends a new request" % self.addr)
                self.header = Header()
                self.server = None
            data = self.header.append(data)
            if self.header.complete:
                self.content_length = int(
                    self.header.extract('Content-Length', 0))
                self.header.extract_host()
                if options.dump_send_headers:
                    self.header.dump(self.addr, self.header.host_addr)
                self.server = ServerChannel(
                    self, self.header, self.upload_throttle)

    def handle_connect(self):
        """Print connect message to stderr."""
        debug("client %s:%d connected" % self.addr)

    def handle_close(self):
        """Print disconnect message to stderr."""
        self.close()
        debug("client %s:%d disconnected" % self.addr)
class ServerChannel(ThrottleSender):
    """Connection to HTTP server."""

    def __init__(self, client, header, upload_throttle):
        ThrottleSender.__init__(self, upload_throttle)
        self.client = client
        self.addr = header.host_addr
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connect(self.addr)
        self.send_header(header)
        self.header = Header()  # reply header, parsed incrementally

    def send_header(self, header):
        """Send HTTP request header to the server."""
        header.extract_request()
        self.send_line(' '.join(
            (header.method, header.path, header.proto)))
        # Force a non-persistent upstream connection; hop-by-hop
        # headers from the client are dropped below.
        self.send_line('Connection: close')
        for line in header.lines[1:]:
            if not (line.startswith('Keep-Alive: ') or
                    line.startswith('Connection: ') or
                    line.startswith('Proxy-')):
                self.send_line(line)
        self.send_line('')

    def send_line(self, line):
        """Send one line of the request header to the server."""
        self.buffer.append(line + '\r\n')

    def receive_header(self, header):
        """Send HTTP reply header to the client."""
        # Hop-by-hop headers are stripped before relaying.
        for line in header.lines:
            if not (line.startswith('Keep-Alive: ') or
                    line.startswith('Connection: ') or
                    line.startswith('Proxy-')):
                self.receive_line(line)
        self.receive_line('')

    def receive_line(self, line):
        """Send one line of the reply header to the client."""
        self.client.buffer.append(line + '\r\n')

    def readable(self):
        """Check if this channel is ready to receive some data."""
        # Backpressure: read only when the client buffer has drained.
        return len(self.client.buffer) == 0

    def handle_read(self):
        """Read some data from the server."""
        data = self.recv(8192)
        if not self.header.complete:
            data = self.header.append(data)
            if self.header.complete:
                if options.dump_recv_headers:
                    self.header.dump(self.addr, self.client.addr, 'receiving')
                self.receive_header(self.header)
        if self.header.complete and len(data):
            if options.dump_recv_content:
                self.header.dump_content(
                    data, self.addr, self.client.addr, 'receiving')
            self.client.buffer.append(data)

    def handle_connect(self):
        """Print connect message to stderr."""
        debug("server %s:%d connected" % self.addr)

    def handle_close(self):
        """Print disconnect message to stderr."""
        self.close()
        debug("server %s:%d disconnected" % self.addr)
        # Propagate the server's close request to the client side once
        # its buffer is flushed.
        if self.header.extract('Connection').lower() == 'close':
            self.client.should_close = True
        self.client.check_close()
class ProxyServer(asyncore.dispatcher):
    """Listen for client connections."""

    def __init__(self):
        asyncore.dispatcher.__init__(self)
        # One shared throttle per direction, so all connections compete
        # for the same bandwidth budget.
        self.download_throttle = Throttle(options.download)
        self.upload_throttle = Throttle(options.upload)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.addr = (options.interface, options.port)
        self.bind(self.addr)
        self.listen(5)
        debug("listening on %s:%d" % self.addr)

    def readable(self):
        # Piggyback on the poll loop to refresh the status line.
        debug('%8.1f kbps up %8.1f kbps down\r' % (
            self.upload_throttle.real_kbps(),
            self.download_throttle.real_kbps(),
            ), newline=False)
        return True

    def handle_accept(self):
        """Accept a new connection from a client."""
        channel, addr = self.accept()
        # Loopback-only by default; -o opens the proxy to remote hosts.
        if addr[0] == '127.0.0.1' or options.allow_remote:
            ClientChannel(channel, addr,
                          self.download_throttle, self.upload_throttle)
        else:
            channel.close()
            debug("remote client %s:%d not allowed" % addr)
if __name__ == '__main__':
    from optparse import OptionParser
    # Version string is derived from the Subversion Rev keyword.
    version = '%prog ' + __revision__.strip('$').replace('Rev: ', 'r')
    parser = OptionParser(version=version)
    parser.add_option('-i', dest='interface', action='store', type='string',
                      metavar='<ip>', default='',
                      help="listen on this interface only (default all)")
    parser.add_option('-p', dest='port', action='store', type='int',
                      metavar='<port>', default=8080,
                      help="listen on this port number (default 8080)")
    parser.add_option('-d', dest='download', action='store', type='float',
                      metavar='<kbps>', default=28.8,
                      help="download bandwidth in kbps (default 28.8)")
    parser.add_option('-u', dest='upload', action='store', type='float',
                      metavar='<kbps>', default=28.8,
                      help="upload bandwidth in kbps (default 28.8)")
    parser.add_option('-o', dest='allow_remote', action='store_true',
                      help="allow remote clients (WARNING: open proxy)")
    parser.add_option('-q', dest='quiet', action='store_true',
                      help="don't show connect and disconnect messages")
    parser.add_option('-s', dest='dump_send_headers', action='store_true',
                      help="dump headers sent to server")
    parser.add_option('-r', dest='dump_recv_headers', action='store_true',
                      help="dump headers received from server")
    parser.add_option('-S', dest='dump_send_content', action='store_true',
                      help="dump content sent to server")
    parser.add_option('-R', dest='dump_recv_content', action='store_true',
                      help="dump content received from server")
    parser.add_option('-l', dest='text_dump_limit', action='store',
                      metavar='<bytes>', type='int', default=1024,
                      help="maximum length of dumped text content (default 1024)")
    parser.add_option('-L', dest='data_dump_limit', action='store',
                      metavar='<bytes>', type='int', default=256,
                      help="maximum length of dumped binary content (default 256)")
    parser.add_option('-g', dest='gzip_size_limit', action='store',
                      metavar='<bytes>', type='int', default=8192,
                      help="maximum size for gzip decompression (default 8192)")
    options, args = parser.parse_args()
    proxy = ProxyServer()
    try:
        asyncore.loop(timeout=0.1)
    except:
        # Deliberate bare except: shut the listening socket down on ANY
        # exit (including KeyboardInterrupt/SystemExit), then re-raise
        # so the cause stays visible.
        proxy.shutdown(2)
        proxy.close()
        raise
| 37.731898 | 78 | 0.605985 |
a09a220d0b32ec8e7dfa7ffab568ef24ea43a67d | 916 | py | Python | scripts/web.py | 452/micropython-examples | b366d6be43d69664d8e0d09aa67b6efde5fc3a16 | [
"Apache-2.0"
] | null | null | null | scripts/web.py | 452/micropython-examples | b366d6be43d69664d8e0d09aa67b6efde5fc3a16 | [
"Apache-2.0"
] | null | null | null | scripts/web.py | 452/micropython-examples | b366d6be43d69664d8e0d09aa67b6efde5fc3a16 | [
"Apache-2.0"
] | null | null | null | def s():
# minimal Ajax in Control Webserver
CONTENT = """\
HTTP/1.0 200 OK
Hello #{} from MicroPython!
"""
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 80))
s.listen(0) # just queue up some requests
counter=0
while True:
conn, addr = s.accept()
print("Got a connection from %s" % str(addr))
request = conn.recv(1024)
conn.sendall('HTTP/1.1 200 OK\nConnection: close\nServer: nanoWiPy\nContent-Type: text/html\n\n')
## print("Content = %s" % str(request))
request = str(request)
ib = request.find('Val=')
if ib > 0 :
ie = request.find(' ', ib)
Val = request[ib+4:ie]
print("Val =", Val)
conn.send(Val)
else:
# conn.send(bytes(CONTENT.format(counter), "ascii"))
with open('/lib/AiCWebpage.htm', 'r') as html:
conn.send(html.read())
conn.sendall('\n')
conn.close()
print("Connection wth %s closed" % str(addr))
counter += 1
| 27.757576 | 99 | 0.637555 |
47a92cf0aecd6a551276d6ca794c2ad10a0cf828 | 17,847 | py | Python | launcher.py | edgarp413/apo-bot | da3d09e02fc048714f5731f0f5d36e1d814b3bf9 | [
"MIT"
] | null | null | null | launcher.py | edgarp413/apo-bot | da3d09e02fc048714f5731f0f5d36e1d814b3bf9 | [
"MIT"
] | null | null | null | launcher.py | edgarp413/apo-bot | da3d09e02fc048714f5731f0f5d36e1d814b3bf9 | [
"MIT"
] | null | null | null | from __future__ import print_function
import os
import sys
import subprocess
try: # Older Pythons lack this
import urllib.request # We'll let them reach the Python
from importlib.util import find_spec # check anyway
except ImportError:
pass
import platform
import webbrowser
import hashlib
import argparse
import shutil
import stat
import time
try:
import pip
except ImportError:
pip = None
# Local requirements are installed into ./lib and made importable.
REQS_DIR = "lib"
sys.path.insert(0, REQS_DIR)
REQS_TXT = "requirements.txt"
REQS_NO_AUDIO_TXT = "requirements_no_audio.txt"
FFMPEG_BUILDS_URL = "https://ffmpeg.zeranoe.com/builds/"
INTRO = ("==========================\n"
         "Red Discord Bot - Launcher\n"
         "==========================\n")
IS_WINDOWS = os.name == "nt"
IS_MAC = sys.platform == "darwin"
IS_64BIT = platform.machine().endswith("64")
INTERACTIVE_MODE = not len(sys.argv) > 1  # CLI flags = non-interactive
PYTHON_OK = sys.version_info >= (3, 5)
# Known-good MD5 checksums for the bundled 64bit ffmpeg binaries.
FFMPEG_FILES = {
    "ffmpeg.exe" : "e0d60f7c0d27ad9d7472ddf13e78dc89",
    "ffplay.exe" : "d100abe8281cbcc3e6aebe550c675e09",
    "ffprobe.exe" : "0e84b782c0346a98434ed476e937764f"
}
def parse_cli_arguments():
    """Build and parse the launcher's command line flags.

    Returns an argparse.Namespace with one boolean attribute per
    supported action (start, auto_restart, update_red, update_reqs,
    update_reqs_no_audio, repair).
    """
    parser = argparse.ArgumentParser(description="Red - Discord Bot's launcher")
    # (option strings, help text) for every store_true flag.
    flag_specs = (
        (("--start", "-s"), "Starts Red"),
        (("--auto-restart",), "Autorestarts Red in case of issues"),
        (("--update-red",), "Updates Red (git)"),
        (("--update-reqs",), "Updates requirements (w/ audio)"),
        (("--update-reqs-no-audio",), "Updates requirements (w/o audio)"),
        (("--repair",), "Issues a git reset --hard"),
    )
    for option_strings, help_text in flag_specs:
        parser.add_argument(*option_strings, help=help_text,
                            action="store_true")
    return parser.parse_args()
def install_reqs(audio):
    """Pip-install Red's requirements into the local ./lib folder.

    Parameters
    ----------
    audio : bool
        When True install the full requirements (voice support),
        otherwise the no-audio subset.
    """
    remove_reqs_readonly()  # clear read-only bits first (issue #569)
    interpreter = sys.executable
    if interpreter is None:
        print("Python interpreter not found.")
        return
    txt = REQS_TXT if audio else REQS_NO_AUDIO_TXT
    args = [
        interpreter, "-m",
        "pip", "install",
        "--upgrade",
        "--target", REQS_DIR,
        "-r", txt
    ]
    if IS_MAC:  # --target is a problem on Homebrew. See PR #552
        args.remove("--target")
        args.remove(REQS_DIR)
    code = subprocess.call(args)
    if code == 0:
        print("\nRequirements setup completed.")
    else:
        print("\nAn error occurred and the requirements setup might "
              "not be completed. Consult the docs.\n")
def update_pip():
    """Upgrade pip itself via the running interpreter, reporting the result."""
    interpreter = sys.executable
    if interpreter is None:
        print("Python interpreter not found.")
        return
    command = [interpreter, "-m", "pip", "install", "--upgrade", "pip"]
    exit_code = subprocess.call(command)
    if exit_code == 0:
        print("\nPip has been updated.")
    else:
        print("\nAn error occurred and pip might not have been updated.")
def update_red():
    """Fast-forward the local git checkout of Red, printing the outcome."""
    try:
        exit_code = subprocess.call(("git", "pull", "--ff-only"))
    except FileNotFoundError:
        print("\nError: Git not found. It's either not installed or not in "
              "the PATH environment variable like requested in the guide.")
        return
    if exit_code == 0:
        print("\nRed has been updated")
    else:
        print("\nRed could not update properly. If this is caused by edits "
              "you have made to the code you can try the repair option from "
              "the Maintenance submenu")
def _wipe_tree(path, success_msg, error_subject):
    """Recursively delete *path*, clearing read-only bits along the way.

    A missing path is silently ignored; any other failure is reported
    but not raised, matching the launcher's best-effort semantics.
    """
    try:
        shutil.rmtree(path, onerror=remove_readonly)
        print(success_msg)
    except FileNotFoundError:
        pass
    except Exception as e:
        print("An error occurred when trying to remove {}: "
              "{}".format(error_subject, e))


def reset_red(reqs=False, data=False, cogs=False, git_reset=False):
    """Selectively wipe parts of Red's installation.

    Parameters mirror the maintenance menu: *reqs* wipes ./lib (local
    packages), *data* wipes ./data (settings and cogs' data), *cogs*
    wipes ./cogs, and *git_reset* issues ``git reset --hard`` to
    discard local code changes.
    """
    # Previously this function repeated the same try/except rmtree
    # boilerplate three times; the shared logic now lives in _wipe_tree.
    if reqs:
        _wipe_tree(REQS_DIR, "Installed local packages have been wiped.",
                   "installed requirements")
    if data:
        _wipe_tree("data", "'data' folder has been wiped.",
                   "the 'data' folder")
    if cogs:
        _wipe_tree("cogs", "'cogs' folder has been wiped.",
                   "the 'cogs' folder")
    if git_reset:
        code = subprocess.call(("git", "reset", "--hard"))
        if code == 0:
            print("Red has been restored to the last local commit.")
        else:
            print("The repair has failed.")
def download_ffmpeg(bitness):
    """Fetch the ffmpeg binaries needed for audio support on Windows.

    64bit builds are downloaded from the Red repository and verified by
    MD5.  For 32bit the user is pointed at the official builds page
    instead, since no trusted 32bit binaries are bundled.
    """
    clear_screen()
    repo = "https://github.com/Twentysix26/Red-DiscordBot/raw/master/"
    verified = []

    if bitness == "32bit":
        print("Please download 'ffmpeg 32bit static' from the page that "
              "is about to open.\nOnce done, open the 'bin' folder located "
              "inside the zip.\nThere should be 3 files: ffmpeg.exe, "
              "ffplay.exe, ffprobe.exe.\nPut all three of them into the "
              "bot's main folder.")
        time.sleep(4)
        webbrowser.open(FFMPEG_BUILDS_URL)
        return

    for filename in FFMPEG_FILES:
        if os.path.isfile(filename):
            print("{} already present. Verifying integrity... "
                  "".format(filename), end="")
            _hash = calculate_md5(filename)
            if _hash == FFMPEG_FILES[filename]:
                verified.append(filename)
                print("Ok")
                continue
            else:
                print("Hash mismatch. Redownloading.")
        print("Downloading {}... Please wait.".format(filename))
        with urllib.request.urlopen(repo + filename) as data:
            with open(filename, "wb") as f:
                f.write(data.read())
        print("Download completed.")

    for filename, _hash in FFMPEG_FILES.items():
        if filename in verified:
            continue
        print("Verifying {}... ".format(filename), end="")
        # Fixed: was `if not calculate_md5(filename) != _hash`, a
        # double negative equivalent to this direct equality check.
        if calculate_md5(filename) == _hash:
            print("Passed.")
        else:
            print("Hash mismatch. Please redownload.")

    print("\nAll files have been downloaded.")
def verify_requirements():
    """Probe which requirement tiers are currently importable.

    Returns None when not even the basic requirements are present,
    False when basic (but not audio) requirements are installed, and
    True when both tiers are available.
    """
    # Reset the importer cache so a freshly created/removed lib folder
    # is picked up without restarting the launcher.
    sys.path_importer_cache = {}
    basic = find_spec("discord")
    audio = find_spec("nacl")
    if not basic:
        return None
    return audio is not None
def is_git_installed():
    """Return True when the ``git`` executable can be invoked."""
    devnull = subprocess.DEVNULL
    try:
        # Output is discarded; only the ability to launch git matters.
        subprocess.call(["git", "--version"], stdout=devnull,
                        stdin=devnull, stderr=devnull)
    except FileNotFoundError:
        return False
    return True
def requirements_menu():
    """Interactive submenu for installing requirements and ffmpeg."""
    clear_screen()
    while True:
        print(INTRO)
        print("Main requirements:\n")
        print("1. Install basic + audio requirements (recommended)")
        print("2. Install basic requirements")
        # ffmpeg binaries are only offered on Windows; other platforms
        # get ffmpeg from their package manager.
        if IS_WINDOWS:
            print("\nffmpeg (required for audio):")
            print("3. Install ffmpeg 32bit")
            if IS_64BIT:
                print("4. Install ffmpeg 64bit (recommended on Windows 64bit)")
        print("\n0. Go back")
        choice = user_choice()
        if choice == "1":
            install_reqs(audio=True)
            wait()
        elif choice == "2":
            install_reqs(audio=False)
            wait()
        elif choice == "3" and IS_WINDOWS:
            download_ffmpeg(bitness="32bit")
            wait()
        elif choice == "4" and (IS_WINDOWS and IS_64BIT):
            download_ffmpeg(bitness="64bit")
            wait()
        elif choice == "0":
            break
        clear_screen()
def update_menu():
    """Interactive submenu for updating Red, requirements and pip."""
    clear_screen()
    while True:
        print(INTRO)
        # Show which requirement tier is currently installed.
        reqs = verify_requirements()
        if reqs is None:
            status = "No requirements installed"
        elif reqs is False:
            status = "Basic requirements installed (no audio)"
        else:
            status = "Basic + audio requirements installed"
        print("Status: " + status + "\n")
        print("Update:\n")
        print("Red:")
        print("1. Update Red + requirements (recommended)")
        print("2. Update Red")
        print("3. Update requirements")
        print("\nOthers:")
        print("4. Update pip (might require admin privileges)")
        print("\n0. Go back")
        choice = user_choice()
        if choice == "1":
            update_red()
            print("Updating requirements...")
            # Reinstall the same tier (audio / no audio) already present.
            reqs = verify_requirements()
            if reqs is not None:
                install_reqs(audio=reqs)
            else:
                print("The requirements haven't been installed yet.")
            wait()
        elif choice == "2":
            update_red()
            wait()
        elif choice == "3":
            reqs = verify_requirements()
            if reqs is not None:
                install_reqs(audio=reqs)
            else:
                print("The requirements haven't been installed yet.")
            wait()
        elif choice == "4":
            update_pip()
            wait()
        elif choice == "0":
            break
        clear_screen()
def maintenance_menu():
    """Interactive submenu for repair / wipe / factory reset actions."""
    clear_screen()
    while True:
        print(INTRO)
        print("Maintenance:\n")
        print("1. Repair Red (discards code changes, keeps data intact)")
        print("2. Wipe 'data' folder (all settings, cogs' data...)")
        print("3. Wipe 'lib' folder (all local requirements / local installed"
              " python packages)")
        print("4. Factory reset")
        print("\n0. Go back")
        choice = user_choice()
        if choice == "1":
            # Destructive actions require explicit confirmation.
            print("Any code modification you have made will be lost. Data/"
                  "non-default cogs will be left intact. Are you sure?")
            if user_pick_yes_no():
                reset_red(git_reset=True)
                wait()
        elif choice == "2":
            print("Are you sure? This will wipe the 'data' folder, which "
                  "contains all your settings and cogs' data.\nThe 'cogs' "
                  "folder, however, will be left intact.")
            if user_pick_yes_no():
                reset_red(data=True)
                wait()
        elif choice == "3":
            # Wiping lib/ is recoverable (just reinstall), no confirmation.
            reset_red(reqs=True)
            wait()
        elif choice == "4":
            print("Are you sure? This will wipe ALL your Red's installation "
                  "data.\nYou'll lose all your settings, cogs and any "
                  "modification you have made.\nThere is no going back.")
            if user_pick_yes_no():
                reset_red(reqs=True, data=True, cogs=True, git_reset=True)
                wait()
        elif choice == "0":
            break
        clear_screen()
def run_red(autorestart):
    """Launch red.py in a subprocess and supervise it.

    Exit code 26 always triggers a restart (Red's own restart request);
    any other non-zero exit restarts only when *autorestart* is True.
    """
    interpreter = sys.executable
    if interpreter is None:  # This should never happen
        raise RuntimeError("Couldn't find Python's interpreter")
    if verify_requirements() is None:
        print("You don't have the requirements to start Red. "
              "Install them from the launcher.")
        if not INTERACTIVE_MODE:
            exit(1)
    cmd = (interpreter, "red.py")

    while True:
        try:
            code = subprocess.call(cmd)
        except KeyboardInterrupt:
            # Ctrl+C is treated as a clean shutdown.
            code = 0
            break
        else:
            if code == 0:
                break
            elif code == 26:
                print("Restarting Red...")
                continue
            else:
                if not autorestart:
                    break

    print("Red has been terminated. Exit code: %d" % code)

    if INTERACTIVE_MODE:
        wait()
def clear_screen():
    """Clear the terminal window using the platform's shell command."""
    os.system("cls" if IS_WINDOWS else "clear")
def wait():
    """Block on an "enter to continue" prompt, but only in interactive mode."""
    if not INTERACTIVE_MODE:
        return
    input("Press enter to continue.")
def user_choice():
    """Read one menu choice from stdin, normalized to stripped lowercase."""
    answer = input("> ")
    return answer.strip().lower()
def user_pick_yes_no():
    """Prompt until the user answers yes/y or no/n; return True for yes."""
    affirmative = ("yes", "y")
    negative = ("no", "n")
    answer = None
    while answer not in affirmative + negative:
        answer = input("Yes/No > ").lower().strip()
    return answer in affirmative
def remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror hook: make *path* writable and retry *func*."""
    writable_mode = 0o755
    os.chmod(path, writable_mode)
    func(path)
def remove_reqs_readonly():
    """Workaround for issue #569: make everything under the lib folder writable."""
    if not os.path.isdir(REQS_DIR):
        return
    os.chmod(REQS_DIR, 0o755)
    for root, dirs, files in os.walk(REQS_DIR):
        # Directories and files get the same permissive mode.
        for entry in dirs + files:
            os.chmod(os.path.join(root, entry), 0o755)
def calculate_md5(filename):
    """Return the hexadecimal MD5 digest of *filename*'s contents.

    Reads in 4 KiB chunks so arbitrarily large files fit in memory.
    """
    digest = hashlib.md5()
    with open(filename, "rb") as stream:
        while True:
            chunk = stream.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def create_fast_start_scripts():
    """Creates scripts for fast boot of Red without going
    through the launcher"""
    interpreter = sys.executable
    if not interpreter:
        return

    call = "\"{}\" launcher.py".format(interpreter)
    start_red = "{} --start".format(call)
    start_red_autorestart = "{} --start --auto-restart".format(call)
    modified = False

    if IS_WINDOWS:
        ccd = "pushd %~dp0\n"  # cd into the script's own folder
        pause = "\npause"
        ext = ".bat"
    else:
        ccd = 'cd "$(dirname "$0")"\n'
        pause = "\nread -rsp $'Press enter to continue...\\n'"
        if not IS_MAC:
            ext = ".sh"
        else:
            ext = ".command"  # double-clickable from Finder on macOS

    start_red = ccd + start_red + pause
    start_red_autorestart = ccd + start_red_autorestart + pause

    files = {
        "start_red" + ext : start_red,
        "start_red_autorestart" + ext : start_red_autorestart
    }

    if not IS_WINDOWS:
        # On Unix the launcher itself also gets a double-click script.
        files["start_launcher" + ext] = ccd + call

    for filename, content in files.items():
        if not os.path.isfile(filename):
            # Only create missing scripts; never overwrite user edits.
            print("Creating {}... (fast start scripts)".format(filename))
            modified = True
            with open(filename, "w") as f:
                f.write(content)

    if not IS_WINDOWS and modified:  # Let's make them executable on Unix
        for script in files:
            st = os.stat(script)
            os.chmod(script, st.st_mode | stat.S_IEXEC)
def main():
    """Top-level interactive menu loop of the launcher."""
    print("Verifying git installation...")
    has_git = is_git_installed()
    is_git_installation = os.path.isdir(".git")
    if IS_WINDOWS:
        os.system("TITLE Red Discord Bot - Launcher")
    clear_screen()

    try:
        create_fast_start_scripts()
    except Exception as e:
        # Non-fatal: the launcher works without the helper scripts.
        print("Failed making fast start scripts: {}\n".format(e))

    while True:
        print(INTRO)
        if not is_git_installation:
            print("WARNING: It doesn't look like Red has been "
                  "installed with git.\nThis means that you won't "
                  "be able to update and some features won't be working.\n"
                  "A reinstallation is recommended. Follow the guide "
                  "properly this time:\n"
                  "https://twentysix26.github.io/Red-Docs/\n")
        if not has_git:
            print("WARNING: Git not found. This means that it's either not "
                  "installed or not in the PATH environment variable like "
                  "requested in the guide.\n")
        print("1. Run Red /w autorestart in case of issues")
        print("2. Run Red")
        print("3. Update")
        print("4. Install requirements")
        print("5. Maintenance (repair, reset...)")
        print("\n0. Quit")
        choice = user_choice()
        if choice == "1":
            run_red(autorestart=True)
        elif choice == "2":
            run_red(autorestart=False)
        elif choice == "3":
            update_menu()
        elif choice == "4":
            requirements_menu()
        elif choice == "5":
            maintenance_menu()
        elif choice == "0":
            break
        clear_screen()
# Parsed at import time so both interactive and CLI paths can use it.
args = parse_cli_arguments()

if __name__ == '__main__':
    abspath = os.path.abspath(__file__)
    dirname = os.path.dirname(abspath)
    # Sets current directory to the script's
    os.chdir(dirname)
    if not PYTHON_OK:
        print("Red needs Python 3.5 or superior. Install the required "
              "version.\nPress enter to continue.")
        if INTERACTIVE_MODE:
            wait()
        exit(1)
    if pip is None:
        print("Red cannot work without the pip module. Please make sure to "
              "install Python without unchecking any option during the setup")
        wait()
        exit(1)
    # Non-interactive CLI actions first...
    if args.repair:
        reset_red(git_reset=True)
    if args.update_red:
        update_red()
    if args.update_reqs:
        install_reqs(audio=True)
    elif args.update_reqs_no_audio:
        install_reqs(audio=False)
    # ...then either the interactive menu or a direct start.
    if INTERACTIVE_MODE:
        main()
    elif args.start:
        print("Starting Red...")
        run_red(autorestart=args.auto_restart)
| 31.092334 | 80 | 0.558525 |
687c0ab018691caa8ba9e2d46c6051bbbf92d757 | 6,275 | py | Python | custom/m4change/user_calcs/all_hmis_report_calcs.py | dslowikowski/commcare-hq | ad8885cf8dab69dc85cb64f37aeaf06106124797 | [
"BSD-3-Clause"
] | 1 | 2015-02-10T23:26:39.000Z | 2015-02-10T23:26:39.000Z | custom/m4change/user_calcs/all_hmis_report_calcs.py | SEL-Columbia/commcare-hq | 992ee34a679c37f063f86200e6df5a197d5e3ff6 | [
"BSD-3-Clause"
] | 1 | 2022-03-12T01:03:25.000Z | 2022-03-12T01:03:25.000Z | custom/m4change/user_calcs/all_hmis_report_calcs.py | johan--/commcare-hq | 86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd | [
"BSD-3-Clause"
] | null | null | null | import fluff
from custom.m4change.constants import PMTCT_CLIENTS_FORM
from custom.m4change.user_calcs import get_date_delivery, form_passes_filter_date_delivery, get_received_on
from datetime import datetime
def _get_comparison_results(field_value, comparison_operator, expected_value):
result = True
if isinstance(expected_value, list):
for expected_value_item in expected_value:
if not comparison_operator(field_value, expected_value_item):
result = False
break
elif not comparison_operator(field_value, expected_value):
result = False
return result
class FormComparisonCalculator(fluff.Calculator):
    """Fluff calculator counting forms whose fields satisfy comparisons.

    *comparisons* is a list of (field_name, operator, expected_value)
    tuples.  With joint=True every comparison must hold (AND); with
    joint=False a single passing comparison suffices (OR).
    """

    def __init__(self, comparisons, namespaces, filter_function=None, joint=True, *args, **kwargs):
        self.comparisons = comparisons
        self.namespaces = namespaces
        self.filter_function = filter_function
        # Forms filtered by delivery date are also dated by delivery
        # date; everything else is dated by submission time.
        self.get_date_function = get_date_delivery if self.filter_function is form_passes_filter_date_delivery else get_received_on
        self.joint = joint
        super(FormComparisonCalculator, self).__init__(*args, **kwargs)

    @fluff.date_emitter
    def total(self, form):
        if form.xmlns in self.namespaces and (self.filter_function is None or self.filter_function(form)):
            all_filters_passed = True
            if self.joint:
                # AND semantics: every comparison must pass.
                for c in self.comparisons:
                    field_value = form.form.get(c[0], "")
                    if field_value is None:
                        # Normalize explicit nulls to "" for comparison.
                        field_value = ""
                    if not _get_comparison_results(field_value, c[1], c[2]):
                        all_filters_passed = False
                        break
                if all_filters_passed:
                    yield [self.get_date_function(form), 1]
            else:
                # OR semantics: one passing comparison is enough.
                all_filters_passed = False
                for c in self.comparisons:
                    field_value = form.form.get(c[0], "")
                    if field_value is None:
                        field_value = ""
                    if _get_comparison_results(field_value, c[1], c[2]):
                        all_filters_passed = True
                        break
                if all_filters_passed:
                    yield [self.get_date_function(form), 1]
def _get_child_date_delivery(form):
child_date_delivery = form.form.get("child_date_delivery", None)
return child_date_delivery if child_date_delivery else None
class InfantsBornToHivInfectedWomenCotrimoxazoleLt2Months(fluff.Calculator):
    """Count PMTCT forms where an infant aged < 2 months got cotrimoxazole."""

    @fluff.date_emitter
    def total(self, form):
        if form.xmlns == PMTCT_CLIENTS_FORM and form.form.get("commenced_drugs", None) is not None:
            commenced_drugs = form.form.get("commenced_drugs", "")
            if "infant_cotrimoxazole" in commenced_drugs:
                date_delivery = _get_child_date_delivery(form)
                if date_delivery is not None:
                    received_on = get_received_on(form)
                    # "< 2 months" approximated as fewer than 60 days
                    # between delivery and submission.
                    # NOTE(review): assumes date_delivery is date-like and
                    # comparable with received_on -- verify upstream parsing.
                    if (received_on - date_delivery).days < 60:
                        yield [received_on, 1]
class InfantsBornToHivInfectedWomenCotrimoxazoleGte2Months(fluff.Calculator):
    """Count PMTCT forms where an infant aged >= 2 months got cotrimoxazole."""

    @fluff.date_emitter
    def total(self, form):
        if form.xmlns == PMTCT_CLIENTS_FORM and form.form.get("commenced_drugs", None) is not None:
            commenced_drugs = form.form.get("commenced_drugs", "")
            if "infant_cotrimoxazole" in commenced_drugs:
                date_delivery = _get_child_date_delivery(form)
                if date_delivery is not None:
                    received_on = get_received_on(form)
                    # ">= 2 months" approximated as 60 or more days.
                    if (received_on - date_delivery).days >= 60:
                        yield [received_on, 1]
class InfantsBornToHivInfectedWomenReceivedHivTestLt2Months(fluff.Calculator):
    """Count PMTCT forms with a DPS HIV test result for an infant < 2 months."""

    @fluff.date_emitter
    def total(self, form):
        if form.xmlns == PMTCT_CLIENTS_FORM and form.form.get("infant_dps", None) is not None:
            infant_dps = form.form.get("infant_dps", "")
            # Any conclusive result (positive or negative) counts as tested.
            if infant_dps in ["positive", "negative"]:
                date_delivery = _get_child_date_delivery(form)
                if date_delivery is not None:
                    received_on = get_received_on(form)
                    # "< 2 months" approximated as fewer than 60 days.
                    if (received_on - date_delivery).days < 60:
                        yield [received_on, 1]
class InfantsBornToHivInfectedWomenReceivedHivTestGte2Months(fluff.Calculator):
    """Count PMTCT forms with a DPS HIV test result for an infant >= 2 months."""

    @fluff.date_emitter
    def total(self, form):
        if form.xmlns == PMTCT_CLIENTS_FORM and form.form.get("infant_dps", None) is not None:
            infant_dps = form.form.get("infant_dps", "")
            if infant_dps in ["positive", "negative"]:
                date_delivery = _get_child_date_delivery(form)
                if date_delivery is not None:
                    received_on = get_received_on(form)
                    # ">= 2 months" approximated as 60 or more days.
                    if (received_on - date_delivery).days >= 60:
                        yield [received_on, 1]
class InfantsBornToHivInfectedWomenReceivedHivTestLt18Months(fluff.Calculator):
    """Count PMTCT forms with a rapid HIV test result for an infant < 18 months."""

    @fluff.date_emitter
    def total(self, form):
        if form.xmlns == PMTCT_CLIENTS_FORM and form.form.get("infant_rapid_test", None) is not None:
            infant_rapid_test = form.form.get("infant_rapid_test", "")
            if infant_rapid_test in ["positive", "negative"]:
                date_delivery = _get_child_date_delivery(form)
                if date_delivery is not None:
                    received_on = get_received_on(form)
                    # Age in months approximated as days / 30.
                    # NOTE(review): written for Python 2 integer division;
                    # under Python 3 `/` yields a float (comparison still
                    # works, boundary semantics differ slightly).
                    if (received_on - date_delivery).days / 30 < 18:
                        yield [received_on, 1]
class InfantsBornToHivInfectedWomenReceivedHivTestGte18Months(fluff.Calculator):
    """Count PMTCT forms with a rapid HIV test result for an infant >= 18 months."""

    @fluff.date_emitter
    def total(self, form):
        if form.xmlns == PMTCT_CLIENTS_FORM and form.form.get("infant_rapid_test", None) is not None:
            infant_rapid_test = form.form.get("infant_rapid_test", "")
            if infant_rapid_test in ["positive", "negative"]:
                date_delivery = _get_child_date_delivery(form)
                if date_delivery is not None:
                    received_on = get_received_on(form)
                    # Age in months approximated as days / 30 (see the
                    # Lt18Months counterpart for the division caveat).
                    if (received_on - date_delivery).days / 30 >= 18:
                        yield [received_on, 1]
0c3999936907605451d4190c259571ad01fdb843 | 541 | py | Python | config.py | Narasimhareddy26/imaginary | 5287d4b5ffeb7ec12d8eca834290d62b64a64c55 | [
"MIT"
] | 1 | 2019-10-27T14:05:30.000Z | 2019-10-27T14:05:30.000Z | config.py | Narasimhareddy26/imaginary | 5287d4b5ffeb7ec12d8eca834290d62b64a64c55 | [
"MIT"
] | null | null | null | config.py | Narasimhareddy26/imaginary | 5287d4b5ffeb7ec12d8eca834290d62b64a64c55 | [
"MIT"
] | 2 | 2018-01-25T00:37:31.000Z | 2019-10-27T14:05:32.000Z | """
@author Victor I. Afolabi
A.I. Engineer & Software developer
javafolabi@gmail.com
Created on 01 February, 2018 @ 2:40 PM.
Copyright © 2018. Victor. All rights reserved.
"""
from passlib.hash import sha256_crypt
from settings import APP_NAME
class Base(object):
    """Shared Flask-style configuration defaults."""
    DEBUG = False
    TESTING = False
    # NOTE(review): sha256_crypt.encrypt salts per call, so SECRET_KEY
    # changes on every process start -- existing sessions/signatures
    # are invalidated on restart. Confirm this is intended.
    SECRET_KEY = sha256_crypt.encrypt(APP_NAME)
class Production(Base):
    """Production settings: local MySQL database named after the app."""
    DATABASE_URI = f'mysql://victor@localhost/{APP_NAME}'
class Development(Base):
    """Development settings: debug mode enabled."""
    DEBUG = True
class Testing(Base):
    """Test-suite settings: testing mode enabled."""
    TESTING = True
| 16.90625 | 57 | 0.698706 |
70820c80e35e128547803fd2d5d18d10796a82ee | 3,803 | py | Python | eoxserver/resources/coverages/metadata/formats/eoom.py | constantinius/eoxserver_combined | 68f261133fed65a4e8a6ddba82b0d2845171e4bf | [
"OML"
] | null | null | null | eoxserver/resources/coverages/metadata/formats/eoom.py | constantinius/eoxserver_combined | 68f261133fed65a4e8a6ddba82b0d2845171e4bf | [
"OML"
] | null | null | null | eoxserver/resources/coverages/metadata/formats/eoom.py | constantinius/eoxserver_combined | 68f261133fed65a4e8a6ddba82b0d2845171e4bf | [
"OML"
] | null | null | null | #-------------------------------------------------------------------------------
#
# Project: EOxServer <http://eoxserver.org>
# Authors: Fabian Schindler <fabian.schindler@eox.at>
#
#-------------------------------------------------------------------------------
# Copyright (C) 2013 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------
from django.contrib.gis.geos import Polygon, MultiPolygon
from eoxserver.core.util.timetools import parse_iso8601
from eoxserver.core.util.xmltools import parse, NameSpace, NameSpaceMap
from eoxserver.core.util.iteratortools import pairwise
from eoxserver.core import Component, implements
from eoxserver.core.decoders import xml
from eoxserver.resources.coverages.metadata.interfaces import (
MetadataReaderInterface
)
# XML namespaces used by EO O&M (Earth Observation) documents, bound to
# their conventional prefixes for use in XPath expressions.
NS_EOP = NameSpace("http://www.opengis.net/eop/2.0", "eop")
NS_OM = NameSpace("http://www.opengis.net/om/2.0", "om")
NS_GML = NameSpace("http://www.opengis.net/gml/3.2", "gml")
# Combined prefix->URI map passed as `namespaces=` to xpath() calls and
# used by the XML decoder in this module.
nsmap = NameSpaceMap(NS_EOP, NS_OM, NS_GML)
class EOOMFormatReader(Component):
    """Metadata reader component for EO O&M XML documents.

    Recognizes documents whose root element is ``eop:EarthObservation``
    and extracts identifier, acquisition time span and footprint.
    """

    implements(MetadataReaderInterface)

    def test(self, obj):
        # Applicable only when the input parses as XML and the root tag
        # is eop:EarthObservation.
        tree = parse(obj)
        if tree is None:
            return False
        return tree.getroot().tag == NS_EOP("EarthObservation")

    def read(self, obj):
        # Parse first; anything unparseable is a hard error for read().
        tree = parse(obj)
        if tree is None:
            raise Exception("Could not parse from obj '%s'." % repr(obj))
        decoder = EOOMFormatDecoder(tree)
        values = {
            "identifier": decoder.identifier,
            "begin_time": decoder.begin_time,
            "end_time": decoder.end_time,
            # All footprint polygons are merged into a single MultiPolygon.
            "footprint": MultiPolygon(*decoder.polygons),
            "format": "eogml",
        }
        return values
def parse_polygon_xml(elem):
    """Build a GEOS ``Polygon`` from a ``gml:Polygon`` element.

    The first constructor argument is the exterior ring; any interior
    rings (holes) follow as additional arguments.
    """
    exterior_text = elem.xpath(
        "gml:exterior/gml:LinearRing/gml:posList", namespaces=nsmap
    )[0].text
    interior_nodes = elem.xpath(
        "gml:interior/gml:LinearRing/gml:posList", namespaces=nsmap
    )
    interior_rings = [parse_ring(node.text) for node in interior_nodes]
    return Polygon(parse_ring(exterior_text), *interior_rings)
def parse_ring(string):
    """Parse a ``gml:posList`` coordinate string into (lon, lat) tuples.

    The posList stores space-separated values as "lat lon lat lon ...";
    consecutive values are paired and swapped so GEOS receives
    (x, y) == (lon, lat) order.
    """
    # NOTE(review): `pairwise` is eoxserver's pairing helper, not
    # itertools.pairwise -- presumably it yields non-overlapping pairs
    # (s0, s1), (s2, s3), ...; confirm against
    # eoxserver.core.util.iteratortools before changing this.
    raw_coords = map(float, string.split(" "))
    return [(lon, lat) for lat, lon in pairwise(raw_coords)]
class EOOMFormatDecoder(xml.Decoder):
    # Declarative XPath decoder for eop:EarthObservation documents: each
    # Parameter extracts one value from the tree. num=1 means exactly one
    # match is required; num="+" means one or more matches.
    identifier = xml.Parameter("eop:metaDataProperty/eop:EarthObservationMetaData/eop:identifier/text()", type=str, num=1)
    # Acquisition time span, parsed from ISO 8601 strings.
    begin_time = xml.Parameter("om:phenomenonTime/gml:TimePeriod/gml:beginPosition/text()", type=parse_iso8601, num=1)
    end_time = xml.Parameter("om:phenomenonTime/gml:TimePeriod/gml:endPosition/text()", type=parse_iso8601, num=1)
    # Footprint polygons; each gml:Polygon is converted via parse_polygon_xml.
    polygons = xml.Parameter("om:featureOfInterest/eop:Footprint/eop:multiExtentOf/gml:MultiSurface/gml:surfaceMember/gml:Polygon", type=parse_polygon_xml, num="+")
    namespaces = nsmap
| 44.22093 | 164 | 0.679464 |
5ef5bf82fbc39c883761d935eb2d713adb075dbf | 611 | py | Python | main/generator_key.py | genisyskernel/random-key-twitch | f90d4b551992c4bced5cd769806dcb6db3bacb1d | [
"MIT"
] | 1 | 2020-11-28T05:58:57.000Z | 2020-11-28T05:58:57.000Z | main/generator_key.py | genisyskernel/random-key-twitch | f90d4b551992c4bced5cd769806dcb6db3bacb1d | [
"MIT"
] | null | null | null | main/generator_key.py | genisyskernel/random-key-twitch | f90d4b551992c4bced5cd769806dcb6db3bacb1d | [
"MIT"
] | null | null | null | from random import randint, choice
import string
chars_upper = string.ascii_uppercase
chars_lower = string.ascii_lowercase
chars_letters = string.ascii_letters
chars_numbers = string.digits
key0 = "live"
key1 = randint(000000000, 999999999)
key_main = chars_letters + chars_numbers
key2 = "".join(choice(key_main) for _ in range(30))
#print(f"{chars_lower} | {chars_upper} | {chars_letters} | {chars_numbers}")
print(f"{key0}_{key1}_{key2}")
"""
live_237506076_QJ2c4bm59bwito3B5vM2wOUJFmm0uS
live_486865329_b18I1GSIyMHvwkeYKxhUMVUvlATYDr
live_123456789_abcdefghijklmnopqrsu0123456789
live_9[09]_30[aA09]
""" | 27.772727 | 76 | 0.805237 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.